Mirror of https://github.com/nushell/nushell.git (synced 2025-05-17 08:20:49 +02:00)

Commit 0f463b18ac: Merge branch 'main' into ecow-record
.github/pull_request_template.md (vendored): 2 lines changed

@@ -26,7 +26,7 @@ Make sure you've run and fixed any issues with these commands:
 - `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
 - `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to check that you're using the standard code style
 - `cargo test --workspace` to check that all tests pass (on Windows make sure to [enable developer mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
-- `cargo run -- -c "use std testing; testing run-tests --path crates/nu-std"` to run the tests for the standard library
+- `cargo run -- -c "use toolkit.nu; toolkit test stdlib"` to run the tests for the standard library

 > **Note**
 > from `nushell` you can also use the `toolkit` as follows
.github/workflows/audit.yml (vendored): 2 lines changed

@@ -19,7 +19,7 @@ jobs:
     # Prevent sudden announcement of a new advisory from failing ci:
     continue-on-error: true
     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5
      - uses: rustsec/audit-check@v1.4.1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (vendored): 8 lines changed

@@ -44,7 +44,7 @@ jobs:
     runs-on: ${{ matrix.platform }}

     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0

@@ -89,7 +89,7 @@ jobs:
     runs-on: ${{ matrix.platform }}

     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0

@@ -121,7 +121,7 @@ jobs:
     runs-on: ${{ matrix.platform }}

     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0

@@ -174,7 +174,7 @@ jobs:
     runs-on: ${{ matrix.platform }}

     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
.github/workflows/nightly-build.yml (vendored): 12 lines changed

@@ -27,7 +27,7 @@ jobs:
     # if: github.repository == 'nushell/nightly'
     steps:
      - name: Checkout
-       uses: actions/checkout@v4.1.4
+       uses: actions/checkout@v4.1.5
        if: github.repository == 'nushell/nightly'
        with:
          ref: main

@@ -123,7 +123,7 @@ jobs:
     runs-on: ${{matrix.os}}

     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5
        with:
          ref: main
          fetch-depth: 0

@@ -174,7 +174,7 @@ jobs:
     # REF: https://github.com/marketplace/actions/gh-release
     # Create a release only in nushell/nightly repo
     - name: Publish Archive
-      uses: softprops/action-gh-release@v2.0.4
+      uses: softprops/action-gh-release@v2.0.5
      if: ${{ startsWith(github.repository, 'nushell/nightly') }}
      with:
        prerelease: true

@@ -235,7 +235,7 @@ jobs:
     runs-on: ${{matrix.os}}

     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5
        with:
          ref: main
          fetch-depth: 0

@@ -286,7 +286,7 @@ jobs:
     # REF: https://github.com/marketplace/actions/gh-release
     # Create a release only in nushell/nightly repo
     - name: Publish Archive
-      uses: softprops/action-gh-release@v2.0.4
+      uses: softprops/action-gh-release@v2.0.5
      if: ${{ startsWith(github.repository, 'nushell/nightly') }}
      with:
        draft: false

@@ -310,7 +310,7 @@ jobs:
     - name: Waiting for Release
       run: sleep 1800

-    - uses: actions/checkout@v4.1.4
+    - uses: actions/checkout@v4.1.5
      with:
        ref: main

.github/workflows/release.yml (vendored): 8 lines changed

@@ -73,7 +73,7 @@ jobs:
     runs-on: ${{matrix.os}}

     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5

      - name: Update Rust Toolchain Target
        run: |

@@ -104,7 +104,7 @@ jobs:

     # REF: https://github.com/marketplace/actions/gh-release
     - name: Publish Archive
-      uses: softprops/action-gh-release@v2.0.4
+      uses: softprops/action-gh-release@v2.0.5
      if: ${{ startsWith(github.ref, 'refs/tags/') }}
      with:
        draft: true

@@ -163,7 +163,7 @@ jobs:
     runs-on: ${{matrix.os}}

     steps:
-      - uses: actions/checkout@v4.1.4
+      - uses: actions/checkout@v4.1.5

      - name: Update Rust Toolchain Target
        run: |

@@ -194,7 +194,7 @@ jobs:

     # REF: https://github.com/marketplace/actions/gh-release
     - name: Publish Archive
-      uses: softprops/action-gh-release@v2.0.4
+      uses: softprops/action-gh-release@v2.0.5
      if: ${{ startsWith(github.ref, 'refs/tags/') }}
      with:
        draft: true
.github/workflows/typos.yml (vendored): 2 lines changed

@@ -7,7 +7,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - name: Checkout Actions Repository
-       uses: actions/checkout@v4.1.4
+       uses: actions/checkout@v4.1.5

      - name: Check spelling
        uses: crate-ci/typos@v1.21.0
@@ -55,7 +55,6 @@ It is good practice to cover your changes with a test. Also, try to think about

 Tests can be found in different places:
 * `/tests`
-* `src/tests`
 * command examples
 * crate-specific tests

Cargo.lock (generated): 24 lines changed

@@ -377,7 +377,7 @@ dependencies = [
 "bitflags 2.5.0",
 "cexpr",
 "clang-sys",
-"itertools 0.12.1",
+"itertools 0.11.0",
 "lazy_static",
 "lazycell",
 "proc-macro2",

@@ -2043,9 +2043,9 @@ dependencies = [

 [[package]]
 name = "interprocess"
-version = "2.0.1"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7c7fb8583fab9503654385e2bafda123376445a77027a1b106dd7e44cf51122f"
+checksum = "7b4d0250d41da118226e55b3d50ca3f0d9e0a0f6829b92f543ac0054aeea1572"
 dependencies = [
 "libc",
 "recvmsg",

@@ -2861,6 +2861,7 @@ dependencies = [
 "reedline",
 "rstest",
 "sysinfo",
+"tempfile",
 "unicode-segmentation",
 "uuid",
 "which",

@@ -3263,11 +3264,13 @@ dependencies = [
 "indexmap",
 "lru",
 "miette",
+"nix",
 "nu-path",
 "nu-system",
 "nu-test-support",
 "nu-utils",
 "num-format",
+"os_pipe",
 "pretty_assertions",
 "rmp-serde",
 "rstest",

@@ -4850,8 +4853,7 @@ dependencies = [
 [[package]]
 name = "reedline"
 version = "0.32.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abf59e4c97b5049ba96b052cdb652368305a2eddcbce9bf1c16f9d003139eeea"
+source = "git+https://github.com/nushell/reedline?branch=main#a580ea56d4e5a889468b2969d2a1534379504ab6"
 dependencies = [
 "arboard",
 "chrono",

@@ -5065,9 +5067,9 @@ dependencies = [

 [[package]]
 name = "rust-embed"
-version = "8.3.0"
+version = "8.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb78f46d0066053d16d4ca7b898e9343bc3530f71c61d5ad84cd404ada068745"
+checksum = "19549741604902eb99a7ed0ee177a0663ee1eda51a29f71401f166e47e77806a"
 dependencies = [
 "rust-embed-impl",
 "rust-embed-utils",

@@ -5076,9 +5078,9 @@ dependencies = [

 [[package]]
 name = "rust-embed-impl"
-version = "8.3.0"
+version = "8.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b91ac2a3c6c0520a3fb3dd89321177c3c692937c4eb21893378219da10c44fc8"
+checksum = "cb9f96e283ec64401f30d3df8ee2aaeb2561f34c824381efa24a35f79bf40ee4"
 dependencies = [
 "proc-macro2",
 "quote",

@@ -5089,9 +5091,9 @@ dependencies = [

 [[package]]
 name = "rust-embed-utils"
-version = "8.3.0"
+version = "8.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86f69089032567ffff4eada41c573fc43ff466c7db7c5688b2e7969584345581"
+checksum = "38c74a686185620830701348de757fd36bef4aa9680fd23c49fc539ddcc1af32"
 dependencies = [
 "sha2",
 "walkdir",
@@ -94,7 +94,7 @@ heck = "0.5.0"
 human-date-parser = "0.1.1"
 indexmap = "2.2"
 indicatif = "0.17"
-interprocess = "2.0.1"
+interprocess = "2.1.0"
 is_executable = "1.0"
 itertools = "0.12"
 libc = "0.2"

@@ -119,7 +119,7 @@ num-traits = "0.2"
 omnipath = "0.1"
 once_cell = "1.18"
 open = "5.1"
-os_pipe = "1.1"
+os_pipe = { version = "1.1", features = ["io_safety"] }
 pathdiff = "0.2"
 percent-encoding = "2"
 pretty_assertions = "1.4"

@@ -140,7 +140,7 @@ ropey = "1.6.1"
 roxmltree = "0.19"
 rstest = { version = "0.18", default-features = false }
 rusqlite = "0.31"
-rust-embed = "8.3.0"
+rust-embed = "8.4.0"
 same-file = "1.0"
 serde = { version = "1.0", default-features = false }
 serde_json = "1.0"

@@ -305,7 +305,7 @@ bench = false
 # To use a development version of a dependency please use a global override here
 # changing versions in each sub-crate of the workspace is tedious
 [patch.crates-io]
-# reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
+reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
 # nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}

 # Run all benchmarks with `cargo bench`
@@ -4,15 +4,11 @@ use nu_plugin_protocol::{PluginCallResponse, PluginOutput};

 use nu_protocol::{
     engine::{EngineState, Stack},
-    eval_const::create_nu_constant,
-    PipelineData, Span, Spanned, Value, NU_VARIABLE_ID,
+    PipelineData, Span, Spanned, Value,
 };
 use nu_std::load_standard_library;
 use nu_utils::{get_default_config, get_default_env};
-use std::{
-    path::{Path, PathBuf},
-    rc::Rc,
-};
+use std::rc::Rc;

 use std::hint::black_box;

@@ -22,38 +18,18 @@ fn load_bench_commands() -> EngineState {
     nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
 }

-fn canonicalize_path(engine_state: &EngineState, path: &Path) -> PathBuf {
-    let cwd = engine_state.cwd_as_string(None).unwrap();
-
-    if path.exists() {
-        match nu_path::canonicalize_with(path, cwd) {
-            Ok(canon_path) => canon_path,
-            Err(_) => path.to_owned(),
-        }
-    } else {
-        path.to_owned()
-    }
-}
-
-fn get_home_path(engine_state: &EngineState) -> PathBuf {
-    nu_path::home_dir()
-        .map(|path| canonicalize_path(engine_state, &path))
-        .unwrap_or_default()
-}
-
 fn setup_engine() -> EngineState {
     let mut engine_state = load_bench_commands();
-    let home_path = get_home_path(&engine_state);
+    let cwd = std::env::current_dir()
+        .unwrap()
+        .into_os_string()
+        .into_string()
+        .unwrap();

     // parsing config.nu breaks without PWD set, so set a valid path
-    engine_state.add_env_var(
-        "PWD".into(),
-        Value::string(home_path.to_string_lossy(), Span::test_data()),
-    );
+    engine_state.add_env_var("PWD".into(), Value::string(cwd, Span::test_data()));

-    let nu_const = create_nu_constant(&engine_state, Span::unknown())
-        .expect("Failed to create nushell constant.");
-    engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
+    engine_state.generate_nu_constant();

     engine_state
 }

@@ -107,6 +83,7 @@ fn bench_command(
     b.iter(move || {
         let mut stack = stack.clone();
         let mut engine = engine.clone();
+        #[allow(clippy::unit_arg)]
         black_box(
             evaluate_commands(
                 &commands,
@@ -15,6 +15,7 @@ nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.93.1" }
 nu-command = { path = "../nu-command", version = "0.93.1" }
 nu-test-support = { path = "../nu-test-support", version = "0.93.1" }
 rstest = { workspace = true, default-features = false }
+tempfile = { workspace = true }

 [dependencies]
 nu-cmd-base = { path = "../nu-cmd-base", version = "0.93.1" }
@@ -1,13 +1,18 @@
 use crate::completions::{CompletionOptions, SortBy};
-use nu_protocol::{engine::StateWorkingSet, levenshtein_distance, Span};
+use nu_protocol::{
+    engine::{Stack, StateWorkingSet},
+    levenshtein_distance, Span,
+};
 use reedline::Suggestion;

 // Completer trait represents the three stages of the completion
 // fetch, filter and sort
 pub trait Completer {
+    #[allow(clippy::too_many_arguments)]
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
+        stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,
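The hunk above changes the `Completer` trait itself: completers now receive the caller's `Stack` by reference and reach the permanent `EngineState` through the `StateWorkingSet`, instead of caching both in each struct. A rough sketch of an implementation under the new signature follows, as it might look inside nushell's completions module; `NoopCompletion` is hypothetical, the trailing `pos` and `options` parameters are inferred from the completer hunks further down, and the trait's other items are assumed to keep their defaults.

use crate::completions::{Completer, CompletionOptions};
use nu_protocol::{
    engine::{Stack, StateWorkingSet},
    Span,
};

use super::SemanticSuggestion;

// Hypothetical, stateless completer written against the reworked trait: all
// engine data arrives through `working_set` and the new `stack` parameter.
struct NoopCompletion;

impl Completer for NoopCompletion {
    #[allow(clippy::too_many_arguments)]
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        _stack: &Stack,
        _prefix: Vec<u8>,
        _span: Span,
        _offset: usize,
        _pos: usize,
        _options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        // The permanent engine state stays reachable when a completer needs it.
        let _config = &working_set.permanent_state.config;
        vec![]
    }
}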
@@ -4,16 +4,14 @@ use crate::{
 };
 use nu_parser::FlatShape;
 use nu_protocol::{
-    engine::{CachedFile, EngineState, StateWorkingSet},
+    engine::{CachedFile, Stack, StateWorkingSet},
     Span,
 };
 use reedline::Suggestion;
-use std::sync::Arc;

 use super::SemanticSuggestion;

 pub struct CommandCompletion {
-    engine_state: Arc<EngineState>,
     flattened: Vec<(Span, FlatShape)>,
     flat_shape: FlatShape,
     force_completion_after_space: bool,

@@ -21,14 +19,11 @@ pub struct CommandCompletion {

 impl CommandCompletion {
     pub fn new(
-        engine_state: Arc<EngineState>,
-        _: &StateWorkingSet,
         flattened: Vec<(Span, FlatShape)>,
         flat_shape: FlatShape,
         force_completion_after_space: bool,
     ) -> Self {
         Self {
-            engine_state,
             flattened,
             flat_shape,
             force_completion_after_space,

@@ -37,13 +32,14 @@ impl CommandCompletion {

     fn external_command_completion(
         &self,
+        working_set: &StateWorkingSet,
         prefix: &str,
         match_algorithm: MatchAlgorithm,
     ) -> Vec<String> {
         let mut executables = vec![];

         // os agnostic way to get the PATH env var
-        let paths = self.engine_state.get_path_env_var();
+        let paths = working_set.permanent_state.get_path_env_var();

         if let Some(paths) = paths {
             if let Ok(paths) = paths.as_list() {

@@ -52,7 +48,10 @@ impl CommandCompletion {

                 if let Ok(mut contents) = std::fs::read_dir(path.as_ref()) {
                     while let Some(Ok(item)) = contents.next() {
-                        if self.engine_state.config.max_external_completion_results
+                        if working_set
+                            .permanent_state
+                            .config
+                            .max_external_completion_results
                             > executables.len() as i64
                             && !executables.contains(
                                 &item

@@ -114,7 +113,7 @@ impl CommandCompletion {

         if find_externals {
             let results_external = self
-                .external_command_completion(&partial, match_algorithm)
+                .external_command_completion(working_set, &partial, match_algorithm)
                 .into_iter()
                 .map(move |x| SemanticSuggestion {
                     suggestion: Suggestion {

@@ -161,6 +160,7 @@ impl Completer for CommandCompletion {
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
+        _stack: &Stack,
         _prefix: Vec<u8>,
         span: Span,
         offset: usize,

@@ -266,6 +266,8 @@ pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool
 #[cfg(test)]
 mod command_completions_tests {
     use super::*;
+    use nu_protocol::engine::EngineState;
+    use std::sync::Arc;

     #[test]
     fn test_find_non_whitespace_index() {
@@ -22,10 +22,10 @@ pub struct NuCompleter {
 }

 impl NuCompleter {
-    pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
+    pub fn new(engine_state: Arc<EngineState>, stack: Arc<Stack>) -> Self {
         Self {
             engine_state,
-            stack: stack.reset_out_dest().capture(),
+            stack: Stack::with_parent(stack).reset_out_dest().capture(),
         }
     }
 }

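Put differently, `NuCompleter` now borrows a shared stack (`Arc<Stack>`) and works on a child created with `Stack::with_parent`, rather than consuming a cloned `Stack`. A minimal sketch of that pattern, using only the `nu_protocol` calls visible in the hunk above (the helper name is illustrative):

use std::sync::Arc;

use nu_protocol::engine::Stack;

// Derive the stack a completer should work on from a shared parent stack,
// instead of cloning the caller's stack outright.
fn completion_stack(parent: Arc<Stack>) -> Stack {
    Stack::with_parent(parent).reset_out_dest().capture()
}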
@@ -52,8 +52,15 @@ impl NuCompleter {
         };

         // Fetch
-        let mut suggestions =
-            completer.fetch(working_set, prefix.clone(), new_span, offset, pos, &options);
+        let mut suggestions = completer.fetch(
+            working_set,
+            &self.stack,
+            prefix.clone(),
+            new_span,
+            offset,
+            pos,
+            &options,
+        );

         // Sort
         suggestions = completer.sort(suggestions, prefix);

@@ -96,9 +103,8 @@ impl NuCompleter {
             PipelineData::empty(),
         );

-        match result {
-            Ok(pd) => {
-                let value = pd.into_value(span);
+        match result.and_then(|data| data.into_value(span)) {
+            Ok(value) => {
                 if let Value::List { vals, .. } = value {
                     let result =
                         map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);

@@ -175,11 +181,8 @@ impl NuCompleter {

             // Variables completion
             if prefix.starts_with(b"$") || most_left_var.is_some() {
-                let mut completer = VariableCompletion::new(
-                    self.engine_state.clone(),
-                    self.stack.clone(),
-                    most_left_var.unwrap_or((vec![], vec![])),
-                );
+                let mut completer =
+                    VariableCompletion::new(most_left_var.unwrap_or((vec![], vec![])));

                 return self.process_completion(
                     &mut completer,

@@ -224,8 +227,6 @@ impl NuCompleter {
                 || (flat_idx == 0 && working_set.get_span_contents(new_span).is_empty())
             {
                 let mut completer = CommandCompletion::new(
-                    self.engine_state.clone(),
-                    &working_set,
                     flattened.clone(),
                     // flat_idx,
                     FlatShape::String,

@@ -253,10 +254,7 @@ impl NuCompleter {
                 || prev_expr_str == b"overlay use"
                 || prev_expr_str == b"source-env"
             {
-                let mut completer = DotNuCompletion::new(
-                    self.engine_state.clone(),
-                    self.stack.clone(),
-                );
+                let mut completer = DotNuCompletion::new();

                 return self.process_completion(
                     &mut completer,

@@ -267,10 +265,7 @@ impl NuCompleter {
                     pos,
                 );
             } else if prev_expr_str == b"ls" {
-                let mut completer = FileCompletion::new(
-                    self.engine_state.clone(),
-                    self.stack.clone(),
-                );
+                let mut completer = FileCompletion::new();

                 return self.process_completion(
                     &mut completer,

@@ -288,7 +283,6 @@ impl NuCompleter {
             match &flat.1 {
                 FlatShape::Custom(decl_id) => {
                     let mut completer = CustomCompletion::new(
-                        self.engine_state.clone(),
                         self.stack.clone(),
                         *decl_id,
                         initial_line,

@@ -304,10 +298,7 @@ impl NuCompleter {
                     );
                 }
                 FlatShape::Directory => {
-                    let mut completer = DirectoryCompletion::new(
-                        self.engine_state.clone(),
-                        self.stack.clone(),
-                    );
+                    let mut completer = DirectoryCompletion::new();

                     return self.process_completion(
                         &mut completer,

@@ -319,10 +310,7 @@ impl NuCompleter {
                     );
                 }
                 FlatShape::Filepath | FlatShape::GlobPattern => {
-                    let mut completer = FileCompletion::new(
-                        self.engine_state.clone(),
-                        self.stack.clone(),
-                    );
+                    let mut completer = FileCompletion::new();

                     return self.process_completion(
                         &mut completer,

@@ -335,8 +323,6 @@ impl NuCompleter {
                 }
                 flat_shape => {
                     let mut completer = CommandCompletion::new(
-                        self.engine_state.clone(),
-                        &working_set,
                         flattened.clone(),
                         // flat_idx,
                         flat_shape.clone(),

@@ -369,10 +355,7 @@ impl NuCompleter {
         }

         // Check for file completion
-        let mut completer = FileCompletion::new(
-            self.engine_state.clone(),
-            self.stack.clone(),
-        );
+        let mut completer = FileCompletion::new();
         out = self.process_completion(
             &mut completer,
             &working_set,

@@ -557,7 +540,7 @@ mod completer_tests {
             result.err().unwrap()
         );

-        let mut completer = NuCompleter::new(engine_state.into(), Stack::new());
+        let mut completer = NuCompleter::new(engine_state.into(), Arc::new(Stack::new()));
         let dataset = [
             ("sudo", false, "", Vec::new()),
             ("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]),
@@ -6,14 +6,13 @@ use nu_engine::eval_call;
 use nu_protocol::{
     ast::{Argument, Call, Expr, Expression},
     debugger::WithoutDebug,
-    engine::{EngineState, Stack, StateWorkingSet},
+    engine::{Stack, StateWorkingSet},
     PipelineData, Span, Type, Value,
 };
 use nu_utils::IgnoreCaseExt;
-use std::{collections::HashMap, sync::Arc};
+use std::collections::HashMap;

 pub struct CustomCompletion {
-    engine_state: Arc<EngineState>,
     stack: Stack,
     decl_id: usize,
     line: String,

@@ -21,10 +20,9 @@ pub struct CustomCompletion {
 }

 impl CustomCompletion {
-    pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self {
+    pub fn new(stack: Stack, decl_id: usize, line: String) -> Self {
         Self {
-            engine_state,
-            stack: stack.reset_out_dest().capture(),
+            stack,
             decl_id,
             line,
             sort_by: SortBy::None,

@@ -35,7 +33,8 @@ impl CustomCompletion {
 impl Completer for CustomCompletion {
     fn fetch(
         &mut self,
-        _: &StateWorkingSet,
+        working_set: &StateWorkingSet,
+        _stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,

@@ -47,7 +46,7 @@ impl Completer for CustomCompletion {

         // Call custom declaration
         let result = eval_call::<WithoutDebug>(
-            &self.engine_state,
+            working_set.permanent_state,
             &mut self.stack,
             &Call {
                 decl_id: self.decl_id,

@@ -75,9 +74,8 @@ impl Completer for CustomCompletion {

         // Parse result
         let suggestions = result
-            .map(|pd| {
-                let value = pd.into_value(span);
-                match &value {
+            .and_then(|data| data.into_value(span))
+            .map(|value| match &value {
                 Value::Record { val, .. } => {
                     let completions = val
                         .get("completions")

@@ -123,7 +121,6 @@ impl Completer for CustomCompletion {
                 }
                 Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
                 _ => vec![],
-                }
             })
             .unwrap_or_default();

@@ -8,25 +8,16 @@ use nu_protocol::{
     levenshtein_distance, Span,
 };
 use reedline::Suggestion;
-use std::{
-    path::{Path, MAIN_SEPARATOR as SEP},
-    sync::Arc,
-};
+use std::path::{Path, MAIN_SEPARATOR as SEP};

 use super::SemanticSuggestion;

-#[derive(Clone)]
-pub struct DirectoryCompletion {
-    engine_state: Arc<EngineState>,
-    stack: Stack,
-}
+#[derive(Clone, Default)]
+pub struct DirectoryCompletion {}

 impl DirectoryCompletion {
-    pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
-        Self {
-            engine_state,
-            stack,
-        }
+    pub fn new() -> Self {
+        Self::default()
     }
 }

@@ -34,10 +25,11 @@ impl Completer for DirectoryCompletion {
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
+        stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,
-        _: usize,
+        _pos: usize,
         options: &CompletionOptions,
     ) -> Vec<SemanticSuggestion> {
         let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);

@@ -47,10 +39,10 @@ impl Completer for DirectoryCompletion {
         let output: Vec<_> = directory_completion(
             span,
             &prefix,
-            &self.engine_state.current_work_dir(),
+            &working_set.permanent_state.current_work_dir(),
             options,
-            self.engine_state.as_ref(),
-            &self.stack,
+            working_set.permanent_state,
+            stack,
         )
         .into_iter()
         .map(move |x| SemanticSuggestion {
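The same shape repeats for `DotNuCompletion` and `FileCompletion` below: once a completer no longer caches `Arc<EngineState>` and `Stack` it has no fields, `Default` can be derived, and `new()` reduces to `Self::default()`. A standalone sketch of that pattern (the type name is illustrative, not nushell code):

// Field-less completer after the refactor: all state arrives through the
// `fetch` parameters, so construction takes no arguments.
#[derive(Clone, Default)]
pub struct PathCompletionLike {}

impl PathCompletionLike {
    pub fn new() -> Self {
        Self::default()
    }
}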
@@ -1,39 +1,31 @@
 use crate::completions::{file_path_completion, Completer, CompletionOptions, SortBy};
 use nu_protocol::{
-    engine::{EngineState, Stack, StateWorkingSet},
+    engine::{Stack, StateWorkingSet},
     Span,
 };
 use reedline::Suggestion;
-use std::{
-    path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
-    sync::Arc,
-};
+use std::path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};

 use super::SemanticSuggestion;

-#[derive(Clone)]
-pub struct DotNuCompletion {
-    engine_state: Arc<EngineState>,
-    stack: Stack,
-}
+#[derive(Clone, Default)]
+pub struct DotNuCompletion {}

 impl DotNuCompletion {
-    pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
-        Self {
-            engine_state,
-            stack,
-        }
+    pub fn new() -> Self {
+        Self::default()
     }
 }

 impl Completer for DotNuCompletion {
     fn fetch(
         &mut self,
-        _: &StateWorkingSet,
+        working_set: &StateWorkingSet,
+        stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,
-        _: usize,
+        _pos: usize,
         options: &CompletionOptions,
     ) -> Vec<SemanticSuggestion> {
         let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");

@@ -49,8 +41,7 @@ impl Completer for DotNuCompletion {
         let mut is_current_folder = false;

         // Fetch the lib dirs
-        let lib_dirs: Vec<String> =
-            if let Some(lib_dirs) = self.engine_state.get_env_var("NU_LIB_DIRS") {
+        let lib_dirs: Vec<String> = if let Some(lib_dirs) = working_set.get_env_var("NU_LIB_DIRS") {
             lib_dirs
                 .as_list()
                 .into_iter()

@@ -85,7 +76,7 @@ impl Completer for DotNuCompletion {
         } else {
             // Fetch the current folder
             #[allow(deprecated)]
-            let current_folder = self.engine_state.current_work_dir();
+            let current_folder = working_set.permanent_state.current_work_dir();
             is_current_folder = true;

             // Add the current folder and the lib dirs into the

@@ -104,8 +95,8 @@ impl Completer for DotNuCompletion {
             &partial,
             &search_dir,
             options,
-            self.engine_state.as_ref(),
-            &self.stack,
+            working_set.permanent_state,
+            stack,
         );
         completions
             .into_iter()
@@ -9,25 +9,16 @@ use nu_protocol::{
 };
 use nu_utils::IgnoreCaseExt;
 use reedline::Suggestion;
-use std::{
-    path::{Path, MAIN_SEPARATOR as SEP},
-    sync::Arc,
-};
+use std::path::{Path, MAIN_SEPARATOR as SEP};

 use super::SemanticSuggestion;

-#[derive(Clone)]
-pub struct FileCompletion {
-    engine_state: Arc<EngineState>,
-    stack: Stack,
-}
+#[derive(Clone, Default)]
+pub struct FileCompletion {}

 impl FileCompletion {
-    pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
-        Self {
-            engine_state,
-            stack,
-        }
+    pub fn new() -> Self {
+        Self::default()
     }
 }

@@ -35,10 +26,11 @@ impl Completer for FileCompletion {
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
+        stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,
-        _: usize,
+        _pos: usize,
         options: &CompletionOptions,
     ) -> Vec<SemanticSuggestion> {
         let AdjustView {

@@ -52,10 +44,10 @@ impl Completer for FileCompletion {
         readjusted,
         span,
         &prefix,
-        &self.engine_state.current_work_dir(),
+        &working_set.permanent_state.current_work_dir(),
         options,
-        self.engine_state.as_ref(),
-        &self.stack,
+        working_set.permanent_state,
+        stack,
     )
     .into_iter()
     .map(move |x| SemanticSuggestion {
@@ -1,7 +1,7 @@
 use crate::completions::{Completer, CompletionOptions};
 use nu_protocol::{
     ast::{Expr, Expression},
-    engine::StateWorkingSet,
+    engine::{Stack, StateWorkingSet},
     Span,
 };
 use reedline::Suggestion;

@@ -23,10 +23,11 @@ impl Completer for FlagCompletion {
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
+        _stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,
-        _: usize,
+        _pos: usize,
         options: &CompletionOptions,
     ) -> Vec<SemanticSuggestion> {
         // Check if it's a flag
@@ -3,30 +3,20 @@ use crate::completions::{
 };
 use nu_engine::{column::get_columns, eval_variable};
 use nu_protocol::{
-    engine::{EngineState, Stack, StateWorkingSet},
+    engine::{Stack, StateWorkingSet},
     Span, Value,
 };
 use reedline::Suggestion;
-use std::{str, sync::Arc};
+use std::str;

 #[derive(Clone)]
 pub struct VariableCompletion {
-    engine_state: Arc<EngineState>, // TODO: Is engine state necessary? It's already a part of working set in fetch()
-    stack: Stack,
     var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
 }

 impl VariableCompletion {
-    pub fn new(
-        engine_state: Arc<EngineState>,
-        stack: Stack,
-        var_context: (Vec<u8>, Vec<Vec<u8>>),
-    ) -> Self {
-        Self {
-            engine_state,
-            stack,
-            var_context,
-        }
+    pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
+        Self { var_context }
     }
 }

@@ -34,10 +24,11 @@ impl Completer for VariableCompletion {
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
+        stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,
-        _: usize,
+        _pos: usize,
         options: &CompletionOptions,
     ) -> Vec<SemanticSuggestion> {
         let mut output = vec![];

@@ -54,7 +45,7 @@ impl Completer for VariableCompletion {
         if !var_str.is_empty() {
             // Completion for $env.<tab>
             if var_str == "$env" {
-                let env_vars = self.stack.get_env_vars(&self.engine_state);
+                let env_vars = stack.get_env_vars(working_set.permanent_state);

                 // Return nested values
                 if sublevels_count > 0 {

@@ -110,8 +101,8 @@ impl Completer for VariableCompletion {
             if var_str == "$nu" {
                 // Eval nu var
                 if let Ok(nuval) = eval_variable(
-                    &self.engine_state,
-                    &self.stack,
+                    working_set.permanent_state,
+                    stack,
                     nu_protocol::NU_VARIABLE_ID,
                     nu_protocol::Span::new(current_span.start, current_span.end),
                 ) {

@@ -133,7 +124,7 @@ impl Completer for VariableCompletion {
         // Completion other variable types
         if let Some(var_id) = var_id {
             // Extract the variable value from the stack
-            let var = self.stack.get_var(var_id, Span::new(span.start, span.end));
+            let var = stack.get_var(var_id, Span::new(span.start, span.end));

             // If the value exists and it's of type Record
             if let Ok(value) = var {

@@ -207,7 +198,11 @@ impl Completer for VariableCompletion {

         // Permanent state vars
         // for scope in &self.engine_state.scope {
-        for overlay_frame in self.engine_state.active_overlays(&removed_overlays).rev() {
+        for overlay_frame in working_set
+            .permanent_state
+            .active_overlays(&removed_overlays)
+            .rev()
+        {
             for v in &overlay_frame.vars {
                 if options.match_algorithm.matches_u8_insensitive(
                     options.case_sensitive,
@@ -1,12 +1,12 @@
 use crate::util::eval_source;
 #[cfg(feature = "plugin")]
 use nu_path::canonicalize_with;
-use nu_protocol::{
-    engine::{EngineState, Stack, StateWorkingSet},
-    report_error, HistoryFileFormat, PipelineData,
-};
 #[cfg(feature = "plugin")]
-use nu_protocol::{ParseError, PluginRegistryFile, Spanned};
+use nu_protocol::{engine::StateWorkingSet, report_error, ParseError, PluginRegistryFile, Spanned};
+use nu_protocol::{
+    engine::{EngineState, Stack},
+    report_error_new, HistoryFileFormat, PipelineData,
+};
 #[cfg(feature = "plugin")]
 use nu_utils::utils::perf;
 use std::path::PathBuf;

@@ -25,10 +25,9 @@ pub fn read_plugin_file(
     plugin_file: Option<Spanned<String>>,
     storage_path: &str,
 ) {
+    use nu_protocol::ShellError;
     use std::path::Path;

-    use nu_protocol::{report_error_new, ShellError};
-
     let span = plugin_file.as_ref().map(|s| s.span);

     // Check and warn + abort if this is a .nu plugin file

@@ -239,13 +238,11 @@ pub fn eval_config_contents(
     match engine_state.cwd(Some(stack)) {
         Ok(cwd) => {
             if let Err(e) = engine_state.merge_env(stack, cwd) {
-                let working_set = StateWorkingSet::new(engine_state);
-                report_error(&working_set, &e);
+                report_error_new(engine_state, &e);
             }
         }
         Err(e) => {
-            let working_set = StateWorkingSet::new(engine_state);
-            report_error(&working_set, &e);
+            report_error_new(engine_state, &e);
         }
     }
 }

@@ -266,8 +263,8 @@ pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> O
 #[cfg(feature = "plugin")]
 pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -> bool {
     use nu_protocol::{
-        report_error_new, PluginExample, PluginIdentity, PluginRegistryItem,
-        PluginRegistryItemData, PluginSignature, ShellError,
+        PluginExample, PluginIdentity, PluginRegistryItem, PluginRegistryItemData, PluginSignature,
+        ShellError,
     };
     use std::collections::BTreeMap;

@@ -309,14 +306,15 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
     let mut engine_state = engine_state.clone();
     let mut stack = Stack::new();

-    if !eval_source(
+    if eval_source(
         &mut engine_state,
         &mut stack,
         &old_contents,
         &old_plugin_file_path.to_string_lossy(),
         PipelineData::Empty,
         false,
-    ) {
+    ) != 0
+    {
         return false;
     }

@@ -1,12 +1,12 @@
 use log::info;
-use miette::Result;
 use nu_engine::{convert_env_values, eval_block};
 use nu_parser::parse;
 use nu_protocol::{
     debugger::WithoutDebug,
     engine::{EngineState, Stack, StateWorkingSet},
-    report_error, PipelineData, Spanned, Value,
+    report_error, PipelineData, ShellError, Spanned, Value,
 };
+use std::sync::Arc;

 /// Run a command (or commands) given to us by the user
 pub fn evaluate_commands(

@@ -16,13 +16,9 @@ pub fn evaluate_commands(
     input: PipelineData,
     table_mode: Option<Value>,
     no_newline: bool,
-) -> Result<Option<i64>> {
+) -> Result<(), ShellError> {
     // Translate environment variables from Strings to Values
-    if let Some(e) = convert_env_values(engine_state, stack) {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(&working_set, &e);
-        std::process::exit(1);
-    }
+    convert_env_values(engine_state, stack)?;

     // Parse the source code
     let (block, delta) = {

@@ -41,7 +37,6 @@ pub fn evaluate_commands(

     if let Some(err) = working_set.parse_errors.first() {
         report_error(&working_set, err);
-
         std::process::exit(1);
     }

@@ -49,35 +44,27 @@ pub fn evaluate_commands(
     };

     // Update permanent state
-    if let Err(err) = engine_state.merge_delta(delta) {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(&working_set, &err);
-    }
+    engine_state.merge_delta(delta)?;

     // Run the block
-    let exit_code = match eval_block::<WithoutDebug>(engine_state, stack, &block, input) {
-        Ok(pipeline_data) => {
-            let mut config = engine_state.get_config().clone();
-            if let Some(t_mode) = table_mode {
-                config.table_mode = t_mode.coerce_str()?.parse().unwrap_or_default();
-            }
-            crate::eval_file::print_table_or_error(
-                engine_state,
-                stack,
-                pipeline_data,
-                &mut config,
-                no_newline,
-            )
-        }
-        Err(err) => {
-            let working_set = StateWorkingSet::new(engine_state);
-
-            report_error(&working_set, &err);
-            std::process::exit(1);
-        }
-    };
+    let pipeline = eval_block::<WithoutDebug>(engine_state, stack, &block, input)?;
+
+    if let PipelineData::Value(Value::Error { error, .. }, ..) = pipeline {
+        return Err(*error);
+    }
+
+    if let Some(t_mode) = table_mode {
+        Arc::make_mut(&mut engine_state.config).table_mode =
+            t_mode.coerce_str()?.parse().unwrap_or_default();
+    }
+
+    if let Some(status) = pipeline.print(engine_state, stack, no_newline, false)? {
+        if status.code() != 0 {
+            std::process::exit(status.code())
+        }
+    }

     info!("evaluate {}:{}:{}", file!(), line!(), column!());

-    Ok(exit_code)
+    Ok(())
 }
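The `Arc::make_mut(&mut engine_state.config)` call above uses the standard library's copy-on-write helper: it mutates the config in place when the `Arc` is uniquely owned and clones the inner value first when it is shared. A standalone sketch of that behaviour, with a stand-in `Config` struct rather than nushell's:

use std::sync::Arc;

// Minimal stand-in for a shared, cloneable configuration value.
#[derive(Clone, Debug, PartialEq)]
struct Config {
    table_mode: String,
}

fn main() {
    let mut shared = Arc::new(Config { table_mode: "rounded".into() });
    let reader = Arc::clone(&shared);

    // Because `reader` also holds the Arc, make_mut clones the inner value
    // before handing out a &mut, so the reader keeps seeing the old config.
    Arc::make_mut(&mut shared).table_mode = "basic".into();

    assert_eq!(reader.table_mode, "rounded");
    assert_eq!(shared.table_mode, "basic");
}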
@ -1,15 +1,14 @@
|
|||||||
use crate::util::eval_source;
|
use crate::util::eval_source;
|
||||||
use log::{info, trace};
|
use log::{info, trace};
|
||||||
use miette::{IntoDiagnostic, Result};
|
|
||||||
use nu_engine::{convert_env_values, eval_block};
|
use nu_engine::{convert_env_values, eval_block};
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
use nu_path::canonicalize_with;
|
use nu_path::canonicalize_with;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
report_error, Config, PipelineData, ShellError, Span, Value,
|
report_error, PipelineData, ShellError, Span, Value,
|
||||||
};
|
};
|
||||||
use std::{io::Write, sync::Arc};
|
use std::sync::Arc;
|
||||||
|
|
||||||
/// Entry point for evaluating a file.
|
/// Entry point for evaluating a file.
|
||||||
///
|
///
|
||||||
@@ -21,73 +20,40 @@ pub fn evaluate_file(
     engine_state: &mut EngineState,
     stack: &mut Stack,
     input: PipelineData,
-) -> Result<()> {
+) -> Result<(), ShellError> {
     // Convert environment variables from Strings to Values and store them in the engine state.
-    if let Some(e) = convert_env_values(engine_state, stack) {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(&working_set, &e);
-        std::process::exit(1);
-    }
+    convert_env_values(engine_state, stack)?;

     let cwd = engine_state.cwd_as_string(Some(stack))?;

-    let file_path = canonicalize_with(&path, cwd).unwrap_or_else(|e| {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(
-            &working_set,
-            &ShellError::FileNotFoundCustom {
-                msg: format!("Could not access file '{}': {:?}", path, e.to_string()),
-                span: Span::unknown(),
-            },
-        );
-        std::process::exit(1);
-    });
+    let file_path =
+        canonicalize_with(&path, cwd).map_err(|err| ShellError::FileNotFoundCustom {
+            msg: format!("Could not access file '{path}': {err}"),
+            span: Span::unknown(),
+        })?;

-    let file_path_str = file_path.to_str().unwrap_or_else(|| {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(
-            &working_set,
-            &ShellError::NonUtf8Custom {
-                msg: format!(
-                    "Input file name '{}' is not valid UTF8",
-                    file_path.to_string_lossy()
-                ),
-                span: Span::unknown(),
-            },
-        );
-        std::process::exit(1);
-    });
+    let file_path_str = file_path
+        .to_str()
+        .ok_or_else(|| ShellError::NonUtf8Custom {
+            msg: format!(
+                "Input file name '{}' is not valid UTF8",
+                file_path.to_string_lossy()
+            ),
+            span: Span::unknown(),
+        })?;

-    let file = std::fs::read(&file_path)
-        .into_diagnostic()
-        .unwrap_or_else(|e| {
-            let working_set = StateWorkingSet::new(engine_state);
-            report_error(
-                &working_set,
-                &ShellError::FileNotFoundCustom {
-                    msg: format!(
-                        "Could not read file '{}': {:?}",
-                        file_path_str,
-                        e.to_string()
-                    ),
-                    span: Span::unknown(),
-                },
-            );
-            std::process::exit(1);
-        });
+    let file = std::fs::read(&file_path).map_err(|err| ShellError::FileNotFoundCustom {
+        msg: format!("Could not read file '{file_path_str}': {err}"),
+        span: Span::unknown(),
+    })?;
     engine_state.file = Some(file_path.clone());

-    let parent = file_path.parent().unwrap_or_else(|| {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(
-            &working_set,
-            &ShellError::FileNotFoundCustom {
-                msg: format!("The file path '{file_path_str}' does not have a parent"),
-                span: Span::unknown(),
-            },
-        );
-        std::process::exit(1);
-    });
+    let parent = file_path
+        .parent()
+        .ok_or_else(|| ShellError::FileNotFoundCustom {
+            msg: format!("The file path '{file_path_str}' does not have a parent"),
+            span: Span::unknown(),
+        })?;

     stack.add_env_var(
         "FILE_PWD".to_string(),
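Note: the conversions above all follow one recipe: an unwrap_or_else closure that printed and exited becomes a map_err (for a Result) or ok_or_else (for an Option) conversion followed by `?`. A small sketch of that recipe under the same assumption, with illustrative types and messages rather than Nushell's:

use std::path::Path;

#[derive(Debug)]
enum AppError {
    NonUtf8(String),
    FileNotFound(String),
}

fn read_script(path: &Path) -> Result<(String, Vec<u8>), AppError> {
    // Option -> Result via ok_or_else, then propagate with `?`.
    let path_str = path
        .to_str()
        .ok_or_else(|| AppError::NonUtf8(path.to_string_lossy().into_owned()))?
        .to_string();
    // io::Error -> AppError via map_err, then propagate with `?`.
    let bytes = std::fs::read(path)
        .map_err(|err| AppError::FileNotFound(format!("could not read '{path_str}': {err}")))?;
    Ok((path_str, bytes))
}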
@@ -127,119 +93,48 @@ pub fn evaluate_file(
     }

     // Merge the changes into the engine state.
-    engine_state
-        .merge_delta(working_set.delta)
-        .expect("merging delta into engine_state should succeed");
+    engine_state.merge_delta(working_set.delta)?;

     // Check if the file contains a main command.
-    if engine_state.find_decl(b"main", &[]).is_some() {
+    let exit_code = if engine_state.find_decl(b"main", &[]).is_some() {
         // Evaluate the file, but don't run main yet.
-        let pipeline_data =
-            eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty());
-        let pipeline_data = match pipeline_data {
-            Err(ShellError::Return { .. }) => {
-                // Allow early return before main is run.
-                return Ok(());
-            }
-            x => x,
-        }
-        .unwrap_or_else(|e| {
-            let working_set = StateWorkingSet::new(engine_state);
-            report_error(&working_set, &e);
-            std::process::exit(1);
-        });
+        let pipeline =
+            match eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty()) {
+                Ok(data) => data,
+                Err(ShellError::Return { .. }) => {
+                    // Allow early return before main is run.
+                    return Ok(());
+                }
+                Err(err) => return Err(err),
+            };

-        // Print the pipeline output of the file.
-        // The pipeline output of a file is the pipeline output of its last command.
-        let result = pipeline_data.print(engine_state, stack, true, false);
-        match result {
-            Err(err) => {
-                let working_set = StateWorkingSet::new(engine_state);
-                report_error(&working_set, &err);
-                std::process::exit(1);
-            }
-            Ok(exit_code) => {
-                if exit_code != 0 {
-                    std::process::exit(exit_code as i32);
-                }
-            }
-        }
+        // Print the pipeline output of the last command of the file.
+        if let Some(status) = pipeline.print(engine_state, stack, true, false)? {
+            if status.code() != 0 {
+                std::process::exit(status.code())
+            }
+        }

         // Invoke the main command with arguments.
         // Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
         let args = format!("main {}", args.join(" "));
-        if !eval_source(
+        eval_source(
             engine_state,
             stack,
             args.as_bytes(),
             "<commandline>",
             input,
             true,
-        ) {
-            std::process::exit(1);
-        }
-    } else if !eval_source(engine_state, stack, &file, file_path_str, input, true) {
-        std::process::exit(1);
+        )
+    } else {
+        eval_source(engine_state, stack, &file, file_path_str, input, true)
+    };
+
+    if exit_code != 0 {
+        std::process::exit(exit_code)
     }

     info!("evaluate {}:{}:{}", file!(), line!(), column!());

     Ok(())
 }
-
-pub(crate) fn print_table_or_error(
-    engine_state: &mut EngineState,
-    stack: &mut Stack,
-    mut pipeline_data: PipelineData,
-    config: &mut Config,
-    no_newline: bool,
-) -> Option<i64> {
-    let exit_code = match &mut pipeline_data {
-        PipelineData::ExternalStream { exit_code, .. } => exit_code.take(),
-        _ => None,
-    };
-
-    // Change the engine_state config to use the passed in configuration
-    engine_state.set_config(config.clone());
-
-    if let PipelineData::Value(Value::Error { error, .. }, ..) = &pipeline_data {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(&working_set, &**error);
-        std::process::exit(1);
-    }
-
-    // We don't need to do anything special to print a table because print() handles it
-    print_or_exit(pipeline_data, engine_state, stack, no_newline);
-
-    // Make sure everything has finished
-    if let Some(exit_code) = exit_code {
-        let mut exit_code: Vec<_> = exit_code.into_iter().collect();
-        exit_code
-            .pop()
-            .and_then(|last_exit_code| match last_exit_code {
-                Value::Int { val: code, .. } => Some(code),
-                _ => None,
-            })
-    } else {
-        None
-    }
-}
-
-fn print_or_exit(
-    pipeline_data: PipelineData,
-    engine_state: &EngineState,
-    stack: &mut Stack,
-    no_newline: bool,
-) {
-    let result = pipeline_data.print(engine_state, stack, no_newline, false);
-
-    let _ = std::io::stdout().flush();
-    let _ = std::io::stderr().flush();
-
-    if let Err(error) = result {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(&working_set, &error);
-        let _ = std::io::stderr().flush();
-        std::process::exit(1);
-    }
-}
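Note: print_table_or_error and print_or_exit could be deleted because, as the new code above suggests, printing a pipeline now hands back an optional exit status that the caller inspects, instead of a helper exiting the process itself. A rough, self-contained sketch of that shape, where ExitStatus and print_pipeline are stand-ins rather than the real API:

struct ExitStatus(i32);

impl ExitStatus {
    fn code(&self) -> i32 {
        self.0
    }
}

// Stand-in for printing a pipeline: Ok(None) means "nothing external ran",
// Ok(Some(status)) carries the exit status of the last external command.
fn print_pipeline() -> Result<Option<ExitStatus>, String> {
    Ok(Some(ExitStatus(0)))
}

fn finish() -> Result<(), String> {
    if let Some(status) = print_pipeline()? {
        if status.code() != 0 {
            std::process::exit(status.code())
        }
    }
    Ok(())
}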
@@ -59,8 +59,7 @@ impl Completer for NuMenuCompleter {

         let res = eval_block::<WithoutDebug>(&self.engine_state, &mut self.stack, block, input);

-        if let Ok(values) = res {
-            let values = values.into_value(self.span);
+        if let Ok(values) = res.and_then(|data| data.into_value(self.span)) {
             convert_to_suggestions(values, line, pos, self.only_buffer_difference)
         } else {
             Vec::new()
@@ -129,9 +129,11 @@ impl Prompt for NushellPrompt {
                 {
                     // We're in vscode and we have osc633 enabled
                     format!("{VSCODE_PRE_PROMPT_MARKER}{prompt}{VSCODE_POST_PROMPT_MARKER}").into()
-                } else {
-                    // If we're in VSCode but we don't find the env var, just return the regular markers
+                } else if self.shell_integration_osc133 {
+                    // If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
                     format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
+                } else {
+                    prompt.into()
                 }
             } else if self.shell_integration_osc133 {
                 format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
@@ -2,8 +2,8 @@ use crate::NushellPrompt;
 use log::trace;
 use nu_engine::ClosureEvalOnce;
 use nu_protocol::{
-    engine::{EngineState, Stack, StateWorkingSet},
-    report_error, Config, PipelineData, Value,
+    engine::{EngineState, Stack},
+    report_error_new, Config, PipelineData, Value,
 };
 use reedline::Prompt;

@@ -65,7 +65,7 @@ fn get_prompt_string(
         .get_env_var(engine_state, prompt)
         .and_then(|v| match v {
             Value::Closure { val, .. } => {
-                let result = ClosureEvalOnce::new(engine_state, stack, val)
+                let result = ClosureEvalOnce::new(engine_state, stack, *val)
                     .run_with_input(PipelineData::Empty);

                 trace!(
@@ -77,8 +77,7 @@ fn get_prompt_string(

                 result
                     .map_err(|err| {
-                        let working_set = StateWorkingSet::new(engine_state);
-                        report_error(&working_set, &err);
+                        report_error_new(engine_state, &err);
                     })
                     .ok()
             }
@@ -108,50 +107,34 @@ pub(crate) fn update_prompt(
     stack: &mut Stack,
     nu_prompt: &mut NushellPrompt,
 ) {
-    let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, stack);
+    let configured_left_prompt_string =
+        match get_prompt_string(PROMPT_COMMAND, config, engine_state, stack) {
+            Some(s) => s,
+            None => "".to_string(),
+        };

     // Now that we have the prompt string lets ansify it.
     // <133 A><prompt><133 B><command><133 C><command output>
-    let left_prompt_string_133 = if config.shell_integration_osc133 {
-        if let Some(prompt_string) = left_prompt_string.clone() {
+    let left_prompt_string = if config.shell_integration_osc633 {
+        if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
+            // We're in vscode and we have osc633 enabled
             Some(format!(
-                "{PRE_PROMPT_MARKER}{prompt_string}{POST_PROMPT_MARKER}"
+                "{VSCODE_PRE_PROMPT_MARKER}{configured_left_prompt_string}{VSCODE_POST_PROMPT_MARKER}"
+            ))
+        } else if config.shell_integration_osc133 {
+            // If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
+            Some(format!(
+                "{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
             ))
         } else {
-            left_prompt_string.clone()
+            configured_left_prompt_string.into()
         }
-    } else {
-        left_prompt_string.clone()
-    };
-
-    let left_prompt_string_633 = if config.shell_integration_osc633 {
-        if let Some(prompt_string) = left_prompt_string.clone() {
-            if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode"))
-            {
-                // If the user enabled osc633 and we're in vscode, use the vscode markers
-                Some(format!(
-                    "{VSCODE_PRE_PROMPT_MARKER}{prompt_string}{VSCODE_POST_PROMPT_MARKER}"
-                ))
-            } else {
-                // otherwise, use the regular osc133 markers
-                Some(format!(
-                    "{PRE_PROMPT_MARKER}{prompt_string}{POST_PROMPT_MARKER}"
-                ))
-            }
-        } else {
-            left_prompt_string.clone()
-        }
-    } else {
-        left_prompt_string.clone()
-    };
-
-    let left_prompt_string = match (left_prompt_string_133, left_prompt_string_633) {
-        (None, None) => left_prompt_string,
-        (None, Some(l633)) => Some(l633),
-        (Some(l133), None) => Some(l133),
-        // If both are set, it means we're in vscode, so use the vscode markers
-        // and even if we're not actually in vscode atm, the regular 133 markers are used
-        (Some(_l133), Some(l633)) => Some(l633),
-    };
+    } else if config.shell_integration_osc133 {
+        Some(format!(
+            "{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
+        ))
+    } else {
+        configured_left_prompt_string.into()
+    };

     let right_prompt_string = get_prompt_string(PROMPT_COMMAND_RIGHT, config, engine_state, stack);
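Note: the prompt code above settles on a single decision ladder for shell-integration markers: VSCode's OSC 633 markers when that integration is enabled and TERM_PROGRAM is "vscode", otherwise the generic OSC 133 markers if enabled, otherwise the bare prompt. A compact sketch of that ladder; the marker values and the helper function are illustrative stand-ins, not Nushell's constants:

const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\";
const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\";

fn decorate_prompt(prompt: &str, osc633: bool, osc133: bool, in_vscode: bool) -> String {
    if osc633 && in_vscode {
        // VSCode's shell integration understands the OSC 633 variants.
        format!("{VSCODE_PRE_PROMPT_MARKER}{prompt}{VSCODE_POST_PROMPT_MARKER}")
    } else if osc133 {
        // Generic FinalTerm-style OSC 133 prompt markers.
        format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}")
    } else {
        prompt.to_string()
    }
}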
@@ -26,9 +26,8 @@ use nu_parser::{lex, parse, trim_quotes_str};
 use nu_protocol::{
     config::NuCursorShape,
     engine::{EngineState, Stack, StateWorkingSet},
-    eval_const::create_nu_constant,
     report_error_new, HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned,
-    Value, NU_VARIABLE_ID,
+    Value,
 };
 use nu_utils::{
     filesystem::{have_permission, PermissionResult},
@@ -87,7 +86,7 @@ pub fn evaluate_repl(

     let start_time = std::time::Instant::now();
     // Translate environment variables from Strings to Values
-    if let Some(e) = convert_env_values(engine_state, &unique_stack) {
+    if let Err(e) = convert_env_values(engine_state, &unique_stack) {
         report_error_new(engine_state, &e);
     }
     perf(
@@ -145,8 +144,7 @@ pub fn evaluate_repl(
     engine_state.set_startup_time(entire_start_time.elapsed().as_nanos() as i64);

     // Regenerate the $nu constant to contain the startup time and any other potential updates
-    let nu_const = create_nu_constant(engine_state, Span::unknown())?;
-    engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
+    engine_state.generate_nu_constant();

     if load_std_lib.is_none() && engine_state.get_config().show_banner {
         eval_source(
|
|||||||
.with_completer(Box::new(NuCompleter::new(
|
.with_completer(Box::new(NuCompleter::new(
|
||||||
engine_reference.clone(),
|
engine_reference.clone(),
|
||||||
// STACK-REFERENCE 2
|
// STACK-REFERENCE 2
|
||||||
Stack::with_parent(stack_arc.clone()),
|
stack_arc.clone(),
|
||||||
)))
|
)))
|
||||||
.with_quick_completions(config.quick_completions)
|
.with_quick_completions(config.quick_completions)
|
||||||
.with_partial_completions(config.partial_completions)
|
.with_partial_completions(config.partial_completions)
|
||||||
@ -544,7 +542,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||||||
let shell_integration_osc633 = config.shell_integration_osc633;
|
let shell_integration_osc633 = config.shell_integration_osc633;
|
||||||
let shell_integration_reset_application_mode = config.shell_integration_reset_application_mode;
|
let shell_integration_reset_application_mode = config.shell_integration_reset_application_mode;
|
||||||
|
|
||||||
let mut stack = Stack::unwrap_unique(stack_arc);
|
// TODO: we may clone the stack, this can lead to major performance issues
|
||||||
|
// so we should avoid it or making stack cheaper to clone.
|
||||||
|
let mut stack = Arc::unwrap_or_clone(stack_arc);
|
||||||
|
|
||||||
perf(
|
perf(
|
||||||
"line_editor setup",
|
"line_editor setup",
|
||||||
@ -620,7 +620,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||||||
column!(),
|
column!(),
|
||||||
use_color,
|
use_color,
|
||||||
);
|
);
|
||||||
} else {
|
} else if shell_integration_osc133 {
|
||||||
start_time = Instant::now();
|
start_time = Instant::now();
|
||||||
|
|
||||||
run_ansi_sequence(PRE_EXECUTION_MARKER);
|
run_ansi_sequence(PRE_EXECUTION_MARKER);
|
||||||
@ -660,9 +660,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||||||
run_finaliziation_ansi_sequence(
|
run_finaliziation_ansi_sequence(
|
||||||
&stack,
|
&stack,
|
||||||
engine_state,
|
engine_state,
|
||||||
|
use_color,
|
||||||
shell_integration_osc633,
|
shell_integration_osc633,
|
||||||
shell_integration_osc133,
|
shell_integration_osc133,
|
||||||
use_color,
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
ReplOperation::RunCommand(cmd) => {
|
ReplOperation::RunCommand(cmd) => {
|
||||||
@ -679,9 +679,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||||||
run_finaliziation_ansi_sequence(
|
run_finaliziation_ansi_sequence(
|
||||||
&stack,
|
&stack,
|
||||||
engine_state,
|
engine_state,
|
||||||
|
use_color,
|
||||||
shell_integration_osc633,
|
shell_integration_osc633,
|
||||||
shell_integration_osc133,
|
shell_integration_osc133,
|
||||||
use_color,
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
// as the name implies, we do nothing in this case
|
// as the name implies, we do nothing in this case
|
||||||
@ -731,9 +731,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||||||
run_finaliziation_ansi_sequence(
|
run_finaliziation_ansi_sequence(
|
||||||
&stack,
|
&stack,
|
||||||
engine_state,
|
engine_state,
|
||||||
|
use_color,
|
||||||
shell_integration_osc633,
|
shell_integration_osc633,
|
||||||
shell_integration_osc133,
|
shell_integration_osc133,
|
||||||
use_color,
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Ok(Signal::CtrlD) => {
|
Ok(Signal::CtrlD) => {
|
||||||
@ -742,9 +742,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||||||
run_finaliziation_ansi_sequence(
|
run_finaliziation_ansi_sequence(
|
||||||
&stack,
|
&stack,
|
||||||
engine_state,
|
engine_state,
|
||||||
|
use_color,
|
||||||
shell_integration_osc633,
|
shell_integration_osc633,
|
||||||
shell_integration_osc133,
|
shell_integration_osc133,
|
||||||
use_color,
|
|
||||||
);
|
);
|
||||||
|
|
||||||
println!();
|
println!();
|
||||||
@ -763,9 +763,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
|||||||
run_finaliziation_ansi_sequence(
|
run_finaliziation_ansi_sequence(
|
||||||
&stack,
|
&stack,
|
||||||
engine_state,
|
engine_state,
|
||||||
|
use_color,
|
||||||
shell_integration_osc633,
|
shell_integration_osc633,
|
||||||
shell_integration_osc133,
|
shell_integration_osc133,
|
||||||
use_color,
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -872,7 +872,7 @@ fn parse_operation(
|
|||||||
let tokens = lex(s.as_bytes(), 0, &[], &[], false);
|
let tokens = lex(s.as_bytes(), 0, &[], &[], false);
|
||||||
// Check if this is a single call to a directory, if so auto-cd
|
// Check if this is a single call to a directory, if so auto-cd
|
||||||
#[allow(deprecated)]
|
#[allow(deprecated)]
|
||||||
let cwd = nu_engine::env::current_dir_str(engine_state, stack)?;
|
let cwd = nu_engine::env::current_dir_str(engine_state, stack).unwrap_or_default();
|
||||||
let mut orig = s.clone();
|
let mut orig = s.clone();
|
||||||
if orig.starts_with('`') {
|
if orig.starts_with('`') {
|
||||||
orig = trim_quotes_str(&orig).to_string()
|
orig = trim_quotes_str(&orig).to_string()
|
||||||
@ -929,7 +929,10 @@ fn do_auto_cd(
|
|||||||
|
|
||||||
//FIXME: this only changes the current scope, but instead this environment variable
|
//FIXME: this only changes the current scope, but instead this environment variable
|
||||||
//should probably be a block that loads the information from the state in the overlay
|
//should probably be a block that loads the information from the state in the overlay
|
||||||
stack.add_env_var("PWD".into(), Value::string(path.clone(), Span::unknown()));
|
if let Err(err) = stack.set_cwd(&path) {
|
||||||
|
report_error_new(engine_state, &err);
|
||||||
|
return;
|
||||||
|
};
|
||||||
let cwd = Value::string(cwd, span);
|
let cwd = Value::string(cwd, span);
|
||||||
|
|
||||||
let shells = stack.get_env_var(engine_state, "NUSHELL_SHELLS");
|
let shells = stack.get_env_var(engine_state, "NUSHELL_SHELLS");
|
||||||
@ -1298,27 +1301,46 @@ fn map_nucursorshape_to_cursorshape(shape: NuCursorShape) -> Option<SetCursorSty
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_command_finished_marker(stack: &Stack, engine_state: &EngineState, vscode: bool) -> String {
|
fn get_command_finished_marker(
|
||||||
|
stack: &Stack,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
shell_integration_osc633: bool,
|
||||||
|
shell_integration_osc133: bool,
|
||||||
|
) -> String {
|
||||||
let exit_code = stack
|
let exit_code = stack
|
||||||
.get_env_var(engine_state, "LAST_EXIT_CODE")
|
.get_env_var(engine_state, "LAST_EXIT_CODE")
|
||||||
.and_then(|e| e.as_i64().ok());
|
.and_then(|e| e.as_i64().ok());
|
||||||
|
|
||||||
if vscode {
|
if shell_integration_osc633 {
|
||||||
// format!("\x1b]633;D;{}\x1b\\", exit_code.unwrap_or(0))
|
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
||||||
|
// We're in vscode and we have osc633 enabled
|
||||||
format!(
|
format!(
|
||||||
"{}{}{}",
|
"{}{}{}",
|
||||||
VSCODE_POST_EXECUTION_MARKER_PREFIX,
|
VSCODE_POST_EXECUTION_MARKER_PREFIX,
|
||||||
exit_code.unwrap_or(0),
|
exit_code.unwrap_or(0),
|
||||||
VSCODE_POST_EXECUTION_MARKER_SUFFIX
|
VSCODE_POST_EXECUTION_MARKER_SUFFIX
|
||||||
)
|
)
|
||||||
} else {
|
} else if shell_integration_osc133 {
|
||||||
// format!("\x1b]133;D;{}\x1b\\", exit_code.unwrap_or(0))
|
// If we're in VSCode but we don't find the env var, just return the regular markers
|
||||||
format!(
|
format!(
|
||||||
"{}{}{}",
|
"{}{}{}",
|
||||||
POST_EXECUTION_MARKER_PREFIX,
|
POST_EXECUTION_MARKER_PREFIX,
|
||||||
exit_code.unwrap_or(0),
|
exit_code.unwrap_or(0),
|
||||||
POST_EXECUTION_MARKER_SUFFIX
|
POST_EXECUTION_MARKER_SUFFIX
|
||||||
)
|
)
|
||||||
|
} else {
|
||||||
|
// We're not in vscode, so we don't need to do anything special
|
||||||
|
"\x1b[0m".to_string()
|
||||||
|
}
|
||||||
|
} else if shell_integration_osc133 {
|
||||||
|
format!(
|
||||||
|
"{}{}{}",
|
||||||
|
POST_EXECUTION_MARKER_PREFIX,
|
||||||
|
exit_code.unwrap_or(0),
|
||||||
|
POST_EXECUTION_MARKER_SUFFIX
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
"\x1b[0m".to_string()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1342,7 +1364,12 @@ fn run_finaliziation_ansi_sequence(
|
|||||||
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
||||||
let start_time = Instant::now();
|
let start_time = Instant::now();
|
||||||
|
|
||||||
run_ansi_sequence(&get_command_finished_marker(stack, engine_state, true));
|
run_ansi_sequence(&get_command_finished_marker(
|
||||||
|
stack,
|
||||||
|
engine_state,
|
||||||
|
shell_integration_osc633,
|
||||||
|
shell_integration_osc133,
|
||||||
|
));
|
||||||
|
|
||||||
perf(
|
perf(
|
||||||
"post_execute_marker (633;D) ansi escape sequences",
|
"post_execute_marker (633;D) ansi escape sequences",
|
||||||
@ -1352,10 +1379,15 @@ fn run_finaliziation_ansi_sequence(
|
|||||||
column!(),
|
column!(),
|
||||||
use_color,
|
use_color,
|
||||||
);
|
);
|
||||||
} else {
|
} else if shell_integration_osc133 {
|
||||||
let start_time = Instant::now();
|
let start_time = Instant::now();
|
||||||
|
|
||||||
run_ansi_sequence(&get_command_finished_marker(stack, engine_state, false));
|
run_ansi_sequence(&get_command_finished_marker(
|
||||||
|
stack,
|
||||||
|
engine_state,
|
||||||
|
shell_integration_osc633,
|
||||||
|
shell_integration_osc133,
|
||||||
|
));
|
||||||
|
|
||||||
perf(
|
perf(
|
||||||
"post_execute_marker (133;D) ansi escape sequences",
|
"post_execute_marker (133;D) ansi escape sequences",
|
||||||
@ -1369,7 +1401,12 @@ fn run_finaliziation_ansi_sequence(
|
|||||||
} else if shell_integration_osc133 {
|
} else if shell_integration_osc133 {
|
||||||
let start_time = Instant::now();
|
let start_time = Instant::now();
|
||||||
|
|
||||||
run_ansi_sequence(&get_command_finished_marker(stack, engine_state, false));
|
run_ansi_sequence(&get_command_finished_marker(
|
||||||
|
stack,
|
||||||
|
engine_state,
|
||||||
|
shell_integration_osc633,
|
||||||
|
shell_integration_osc133,
|
||||||
|
));
|
||||||
|
|
||||||
perf(
|
perf(
|
||||||
"post_execute_marker (133;D) ansi escape sequences",
|
"post_execute_marker (133;D) ansi escape sequences",
|
||||||
@ -1447,3 +1484,136 @@ fn are_session_ids_in_sync() {
|
|||||||
engine_state.history_session_id
|
engine_state.history_session_id
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test_auto_cd {
|
||||||
|
use super::{do_auto_cd, parse_operation, ReplOperation};
|
||||||
|
use nu_protocol::engine::{EngineState, Stack};
|
||||||
|
use std::path::Path;
|
||||||
|
use tempfile::tempdir;
|
||||||
|
|
||||||
|
/// Create a symlink. Works on both Unix and Windows.
|
||||||
|
#[cfg(any(unix, windows))]
|
||||||
|
fn symlink(original: impl AsRef<Path>, link: impl AsRef<Path>) -> std::io::Result<()> {
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
std::os::unix::fs::symlink(original, link)
|
||||||
|
}
|
||||||
|
#[cfg(windows)]
|
||||||
|
{
|
||||||
|
if original.as_ref().is_dir() {
|
||||||
|
std::os::windows::fs::symlink_dir(original, link)
|
||||||
|
} else {
|
||||||
|
std::os::windows::fs::symlink_file(original, link)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run one test case on the auto-cd feature. PWD is initially set to
|
||||||
|
/// `before`, and after `input` is parsed and evaluated, PWD should be
|
||||||
|
/// changed to `after`.
|
||||||
|
#[track_caller]
|
||||||
|
fn check(before: impl AsRef<Path>, input: &str, after: impl AsRef<Path>) {
|
||||||
|
// Setup EngineState and Stack.
|
||||||
|
let mut engine_state = EngineState::new();
|
||||||
|
let mut stack = Stack::new();
|
||||||
|
stack.set_cwd(before).unwrap();
|
||||||
|
|
||||||
|
// Parse the input. It must be an auto-cd operation.
|
||||||
|
let op = parse_operation(input.to_string(), &engine_state, &stack).unwrap();
|
||||||
|
let ReplOperation::AutoCd { cwd, target, span } = op else {
|
||||||
|
panic!("'{}' was not parsed into an auto-cd operation", input)
|
||||||
|
};
|
||||||
|
|
||||||
|
// Perform the auto-cd operation.
|
||||||
|
do_auto_cd(target, cwd, &mut stack, &mut engine_state, span);
|
||||||
|
let updated_cwd = engine_state.cwd(Some(&stack)).unwrap();
|
||||||
|
|
||||||
|
// Check that `updated_cwd` and `after` point to the same place. They
|
||||||
|
// don't have to be byte-wise equal (on Windows, the 8.3 filename
|
||||||
|
// conversion messes things up),
|
||||||
|
let updated_cwd = std::fs::canonicalize(updated_cwd).unwrap();
|
||||||
|
let after = std::fs::canonicalize(after).unwrap();
|
||||||
|
assert_eq!(updated_cwd, after);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_cd_root() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
let root = if cfg!(windows) { r"C:\" } else { "/" };
|
||||||
|
check(&tempdir, root, root);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_cd_tilde() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
let home = nu_path::home_dir().unwrap();
|
||||||
|
check(&tempdir, "~", home);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_cd_dot() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
check(&tempdir, ".", &tempdir);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_cd_double_dot() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
let dir = tempdir.path().join("foo");
|
||||||
|
std::fs::create_dir_all(&dir).unwrap();
|
||||||
|
check(dir, "..", &tempdir);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_cd_triple_dot() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
let dir = tempdir.path().join("foo").join("bar");
|
||||||
|
std::fs::create_dir_all(&dir).unwrap();
|
||||||
|
check(dir, "...", &tempdir);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_cd_relative() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
let foo = tempdir.path().join("foo");
|
||||||
|
let bar = tempdir.path().join("bar");
|
||||||
|
std::fs::create_dir_all(&foo).unwrap();
|
||||||
|
std::fs::create_dir_all(&bar).unwrap();
|
||||||
|
|
||||||
|
let input = if cfg!(windows) { r"..\bar" } else { "../bar" };
|
||||||
|
check(foo, input, bar);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_cd_trailing_slash() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
let dir = tempdir.path().join("foo");
|
||||||
|
std::fs::create_dir_all(&dir).unwrap();
|
||||||
|
|
||||||
|
let input = if cfg!(windows) { r"foo\" } else { "foo/" };
|
||||||
|
check(&tempdir, input, dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_cd_symlink() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
let dir = tempdir.path().join("foo");
|
||||||
|
std::fs::create_dir_all(&dir).unwrap();
|
||||||
|
let link = tempdir.path().join("link");
|
||||||
|
symlink(&dir, &link).unwrap();
|
||||||
|
|
||||||
|
let input = if cfg!(windows) { r".\link" } else { "./link" };
|
||||||
|
check(&tempdir, input, link);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[should_panic(expected = "was not parsed into an auto-cd operation")]
|
||||||
|
fn auto_cd_nonexistent_directory() {
|
||||||
|
let tempdir = tempdir().unwrap();
|
||||||
|
let dir = tempdir.path().join("foo");
|
||||||
|
|
||||||
|
let input = if cfg!(windows) { r"foo\" } else { "foo/" };
|
||||||
|
check(&tempdir, input, dir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -4,7 +4,7 @@ use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token,
|
|||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
print_if_stream, report_error, report_error_new, PipelineData, ShellError, Span, Value,
|
report_error, report_error_new, PipelineData, ShellError, Span, Value,
|
||||||
};
|
};
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
use nu_utils::enable_vt_processing;
|
use nu_utils::enable_vt_processing;
|
||||||
@ -39,9 +39,8 @@ fn gather_env_vars(
|
|||||||
init_cwd: &Path,
|
init_cwd: &Path,
|
||||||
) {
|
) {
|
||||||
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
report_error_new(
|
||||||
report_error(
|
engine_state,
|
||||||
&working_set,
|
|
||||||
&ShellError::GenericError {
|
&ShellError::GenericError {
|
||||||
error: format!("Environment variable was not captured: {env_str}"),
|
error: format!("Environment variable was not captured: {env_str}"),
|
||||||
msg: "".into(),
|
msg: "".into(),
|
||||||
@ -71,9 +70,8 @@ fn gather_env_vars(
|
|||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
// Could not capture current working directory
|
// Could not capture current working directory
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
report_error_new(
|
||||||
report_error(
|
engine_state,
|
||||||
&working_set,
|
|
||||||
&ShellError::GenericError {
|
&ShellError::GenericError {
|
||||||
error: "Current directory is not a valid utf-8 path".into(),
|
error: "Current directory is not a valid utf-8 path".into(),
|
||||||
msg: "".into(),
|
msg: "".into(),
|
||||||
@ -208,9 +206,48 @@ pub fn eval_source(
|
|||||||
fname: &str,
|
fname: &str,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
allow_return: bool,
|
allow_return: bool,
|
||||||
) -> bool {
|
) -> i32 {
|
||||||
let start_time = std::time::Instant::now();
|
let start_time = std::time::Instant::now();
|
||||||
|
|
||||||
|
let exit_code = match evaluate_source(engine_state, stack, source, fname, input, allow_return) {
|
||||||
|
Ok(code) => code.unwrap_or(0),
|
||||||
|
Err(err) => {
|
||||||
|
report_error_new(engine_state, &err);
|
||||||
|
1
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
stack.add_env_var(
|
||||||
|
"LAST_EXIT_CODE".to_string(),
|
||||||
|
Value::int(exit_code.into(), Span::unknown()),
|
||||||
|
);
|
||||||
|
|
||||||
|
// reset vt processing, aka ansi because illbehaved externals can break it
|
||||||
|
#[cfg(windows)]
|
||||||
|
{
|
||||||
|
let _ = enable_vt_processing();
|
||||||
|
}
|
||||||
|
|
||||||
|
perf(
|
||||||
|
&format!("eval_source {}", &fname),
|
||||||
|
start_time,
|
||||||
|
file!(),
|
||||||
|
line!(),
|
||||||
|
column!(),
|
||||||
|
engine_state.get_config().use_ansi_coloring,
|
||||||
|
);
|
||||||
|
|
||||||
|
exit_code
|
||||||
|
}
|
||||||
|
|
||||||
|
fn evaluate_source(
|
||||||
|
engine_state: &mut EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
source: &[u8],
|
||||||
|
fname: &str,
|
||||||
|
input: PipelineData,
|
||||||
|
allow_return: bool,
|
||||||
|
) -> Result<Option<i32>, ShellError> {
|
||||||
let (block, delta) = {
|
let (block, delta) = {
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
let output = parse(
|
let output = parse(
|
||||||
@ -224,104 +261,40 @@ pub fn eval_source(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
set_last_exit_code(stack, 1);
|
|
||||||
report_error(&working_set, err);
|
report_error(&working_set, err);
|
||||||
return false;
|
return Ok(Some(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
(output, working_set.render())
|
(output, working_set.render())
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Err(err) = engine_state.merge_delta(delta) {
|
engine_state.merge_delta(delta)?;
|
||||||
set_last_exit_code(stack, 1);
|
|
||||||
report_error_new(engine_state, &err);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
let b = if allow_return {
|
let pipeline = if allow_return {
|
||||||
eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input)
|
eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input)
|
||||||
} else {
|
} else {
|
||||||
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
|
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
|
||||||
};
|
}?;
|
||||||
|
|
||||||
match b {
|
let status = if let PipelineData::ByteStream(stream, ..) = pipeline {
|
||||||
Ok(pipeline_data) => {
|
stream.print(false)?
|
||||||
let config = engine_state.get_config();
|
} else {
|
||||||
let result;
|
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
|
||||||
if let PipelineData::ExternalStream {
|
let pipeline = eval_hook(
|
||||||
stdout: stream,
|
|
||||||
stderr: stderr_stream,
|
|
||||||
exit_code,
|
|
||||||
..
|
|
||||||
} = pipeline_data
|
|
||||||
{
|
|
||||||
result = print_if_stream(stream, stderr_stream, false, exit_code);
|
|
||||||
} else if let Some(hook) = config.hooks.display_output.clone() {
|
|
||||||
match eval_hook(
|
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
Some(pipeline_data),
|
Some(pipeline),
|
||||||
vec![],
|
vec![],
|
||||||
&hook,
|
&hook,
|
||||||
"display_output",
|
"display_output",
|
||||||
) {
|
)?;
|
||||||
Err(err) => {
|
pipeline.print(engine_state, stack, false, false)
|
||||||
result = Err(err);
|
|
||||||
}
|
|
||||||
Ok(val) => {
|
|
||||||
result = val.print(engine_state, stack, false, false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
result = pipeline_data.print(engine_state, stack, true, false);
|
pipeline.print(engine_state, stack, true, false)
|
||||||
}
|
}?
|
||||||
|
};
|
||||||
|
|
||||||
match result {
|
Ok(status.map(|status| status.code()))
|
||||||
Err(err) => {
|
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
|
||||||
|
|
||||||
report_error(&working_set, &err);
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
Ok(exit_code) => {
|
|
||||||
set_last_exit_code(stack, exit_code);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// reset vt processing, aka ansi because illbehaved externals can break it
|
|
||||||
#[cfg(windows)]
|
|
||||||
{
|
|
||||||
let _ = enable_vt_processing();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
set_last_exit_code(stack, 1);
|
|
||||||
|
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
|
||||||
|
|
||||||
report_error(&working_set, &err);
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
perf(
|
|
||||||
&format!("eval_source {}", &fname),
|
|
||||||
start_time,
|
|
||||||
file!(),
|
|
||||||
line!(),
|
|
||||||
column!(),
|
|
||||||
engine_state.get_config().use_ansi_coloring,
|
|
||||||
);
|
|
||||||
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set_last_exit_code(stack: &mut Stack, exit_code: i64) {
|
|
||||||
stack.add_env_var(
|
|
||||||
"LAST_EXIT_CODE".to_string(),
|
|
||||||
Value::int(exit_code, Span::unknown()),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
1  crates/nu-cli/tests/commands/mod.rs  Normal file
@@ -0,0 +1 @@
+mod nu_highlight;

7  crates/nu-cli/tests/commands/nu_highlight.rs  Normal file
@@ -0,0 +1,7 @@
+use nu_test_support::nu;
+
+#[test]
+fn nu_highlight_not_expr() {
+    let actual = nu!("'not false' | nu-highlight | ansi strip");
+    assert_eq!(actual.out, "not false");
+}
@ -6,7 +6,10 @@ use nu_parser::parse;
|
|||||||
use nu_protocol::{debugger::WithoutDebug, engine::StateWorkingSet, PipelineData};
|
use nu_protocol::{debugger::WithoutDebug, engine::StateWorkingSet, PipelineData};
|
||||||
use reedline::{Completer, Suggestion};
|
use reedline::{Completer, Suggestion};
|
||||||
use rstest::{fixture, rstest};
|
use rstest::{fixture, rstest};
|
||||||
use std::path::{PathBuf, MAIN_SEPARATOR};
|
use std::{
|
||||||
|
path::{PathBuf, MAIN_SEPARATOR},
|
||||||
|
sync::Arc,
|
||||||
|
};
|
||||||
use support::{
|
use support::{
|
||||||
completions_helpers::{new_partial_engine, new_quote_engine},
|
completions_helpers::{new_partial_engine, new_quote_engine},
|
||||||
file, folder, match_suggestions, new_engine,
|
file, folder, match_suggestions, new_engine,
|
||||||
@ -22,7 +25,7 @@ fn completer() -> NuCompleter {
|
|||||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
NuCompleter::new(Arc::new(engine), Arc::new(stack))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[fixture]
|
#[fixture]
|
||||||
@ -36,7 +39,7 @@ fn completer_strings() -> NuCompleter {
|
|||||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
NuCompleter::new(Arc::new(engine), Arc::new(stack))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[fixture]
|
#[fixture]
|
||||||
@ -56,7 +59,7 @@ fn extern_completer() -> NuCompleter {
|
|||||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
NuCompleter::new(Arc::new(engine), Arc::new(stack))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[fixture]
|
#[fixture]
|
||||||
@ -79,14 +82,14 @@ fn custom_completer() -> NuCompleter {
|
|||||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
NuCompleter::new(Arc::new(engine), Arc::new(stack))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn variables_dollar_sign_with_varialblecompletion() {
|
fn variables_dollar_sign_with_varialblecompletion() {
|
||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "$ ";
|
let target_dir = "$ ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -138,7 +141,7 @@ fn dotnu_completions() {
|
|||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
// Test source completion
|
// Test source completion
|
||||||
let completion_str = "source-env ".to_string();
|
let completion_str = "source-env ".to_string();
|
||||||
@ -217,7 +220,7 @@ fn file_completions() {
|
|||||||
let (dir, dir_str, engine, stack) = new_engine();
|
let (dir, dir_str, engine, stack) = new_engine();
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
// Test completions for the current folder
|
// Test completions for the current folder
|
||||||
let target_dir = format!("cp {dir_str}{MAIN_SEPARATOR}");
|
let target_dir = format!("cp {dir_str}{MAIN_SEPARATOR}");
|
||||||
@ -265,7 +268,7 @@ fn partial_completions() {
|
|||||||
let (dir, _, engine, stack) = new_partial_engine();
|
let (dir, _, engine, stack) = new_partial_engine();
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
// Test completions for a folder's name
|
// Test completions for a folder's name
|
||||||
let target_dir = format!("cd {}", file(dir.join("pa")));
|
let target_dir = format!("cd {}", file(dir.join("pa")));
|
||||||
@ -363,7 +366,7 @@ fn partial_completions() {
|
|||||||
fn command_ls_with_filecompletion() {
|
fn command_ls_with_filecompletion() {
|
||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "ls ";
|
let target_dir = "ls ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -397,7 +400,7 @@ fn command_ls_with_filecompletion() {
|
|||||||
fn command_open_with_filecompletion() {
|
fn command_open_with_filecompletion() {
|
||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "open ";
|
let target_dir = "open ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -432,7 +435,7 @@ fn command_open_with_filecompletion() {
|
|||||||
fn command_rm_with_globcompletion() {
|
fn command_rm_with_globcompletion() {
|
||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "rm ";
|
let target_dir = "rm ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -467,7 +470,7 @@ fn command_rm_with_globcompletion() {
|
|||||||
fn command_cp_with_globcompletion() {
|
fn command_cp_with_globcompletion() {
|
||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "cp ";
|
let target_dir = "cp ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -502,7 +505,7 @@ fn command_cp_with_globcompletion() {
|
|||||||
fn command_save_with_filecompletion() {
|
fn command_save_with_filecompletion() {
|
||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "save ";
|
let target_dir = "save ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -537,7 +540,7 @@ fn command_save_with_filecompletion() {
|
|||||||
fn command_touch_with_filecompletion() {
|
fn command_touch_with_filecompletion() {
|
||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "touch ";
|
let target_dir = "touch ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -572,7 +575,7 @@ fn command_touch_with_filecompletion() {
|
|||||||
fn command_watch_with_filecompletion() {
|
fn command_watch_with_filecompletion() {
|
||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "watch ";
|
let target_dir = "watch ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -607,7 +610,7 @@ fn command_watch_with_filecompletion() {
|
|||||||
fn file_completion_quoted() {
|
fn file_completion_quoted() {
|
||||||
let (_, _, engine, stack) = new_quote_engine();
|
let (_, _, engine, stack) = new_quote_engine();
|
||||||
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
let target_dir = "open ";
|
let target_dir = "open ";
|
||||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
@ -645,7 +648,7 @@ fn flag_completions() {
|
|||||||
let (_, _, engine, stack) = new_engine();
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
// Test completions for the 'ls' flags
|
// Test completions for the 'ls' flags
|
||||||
let suggestions = completer.complete("ls -", 4);
|
let suggestions = completer.complete("ls -", 4);
|
||||||
|
|
||||||
@ -680,7 +683,7 @@ fn folder_with_directorycompletions() {
|
|||||||
let (dir, dir_str, engine, stack) = new_engine();
|
let (dir, dir_str, engine, stack) = new_engine();
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
// Test completions for the current folder
|
// Test completions for the current folder
|
||||||
let target_dir = format!("cd {dir_str}{MAIN_SEPARATOR}");
|
let target_dir = format!("cd {dir_str}{MAIN_SEPARATOR}");
|
||||||
@ -709,7 +712,7 @@ fn variables_completions() {
|
|||||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
// Instantiate a new completer
|
// Instantiate a new completer
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||||
|
|
||||||
// Test completions for $nu
|
// Test completions for $nu
|
||||||
 let suggestions = completer.complete("$nu.", 4);
@ -815,7 +818,7 @@ fn alias_of_command_and_flags() {
 let alias = r#"alias ll = ls -l"#;
 assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());

-let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));

 let suggestions = completer.complete("ll t", 4);
 #[cfg(windows)]
@ -834,7 +837,7 @@ fn alias_of_basic_command() {
 let alias = r#"alias ll = ls "#;
 assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());

-let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));

 let suggestions = completer.complete("ll t", 4);
 #[cfg(windows)]
@ -856,7 +859,7 @@ fn alias_of_another_alias() {
 let alias = r#"alias lf = ll -f"#;
 assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());

-let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));

 let suggestions = completer.complete("lf t", 4);
 #[cfg(windows)]
@ -890,7 +893,7 @@ fn run_external_completion(completer: &str, input: &str) -> Vec<Suggestion> {
 assert!(engine_state.merge_env(&mut stack, &dir).is_ok());

 // Instantiate a new completer
-let mut completer = NuCompleter::new(std::sync::Arc::new(engine_state), stack);
+let mut completer = NuCompleter::new(Arc::new(engine_state), Arc::new(stack));

 completer.complete(input, input.len())
 }
@ -899,7 +902,7 @@ fn run_external_completion(completer: &str, input: &str) -> Vec<Suggestion> {
 fn unknown_command_completion() {
 let (_, _, engine, stack) = new_engine();

-let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));

 let target_dir = "thiscommanddoesnotexist ";
 let suggestions = completer.complete(target_dir, target_dir.len());
@ -962,7 +965,7 @@ fn flagcompletion_triggers_after_cursor_piped(mut completer: NuCompleter) {
 fn filecompletions_triggers_after_cursor() {
 let (_, _, engine, stack) = new_engine();

-let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));

 let suggestions = completer.complete("cp test_c", 3);

@ -1071,7 +1074,7 @@ fn alias_offset_bug_7648() {
 let alias = r#"alias ea = ^$env.EDITOR /tmp/test.s"#;
 assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());

-let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));

 // Issue #7648
 // Nushell crashes when an alias name is shorter than the alias command
@ -1090,7 +1093,7 @@ fn alias_offset_bug_7754() {
 let alias = r#"alias ll = ls -l"#;
 assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());

-let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));

 // Issue #7754
 // Nushell crashes when an alias name is shorter than the alias command
@ -3,8 +3,7 @@ use nu_parser::parse;
 use nu_protocol::{
 debugger::WithoutDebug,
 engine::{EngineState, Stack, StateWorkingSet},
-eval_const::create_nu_constant,
-PipelineData, ShellError, Span, Value, NU_VARIABLE_ID,
+PipelineData, ShellError, Span, Value,
 };
 use nu_test_support::fs;
 use reedline::Suggestion;
@ -28,9 +27,7 @@ pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
 let mut engine_state = create_default_context();

 // Add $nu
-let nu_const =
-create_nu_constant(&engine_state, Span::test_data()).expect("Failed creating $nu");
-engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
+engine_state.generate_nu_constant();

 // New stack
 let mut stack = Stack::new();
2
crates/nu-cli/tests/main.rs
Normal file
@ -0,0 +1,2 @@
+mod commands;
+mod completions;
@ -1,6 +1,6 @@
 use nu_protocol::{
-engine::{EngineState, Stack, StateWorkingSet},
-report_error, Range, ShellError, Span, Value,
+engine::{EngineState, Stack},
+Range, ShellError, Span, Value,
 };
 use std::{ops::Bound, path::PathBuf};

@ -13,11 +13,9 @@ pub fn get_init_cwd() -> PathBuf {
 }

 pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf {
-engine_state.cwd(Some(stack)).unwrap_or_else(|e| {
-let working_set = StateWorkingSet::new(engine_state);
-report_error(&working_set, &e);
-crate::util::get_init_cwd()
-})
+engine_state
+.cwd(Some(stack))
+.unwrap_or(crate::util::get_init_cwd())
 }

 type MakeRangeError = fn(&str, Span) -> ShellError;
@ -79,7 +79,7 @@ impl Command for CastDF {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuLazyFrame::can_downcast(&value) {
 let (dtype, column_nm) = df_args(engine_state, stack, call)?;
 let df = NuLazyFrame::try_from_value(value)?;
@ -72,8 +72,7 @@ impl Command for FilterWith {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
-
+let value = input.into_value(call.head)?;
 if NuLazyFrame::can_downcast(&value) {
 let df = NuLazyFrame::try_from_value(value)?;
 command_lazy(engine_state, stack, call, df)
@ -86,7 +86,7 @@ impl Command for FirstDF {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 let df = NuDataFrame::try_from_value(value)?;
 command(engine_state, stack, call, df)
@ -61,7 +61,7 @@ impl Command for LastDF {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 let df = NuDataFrame::try_from_value(value)?;
 command(engine_state, stack, call, df)
@ -109,8 +109,7 @@ impl Command for RenameDF {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
-
+let value = input.into_value(call.head)?;
 if NuLazyFrame::can_downcast(&value) {
 let df = NuLazyFrame::try_from_value(value)?;
 command_lazy(engine_state, stack, call, df)
@ -76,7 +76,7 @@ impl Command for ToNu {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 dataframe_command(engine_state, stack, call, value)
 } else {
@ -102,8 +102,7 @@ impl Command for WithColumn {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
-
+let value = input.into_value(call.head)?;
 if NuLazyFrame::can_downcast(&value) {
 let df = NuLazyFrame::try_from_value(value)?;
 command_lazy(engine_state, stack, call, df)
@ -172,7 +172,7 @@ macro_rules! lazy_expr_command {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 let lazy = NuLazyFrame::try_from_value(value)?;
 let lazy = NuLazyFrame::new(
@ -271,7 +271,7 @@ macro_rules! lazy_expr_command {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 let lazy = NuLazyFrame::try_from_value(value)?;
 let lazy = NuLazyFrame::new(
@ -91,7 +91,7 @@ impl Command for ExprOtherwise {
 let otherwise_predicate: Value = call.req(engine_state, stack, 0)?;
 let otherwise_predicate = NuExpression::try_from_value(otherwise_predicate)?;

-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let complete: NuExpression = match NuWhen::try_from_value(value)? {
 NuWhen::Then(then) => then.otherwise(otherwise_predicate.into_polars()).into(),
 NuWhen::ChainedThen(chained_when) => chained_when
@ -67,7 +67,7 @@ impl Command for ExprQuantile {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let quantile: f64 = call.req(engine_state, stack, 0)?;

 let expr = NuExpression::try_from_value(value)?;
@ -103,7 +103,7 @@ impl Command for ExprWhen {
 let then_predicate: Value = call.req(engine_state, stack, 1)?;
 let then_predicate = NuExpression::try_from_value(then_predicate)?;

-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let when_then: NuWhen = match value {
 Value::Nothing { .. } => when(when_predicate.into_polars())
 .then(then_predicate.into_polars())
@ -100,7 +100,7 @@ impl Command for LazyExplode {
 }

 pub(crate) fn explode(call: &Call, input: PipelineData) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 let df = NuLazyFrame::try_from_value(value)?;
 let columns: Vec<String> = call
@ -82,7 +82,7 @@ impl Command for LazyFillNA {
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
 let fill: Value = call.req(engine_state, stack, 0)?;
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;

 if NuExpression::can_downcast(&value) {
 let expr = NuExpression::try_from_value(value)?;
@ -59,7 +59,7 @@ impl Command for LazyFillNull {
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
 let fill: Value = call.req(engine_state, stack, 0)?;
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;

 if NuExpression::can_downcast(&value) {
 let expr = NuExpression::try_from_value(value)?;
@ -219,7 +219,7 @@ impl Command for LazyJoin {
 let suffix: Option<String> = call.get_flag(engine_state, stack, "suffix")?;
 let suffix = suffix.unwrap_or_else(|| "_x".into());

-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let lazy = NuLazyFrame::try_from_value(value)?;
 let from_eager = lazy.from_eager;
 let lazy = lazy.into_polars();
@ -54,7 +54,7 @@ impl Command for LazyQuantile {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let quantile: f64 = call.req(engine_state, stack, 0)?;

 let lazy = NuLazyFrame::try_from_value(value)?;
@ -68,7 +68,7 @@ impl Command for IsNotNull {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 let df = NuDataFrame::try_from_value(value)?;
 command(engine_state, stack, call, df)
@ -68,7 +68,7 @@ impl Command for IsNull {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 let df = NuDataFrame::try_from_value(value)?;
 command(engine_state, stack, call, df)
@ -60,7 +60,7 @@ impl Command for NUnique {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 if NuDataFrame::can_downcast(&value) {
 let df = NuDataFrame::try_from_value(value)?;
 command(engine_state, stack, call, df)
@ -56,8 +56,7 @@ impl Command for Shift {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
-
+let value = input.into_value(call.head)?;
 if NuLazyFrame::can_downcast(&value) {
 let df = NuLazyFrame::try_from_value(value)?;
 command_lazy(engine_state, stack, call, df)
@ -72,8 +72,7 @@ impl Command for Unique {
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let value = input.into_value(call.head);
-
+let value = input.into_value(call.head)?;
 if NuLazyFrame::can_downcast(&value) {
 let df = NuLazyFrame::try_from_value(value)?;
 command_lazy(engine_state, stack, call, df)
@ -80,7 +80,8 @@ pub fn test_dataframe_example(engine_state: &mut Box<EngineState>, example: &Exa
 let result =
 eval_block::<WithoutDebug>(engine_state, &mut stack, &block, PipelineData::empty())
 .unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", example.example, err))
-.into_value(Span::test_data());
+.into_value(Span::test_data())
+.expect("ok value");

 println!("input: {}", example.example);
 println!("result: {result:?}");
@ -1,7 +1,7 @@
 use super::{operations::Axis, NuDataFrame};
 use nu_protocol::{
 ast::{Boolean, Comparison, Math, Operator},
-span, ShellError, Span, Spanned, Value,
+ShellError, Span, Spanned, Value,
 };
 use num::Zero;
 use polars::prelude::{
@ -17,7 +17,7 @@ pub(super) fn between_dataframes(
 right: &Value,
 rhs: &NuDataFrame,
 ) -> Result<Value, ShellError> {
-let operation_span = span(&[left.span(), right.span()]);
+let operation_span = Span::merge(left.span(), right.span());
 match operator.item {
 Operator::Math(Math::Plus) => match lhs.append_df(rhs, Axis::Row, operation_span) {
 Ok(df) => Ok(df.into_value(operation_span)),
@ -40,7 +40,7 @@ pub(super) fn compute_between_series(
 right: &Value,
 rhs: &Series,
 ) -> Result<Value, ShellError> {
-let operation_span = span(&[left.span(), right.span()]);
+let operation_span = Span::merge(left.span(), right.span());
 match operator.item {
 Operator::Math(Math::Plus) => {
 let mut res = lhs + rhs;
@ -295,7 +295,7 @@ impl NuDataFrame {
 }

 pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
-let value = input.into_value(span);
+let value = input.into_value(span)?;
 Self::try_from_value(value)
 }

@ -84,7 +84,7 @@ impl NuExpression {
 }

 pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
-let value = input.into_value(span);
+let value = input.into_value(span)?;
 Self::try_from_value(value)
 }

@ -134,7 +134,7 @@ impl NuLazyFrame {
 }

 pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
-let value = input.into_value(span);
+let value = input.into_value(span)?;
 Self::try_from_value(value)
 }

@ -107,7 +107,7 @@ impl NuLazyGroupBy {
 }

 pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
-let value = input.into_value(span);
+let value = input.into_value(span)?;
 Self::try_from_value(value)
 }
 }
@ -1,4 +1,4 @@
-use nu_protocol::{span as span_join, ShellError, Span, Spanned, Value};
+use nu_protocol::{ShellError, Span, Spanned, Value};

 // Default value used when selecting rows from dataframe
 pub const DEFAULT_ROWS: usize = 5;
@ -27,7 +27,7 @@ pub(crate) fn convert_columns(
 let span = value.span();
 match value {
 Value::String { val, .. } => {
-col_span = span_join(&[col_span, span]);
+col_span = col_span.merge(span);
 Ok(Spanned { item: val, span })
 }
 _ => Err(ShellError::GenericError {
@ -68,7 +68,7 @@ pub(crate) fn convert_columns_string(
 let span = value.span();
 match value {
 Value::String { val, .. } => {
-col_span = span_join(&[col_span, span]);
+col_span = col_span.merge(span);
 Ok(val)
 }
 _ => Err(ShellError::GenericError {
@ -118,23 +118,13 @@ fn into_bits(
 let cell_paths = call.rest(engine_state, stack, 0)?;
 let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);

-match input {
-PipelineData::ExternalStream { stdout: None, .. } => {
-Ok(Value::binary(vec![], head).into_pipeline_data())
-}
-PipelineData::ExternalStream {
-stdout: Some(stream),
-..
-} => {
+if let PipelineData::ByteStream(stream, ..) = input {
 // TODO: in the future, we may want this to stream out, converting each to bytes
-let output = stream.into_bytes()?;
-Ok(Value::binary(output.item, head).into_pipeline_data())
-}
-_ => {
+Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
+} else {
 let args = Arguments { cell_paths };
 operate(action, args, input, call.head, engine_state.ctrlc.clone())
 }
-}
 }

 fn convert_to_smallest_number_type(num: i64, span: Span) -> Value {
@ -10,7 +10,7 @@ impl Command for EachWhile {
 }

 fn usage(&self) -> &str {
-"Run a block on each row of the input list until a null is found, then create a new list with the results."
+"Run a closure on each row of the input list until a null is found, then create a new list with the results."
 }

 fn search_terms(&self) -> Vec<&str> {
@ -79,36 +79,38 @@ impl Command for EachWhile {
 let mut closure = ClosureEval::new(engine_state, stack, closure);
 Ok(input
 .into_iter()
-.map_while(move |value| match closure.run_with_value(value) {
-Ok(data) => {
-let value = data.into_value(head);
-(!value.is_nothing()).then_some(value)
-}
+.map_while(move |value| {
+match closure
+.run_with_value(value)
+.and_then(|data| data.into_value(head))
+{
+Ok(value) => (!value.is_nothing()).then_some(value),
 Err(_) => None,
+}
 })
 .fuse()
 .into_pipeline_data(head, engine_state.ctrlc.clone()))
 }
-PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
-PipelineData::ExternalStream {
-stdout: Some(stream),
-..
-} => {
+PipelineData::ByteStream(stream, ..) => {
+let span = stream.span();
+if let Some(chunks) = stream.chunks() {
 let mut closure = ClosureEval::new(engine_state, stack, closure);
-Ok(stream
-.into_iter()
+Ok(chunks
 .map_while(move |value| {
 let value = value.ok()?;
-match closure.run_with_value(value) {
-Ok(data) => {
-let value = data.into_value(head);
-(!value.is_nothing()).then_some(value)
-}
+match closure
+.run_with_value(value)
+.and_then(|data| data.into_value(span))
+{
+Ok(value) => (!value.is_nothing()).then_some(value),
 Err(_) => None,
 }
 })
 .fuse()
 .into_pipeline_data(head, engine_state.ctrlc.clone()))
+} else {
+Ok(PipelineData::Empty)
+}
 }
 // This match allows non-iterables to be accepted,
 // which is currently considered undesirable (Nov 2022).
@ -56,7 +56,7 @@ impl Command for RollDown {
 let by: Option<usize> = call.get_flag(engine_state, stack, "by")?;
 let metadata = input.metadata();

-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Down)?;

 Ok(rotated_value.into_pipeline_data().set_metadata(metadata))
@ -94,7 +94,7 @@ impl Command for RollLeft {
 let metadata = input.metadata();

 let cells_only = call.has_flag(engine_state, stack, "cells-only")?;
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let rotated_value =
 horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Left)?;

@ -94,7 +94,7 @@ impl Command for RollRight {
 let metadata = input.metadata();

 let cells_only = call.has_flag(engine_state, stack, "cells-only")?;
-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let rotated_value =
 horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Right)?;

@ -56,7 +56,7 @@ impl Command for RollUp {
 let by: Option<usize> = call.get_flag(engine_state, stack, "by")?;
 let metadata = input.metadata();

-let value = input.into_value(call.head);
+let value = input.into_value(call.head)?;
 let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Up)?;

 Ok(rotated_value.into_pipeline_data().set_metadata(metadata))
@ -151,7 +151,7 @@ impl Iterator for UpdateCellIterator {
 fn eval_value(closure: &mut ClosureEval, span: Span, value: Value) -> Value {
 closure
 .run_with_value(value)
-.map(|data| data.into_value(span))
+.and_then(|data| data.into_value(span))
 .unwrap_or_else(|err| Value::error(err, span))
 }

@ -39,7 +39,7 @@ impl Command for FormatPattern {
 let mut working_set = StateWorkingSet::new(engine_state);

 let specified_pattern: Result<Value, ShellError> = call.req(engine_state, stack, 0);
-let input_val = input.into_value(call.head);
+let input_val = input.into_value(call.head)?;
 // add '$it' variable to support format like this: $it.column1.column2.
 let it_id = working_set.add_variable(b"$it".to_vec(), call.head, Type::Any, false);
 stack.add_var(it_id, input_val.clone());
@ -19,102 +19,102 @@ fn basic_string_fails() {
|
|||||||
assert_eq!(actual.out, "");
|
assert_eq!(actual.out, "");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn short_stream_binary() {
|
// fn short_stream_binary() {
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101]
|
// nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101]
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(actual.out, "true");
|
// assert_eq!(actual.out, "true");
|
||||||
}
|
// }
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn short_stream_mismatch() {
|
// fn short_stream_mismatch() {
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
nu --testbin repeater (0x[010203]) 5 | bytes starts-with 0x[010204]
|
// nu --testbin repeater (0x[010203]) 5 | bytes starts-with 0x[010204]
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(actual.out, "false");
|
// assert_eq!(actual.out, "false");
|
||||||
}
|
// }
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn short_stream_binary_overflow() {
|
// fn short_stream_binary_overflow() {
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101010101]
|
// nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101010101]
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(actual.out, "false");
|
// assert_eq!(actual.out, "false");
|
||||||
}
|
// }
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn long_stream_binary() {
|
// fn long_stream_binary() {
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
nu --testbin repeater (0x[01]) 32768 | bytes starts-with 0x[010101]
|
// nu --testbin repeater (0x[01]) 32768 | bytes starts-with 0x[010101]
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(actual.out, "true");
|
// assert_eq!(actual.out, "true");
|
||||||
}
|
// }
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn long_stream_binary_overflow() {
|
// fn long_stream_binary_overflow() {
|
||||||
// .. ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
// // .. ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
nu --testbin repeater (0x[01]) 32768 | bytes starts-with (0..32768 | each {|| 0x[01] } | bytes collect)
|
// nu --testbin repeater (0x[01]) 32768 | bytes starts-with (0..32768 | each {|| 0x[01] } | bytes collect)
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(actual.out, "false");
|
// assert_eq!(actual.out, "false");
|
||||||
}
|
// }
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn long_stream_binary_exact() {
|
// fn long_stream_binary_exact() {
|
||||||
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
nu --testbin repeater (0x[01020304]) 8192 | bytes starts-with (0..<8192 | each {|| 0x[01020304] } | bytes collect)
|
// nu --testbin repeater (0x[01020304]) 8192 | bytes starts-with (0..<8192 | each {|| 0x[01020304] } | bytes collect)
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(actual.out, "true");
|
// assert_eq!(actual.out, "true");
|
||||||
}
|
// }
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn long_stream_string_exact() {
|
// fn long_stream_string_exact() {
|
||||||
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
nu --testbin repeater hell 8192 | bytes starts-with (0..<8192 | each {|| "hell" | into binary } | bytes collect)
|
// nu --testbin repeater hell 8192 | bytes starts-with (0..<8192 | each {|| "hell" | into binary } | bytes collect)
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(actual.out, "true");
|
// assert_eq!(actual.out, "true");
|
||||||
}
|
// }
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn long_stream_mixed_exact() {
|
// fn long_stream_mixed_exact() {
|
||||||
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
|
// let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
|
||||||
let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
|
// let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
|
||||||
|
|
||||||
nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg)
|
// nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg)
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(
|
// assert_eq!(
|
||||||
actual.err, "",
|
// actual.err, "",
|
||||||
"invocation failed. command line limit likely reached"
|
// "invocation failed. command line limit likely reached"
|
||||||
);
|
// );
|
||||||
assert_eq!(actual.out, "true");
|
// assert_eq!(actual.out, "true");
|
||||||
}
|
// }
|
||||||
|
|
||||||
#[test]
|
// #[test]
|
||||||
fn long_stream_mixed_overflow() {
|
// fn long_stream_mixed_overflow() {
|
||||||
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||||
let actual = nu!(r#"
|
// let actual = nu!(r#"
|
||||||
let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
|
// let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
|
||||||
let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
|
// let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
|
||||||
|
|
||||||
nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg 0x[01])
|
// nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg 0x[01])
|
||||||
"#);
|
// "#);
|
||||||
|
|
||||||
assert_eq!(
|
// assert_eq!(
|
||||||
actual.err, "",
|
// actual.err, "",
|
||||||
"invocation failed. command line limit likely reached"
|
// "invocation failed. command line limit likely reached"
|
||||||
);
|
// );
|
||||||
assert_eq!(actual.out, "false");
|
// assert_eq!(actual.out, "false");
|
||||||
}
|
// }
|
||||||
|
@ -43,7 +43,7 @@ impl Command for Collect {
 stack.captures_to_stack_preserve_out_dest(closure.captures.clone());

 let metadata = input.metadata();
-let input = input.into_value(call.head);
+let input = input.into_value(call.head)?;

 let mut saved_positional = None;
 if let Some(var) = block.signature.get_positional(0) {
@ -1,5 +1,5 @@
 use nu_engine::command_prelude::*;
-use nu_protocol::{engine::StateWorkingSet, PipelineMetadata};
+use nu_protocol::{engine::StateWorkingSet, ByteStreamSource, PipelineMetadata};

 #[derive(Clone)]
 pub struct Describe;
@ -162,73 +162,38 @@ fn run(
|
|||||||
let metadata = input.metadata();
|
let metadata = input.metadata();
|
||||||
|
|
||||||
let description = match input {
|
let description = match input {
|
||||||
PipelineData::ExternalStream {
|
PipelineData::ByteStream(stream, ..) => {
|
||||||
ref stdout,
|
let description = if options.detailed {
|
||||||
ref stderr,
|
let origin = match stream.source() {
|
||||||
ref exit_code,
|
ByteStreamSource::Read(_) => "unknown",
|
||||||
..
|
ByteStreamSource::File(_) => "file",
|
||||||
} => {
|
ByteStreamSource::Child(_) => "external",
|
||||||
if options.detailed {
|
|
||||||
let stdout = if stdout.is_some() {
|
|
||||||
Value::record(
|
|
||||||
record! {
|
|
||||||
"type" => Value::string("stream", head),
|
|
||||||
"origin" => Value::string("external", head),
|
|
||||||
"subtype" => Value::string("any", head),
|
|
||||||
},
|
|
||||||
head,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
Value::nothing(head)
|
|
||||||
};
|
|
||||||
|
|
||||||
let stderr = if stderr.is_some() {
|
|
||||||
Value::record(
|
|
||||||
record! {
|
|
||||||
"type" => Value::string("stream", head),
|
|
||||||
"origin" => Value::string("external", head),
|
|
||||||
"subtype" => Value::string("any", head),
|
|
||||||
},
|
|
||||||
head,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
Value::nothing(head)
|
|
||||||
};
|
|
||||||
|
|
||||||
let exit_code = if exit_code.is_some() {
|
|
||||||
Value::record(
|
|
||||||
record! {
|
|
||||||
"type" => Value::string("stream", head),
|
|
||||||
"origin" => Value::string("external", head),
|
|
||||||
"subtype" => Value::string("int", head),
|
|
||||||
},
|
|
||||||
head,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
Value::nothing(head)
|
|
||||||
};
|
};
|
||||||
|
|
||||||
Value::record(
|
Value::record(
|
||||||
record! {
|
record! {
|
||||||
"type" => Value::string("stream", head),
|
"type" => Value::string("byte stream", head),
|
||||||
"origin" => Value::string("external", head),
|
"origin" => Value::string(origin, head),
|
||||||
"stdout" => stdout,
|
|
||||||
"stderr" => stderr,
|
|
||||||
"exit_code" => exit_code,
|
|
||||||
"metadata" => metadata_to_value(metadata, head),
|
"metadata" => metadata_to_value(metadata, head),
|
||||||
},
|
},
|
||||||
head,
|
head,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
Value::string("raw input", head)
|
Value::string("byte stream", head)
|
||||||
|
};
|
||||||
|
|
||||||
|
if !options.no_collect {
|
||||||
|
stream.drain()?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
description
|
||||||
}
|
}
|
||||||
PipelineData::ListStream(_, _) => {
|
PipelineData::ListStream(stream, ..) => {
|
||||||
if options.detailed {
|
if options.detailed {
|
||||||
let subtype = if options.no_collect {
|
let subtype = if options.no_collect {
|
||||||
Value::string("any", head)
|
Value::string("any", head)
|
||||||
} else {
|
} else {
|
||||||
describe_value(input.into_value(head), head, engine_state)
|
describe_value(stream.into_value(), head, engine_state)
|
||||||
};
|
};
|
||||||
Value::record(
|
Value::record(
|
||||||
record! {
|
record! {
|
||||||
@ -242,19 +207,19 @@ fn run(
|
|||||||
} else if options.no_collect {
|
} else if options.no_collect {
|
||||||
Value::string("stream", head)
|
Value::string("stream", head)
|
||||||
} else {
|
} else {
|
||||||
let value = input.into_value(head);
|
let value = stream.into_value();
|
||||||
let base_description = value.get_type().to_string();
|
let base_description = value.get_type().to_string();
|
||||||
Value::string(format!("{} (stream)", base_description), head)
|
Value::string(format!("{} (stream)", base_description), head)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => {
|
PipelineData::Value(value, ..) => {
|
||||||
let value = input.into_value(head);
|
|
||||||
if !options.detailed {
|
if !options.detailed {
|
||||||
Value::string(value.get_type().to_string(), head)
|
Value::string(value.get_type().to_string(), head)
|
||||||
} else {
|
} else {
|
||||||
describe_value(value, head, engine_state)
|
describe_value(value, head, engine_state)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
PipelineData::Empty => Value::string(Type::Nothing.to_string(), head),
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(description.into_pipeline_data())
|
Ok(description.into_pipeline_data())
|
||||||
|
@ -1,6 +1,13 @@
 use nu_engine::{command_prelude::*, get_eval_block_with_early_return, redirect_env};
-use nu_protocol::{engine::Closure, ListStream, OutDest, RawStream};
-use std::thread;
+use nu_protocol::{
+engine::Closure,
+process::{ChildPipe, ChildProcess, ExitStatus},
+ByteStream, ByteStreamSource, OutDest,
+};
+use std::{
+io::{Cursor, Read},
+thread,
+};

 #[derive(Clone)]
 pub struct Do;
@ -86,14 +93,10 @@ impl Command for Do {
|
|||||||
}
|
}
|
||||||
|
|
||||||
match result {
|
match result {
|
||||||
Ok(PipelineData::ExternalStream {
|
Ok(PipelineData::ByteStream(stream, metadata)) if capture_errors => {
|
||||||
stdout,
|
let span = stream.span();
|
||||||
stderr,
|
match stream.into_child() {
|
||||||
exit_code,
|
Ok(mut child) => {
|
||||||
span,
|
|
||||||
metadata,
|
|
||||||
trim_end_newline,
|
|
||||||
}) if capture_errors => {
|
|
||||||
// Use a thread to receive stdout message.
|
// Use a thread to receive stdout message.
|
||||||
// Or we may get a deadlock if child process sends out too much bytes to stderr.
|
// Or we may get a deadlock if child process sends out too much bytes to stderr.
|
||||||
//
|
//
|
||||||
@ -102,21 +105,16 @@ impl Command for Do {
|
|||||||
// consumes the first 65535 bytes
|
// consumes the first 65535 bytes
|
||||||
// So we need a thread to receive stdout message, then the current thread can continue to consume
|
// So we need a thread to receive stdout message, then the current thread can continue to consume
|
||||||
// stderr messages.
|
// stderr messages.
|
||||||
let stdout_handler = stdout
|
let stdout_handler = child
|
||||||
.map(|stdout_stream| {
|
.stdout
|
||||||
|
.take()
|
||||||
|
.map(|mut stdout| {
|
||||||
thread::Builder::new()
|
thread::Builder::new()
|
||||||
.name("stderr redirector".to_string())
|
.name("stdout consumer".to_string())
|
||||||
.spawn(move || {
|
.spawn(move || {
|
||||||
let ctrlc = stdout_stream.ctrlc.clone();
|
let mut buf = Vec::new();
|
||||||
let span = stdout_stream.span;
|
stdout.read_to_end(&mut buf)?;
|
||||||
RawStream::new(
|
Ok::<_, ShellError>(buf)
|
||||||
Box::new(std::iter::once(
|
|
||||||
stdout_stream.into_bytes().map(|s| s.item),
|
|
||||||
)),
|
|
||||||
ctrlc,
|
|
||||||
span,
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
})
|
})
|
||||||
.err_span(head)
|
.err_span(head)
|
||||||
})
|
})
|
||||||
@ -124,12 +122,12 @@ impl Command for Do {
|
|||||||
|
|
||||||
// Intercept stderr so we can return it in the error if the exit code is non-zero.
|
// Intercept stderr so we can return it in the error if the exit code is non-zero.
|
||||||
// The threading issues mentioned above dictate why we also need to intercept stdout.
|
// The threading issues mentioned above dictate why we also need to intercept stdout.
|
||||||
let mut stderr_ctrlc = None;
|
let stderr_msg = match child.stderr.take() {
|
||||||
let stderr_msg = match stderr {
|
None => String::new(),
|
||||||
None => "".to_string(),
|
Some(mut stderr) => {
|
||||||
Some(stderr_stream) => {
|
let mut buf = String::new();
|
||||||
stderr_ctrlc.clone_from(&stderr_stream.ctrlc);
|
stderr.read_to_string(&mut buf).err_span(span)?;
|
||||||
stderr_stream.into_string().map(|s| s.item)?
|
buf
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -143,58 +141,43 @@ impl Command for Do {
|
|||||||
span,
|
span,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
Ok(res) => Some(res),
|
Ok(res) => Some(res?),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
let exit_code: Vec<Value> = match exit_code {
|
if child.wait()? != ExitStatus::Exited(0) {
|
||||||
None => vec![],
|
|
||||||
Some(exit_code_stream) => exit_code_stream.into_iter().collect(),
|
|
||||||
};
|
|
||||||
if let Some(Value::Int { val: code, .. }) = exit_code.last() {
|
|
||||||
if *code != 0 {
|
|
||||||
return Err(ShellError::ExternalCommand {
|
return Err(ShellError::ExternalCommand {
|
||||||
label: "External command failed".to_string(),
|
label: "External command failed".to_string(),
|
||||||
help: stderr_msg,
|
help: stderr_msg,
|
||||||
span,
|
span,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
Ok(PipelineData::ExternalStream {
|
let mut child = ChildProcess::from_raw(None, None, None, span);
|
||||||
stdout,
|
if let Some(stdout) = stdout {
|
||||||
stderr: Some(RawStream::new(
|
child.stdout = Some(ChildPipe::Tee(Box::new(Cursor::new(stdout))));
|
||||||
Box::new(std::iter::once(Ok(stderr_msg.into_bytes()))),
|
|
||||||
stderr_ctrlc,
|
|
||||||
span,
|
|
||||||
None,
|
|
||||||
)),
|
|
||||||
exit_code: Some(ListStream::new(exit_code.into_iter(), span, None)),
|
|
||||||
span,
|
|
||||||
metadata,
|
|
||||||
trim_end_newline,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
Ok(PipelineData::ExternalStream {
|
if !stderr_msg.is_empty() {
|
||||||
stdout,
|
child.stderr = Some(ChildPipe::Tee(Box::new(Cursor::new(stderr_msg))));
|
||||||
stderr,
|
}
|
||||||
exit_code: _,
|
Ok(PipelineData::ByteStream(
|
||||||
span,
|
ByteStream::child(child, span),
|
||||||
metadata,
|
metadata,
|
||||||
trim_end_newline,
|
))
|
||||||
}) if ignore_program_errors
|
}
|
||||||
|
Err(stream) => Ok(PipelineData::ByteStream(stream, metadata)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(PipelineData::ByteStream(mut stream, metadata))
|
||||||
|
if ignore_program_errors
|
||||||
&& !matches!(caller_stack.stdout(), OutDest::Pipe | OutDest::Capture) =>
|
&& !matches!(caller_stack.stdout(), OutDest::Pipe | OutDest::Capture) =>
|
||||||
{
|
{
|
||||||
Ok(PipelineData::ExternalStream {
|
if let ByteStreamSource::Child(child) = stream.source_mut() {
|
||||||
stdout,
|
child.set_exit_code(0)
|
||||||
stderr,
|
}
|
||||||
exit_code: None,
|
Ok(PipelineData::ByteStream(stream, metadata))
|
||||||
span,
|
|
||||||
metadata,
|
|
||||||
trim_end_newline,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_shell_errors => {
|
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_shell_errors => {
|
||||||
Ok(PipelineData::empty())
|
Ok(PipelineData::empty())
|
||||||
|
@ -121,12 +121,14 @@ impl Command for For {
 Err(err) => {
 return Err(err);
 }
-Ok(pipeline) => {
-let exit_code = pipeline.drain_with_exit_code()?;
-if exit_code != 0 {
-return Ok(PipelineData::new_external_stream_with_only_exit_code(
-exit_code,
-));
+Ok(data) => {
+if let Some(status) = data.drain()? {
+let code = status.code();
+if code != 0 {
+return Ok(
+PipelineData::new_external_stream_with_only_exit_code(code),
+);
+}
 }
 }
 }
@ -159,12 +161,14 @@ impl Command for For {
 Err(err) => {
 return Err(err);
 }
-Ok(pipeline) => {
-let exit_code = pipeline.drain_with_exit_code()?;
-if exit_code != 0 {
-return Ok(PipelineData::new_external_stream_with_only_exit_code(
-exit_code,
-));
+Ok(data) => {
+if let Some(status) = data.drain()? {
+let code = status.code();
+if code != 0 {
+return Ok(
+PipelineData::new_external_stream_with_only_exit_code(code),
+);
+}
 }
 }
 }
@ -173,7 +177,7 @@ impl Command for For {
 x => {
 stack.add_var(var_id, x);

-eval_block(&engine_state, stack, block, PipelineData::empty())?.into_value(head);
+eval_block(&engine_state, stack, block, PipelineData::empty())?.into_value(head)?;
 }
 }
 Ok(PipelineData::empty())
@ -61,7 +61,7 @@ impl Command for Let {
 let eval_block = get_eval_block(engine_state);
 let stack = &mut stack.start_capture();
 let pipeline_data = eval_block(engine_state, stack, block, input)?;
-let value = pipeline_data.into_value(call.head);
+let value = pipeline_data.into_value(call.head)?;

 // if given variable type is Glob, and our result is string
 // then nushell need to convert from Value::String to Value::Glob
@ -53,12 +53,12 @@ impl Command for Loop {
 Err(err) => {
 return Err(err);
 }
-Ok(pipeline) => {
-let exit_code = pipeline.drain_with_exit_code()?;
-if exit_code != 0 {
-return Ok(PipelineData::new_external_stream_with_only_exit_code(
-exit_code,
-));
+Ok(data) => {
+if let Some(status) = data.drain()? {
+let code = status.code();
+if code != 0 {
+return Ok(PipelineData::new_external_stream_with_only_exit_code(code));
+}
 }
 }
 }
@ -61,7 +61,7 @@ impl Command for Mut {
 let eval_block = get_eval_block(engine_state);
 let stack = &mut stack.start_capture();
 let pipeline_data = eval_block(engine_state, stack, block, input)?;
-let value = pipeline_data.into_value(call.head);
+let value = pipeline_data.into_value(call.head)?;

 // if given variable type is Glob, and our result is string
 // then nushell need to convert from Value::String to Value::Glob
@ -62,10 +62,11 @@ impl Command for Try {
 }
 // external command may fail to run
 Ok(pipeline) => {
-let (pipeline, external_failed) = pipeline.check_external_failed();
+let (pipeline, external_failed) = pipeline.check_external_failed()?;
 if external_failed {
-let exit_code = pipeline.drain_with_exit_code()?;
-stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(exit_code, call.head));
+let status = pipeline.drain()?;
+let code = status.map(|status| status.code()).unwrap_or(0);
+stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(code.into(), call.head));
 let err_value = Value::nothing(call.head);
 handle_catch(err_value, catch_block, engine_state, stack, eval_block)
 } else {
@ -70,17 +70,19 @@ impl Command for While {
 Err(err) => {
 return Err(err);
 }
-Ok(pipeline) => {
-let exit_code = pipeline.drain_with_exit_code()?;
-if exit_code != 0 {
+Ok(data) => {
+if let Some(status) = data.drain()? {
+let code = status.code();
+if code != 0 {
 return Ok(
 PipelineData::new_external_stream_with_only_exit_code(
-exit_code,
+code,
 ),
 );
 }
 }
 }
+}
 } else {
 break;
 }
@@ -122,10 +122,9 @@ pub fn eval_block(

 stack.add_env_var("PWD".to_string(), Value::test_string(cwd.to_string_lossy()));

-match nu_engine::eval_block::<WithoutDebug>(engine_state, &mut stack, &block, input) {
-Err(err) => panic!("test eval error in `{}`: {:?}", "TODO", err),
-Ok(result) => result.into_value(Span::test_data()),
-}
+nu_engine::eval_block::<WithoutDebug>(engine_state, &mut stack, &block, input)
+.and_then(|data| data.into_value(Span::test_data()))
+.unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", "TODO", err))
 }

 pub fn check_example_evaluates_to_expected_output(
@@ -223,7 +222,7 @@ impl<'a> std::fmt::Debug for DebuggableValue<'a> {
 Value::Date { val, .. } => {
 write!(f, "Date({:?})", val)
 }
-Value::Range { val, .. } => match val {
+Value::Range { val, .. } => match **val {
 Range::IntRange(range) => match range.end() {
 Bound::Included(end) => write!(
 f,
@@ -43,7 +43,8 @@ impl Command for PluginAdd {

 fn extra_usage(&self) -> &str {
 r#"
-This does not load the plugin commands into the scope - see `register` for that.
+This does not load the plugin commands into the scope - see `plugin use` for
+that.

 Instead, it runs the plugin to get its command signatures, and then edits the
 plugin registry file (by default, `$nu.plugin-path`). The changes will be
@@ -58,11 +58,11 @@ impl<'a> StyleComputer<'a> {
 Some(ComputableStyle::Closure(closure, span)) => {
 let result = ClosureEvalOnce::new(self.engine_state, self.stack, closure.clone())
 .debug(false)
-.run_with_value(value.clone());
+.run_with_value(value.clone())
+.and_then(|data| data.into_value(*span));

 match result {
-Ok(v) => {
-let value = v.into_value(*span);
+Ok(value) => {
 // These should be the same color data forms supported by color_config.
 match value {
 Value::Record { .. } => color_record_to_nustyle(&value),
@@ -146,7 +146,10 @@ impl<'a> StyleComputer<'a> {
 let span = value.span();
 match value {
 Value::Closure { val, .. } => {
-map.insert(key.to_string(), ComputableStyle::Closure(val.clone(), span));
+map.insert(
+key.to_string(),
+ComputableStyle::Closure(*val.clone(), span),
+);
 }
 Value::Record { .. } => {
 map.insert(
@@ -60,63 +60,13 @@ impl Command for BytesStartsWith {
 pattern,
 cell_paths,
 };
-match input {
-PipelineData::ExternalStream {
-stdout: Some(stream),
-span,
-..
-} => {
-let mut i = 0;
-
-for item in stream {
-let byte_slice = match &item {
-// String and binary data are valid byte patterns
-Ok(Value::String { val, .. }) => val.as_bytes(),
-Ok(Value::Binary { val, .. }) => val,
-// If any Error value is output, echo it back
-Ok(v @ Value::Error { .. }) => return Ok(v.clone().into_pipeline_data()),
-// Unsupported data
-Ok(other) => {
-return Ok(Value::error(
-ShellError::OnlySupportsThisInputType {
-exp_input_type: "string and binary".into(),
-wrong_type: other.get_type().to_string(),
-dst_span: span,
-src_span: other.span(),
-},
-span,
-)
-.into_pipeline_data());
-}
-Err(err) => return Err(err.to_owned()),
-};
-
-let max = byte_slice.len().min(arg.pattern.len() - i);
-
-if byte_slice[..max] == arg.pattern[i..i + max] {
-i += max;
-
-if i >= arg.pattern.len() {
-return Ok(Value::bool(true, span).into_pipeline_data());
-}
-} else {
-return Ok(Value::bool(false, span).into_pipeline_data());
-}
-}
-
-// We reached the end of the stream and never returned,
-// the pattern wasn't exhausted so it probably doesn't match
-Ok(Value::bool(false, span).into_pipeline_data())
-}
-_ => operate(
+operate(
 starts_with,
 arg,
 input,
 call.head,
 engine_state.ctrlc.clone(),
-),
-}
+)
 }

 fn examples(&self) -> Vec<Example> {
@@ -121,7 +121,7 @@ impl Command for Histogram {
 };

 let span = call.head;
-let data_as_value = input.into_value(span);
+let data_as_value = input.into_value(span)?;
 let value_span = data_as_value.span();
 // `input` is not a list, here we can return an error.
 run_histogram(
@@ -73,7 +73,7 @@ impl Command for Fill {
 }

 fn search_terms(&self) -> Vec<&str> {
-vec!["display", "render", "format", "pad", "align"]
+vec!["display", "render", "format", "pad", "align", "repeat"]
 }

 fn examples(&self) -> Vec<Example> {
@@ -91,9 +91,9 @@ impl Command for Fill {
 result: Some(Value::string("────────nushell", Span::test_data())),
 },
 Example {
-description: "Fill a string on both sides to a width of 15 with the character '─'",
-example: "'nushell' | fill --alignment m --character '─' --width 15",
-result: Some(Value::string("────nushell────", Span::test_data())),
+description: "Fill an empty string with 10 '─' characters",
+example: "'' | fill --character '─' --width 10",
+result: Some(Value::string("──────────", Span::test_data())),
 },
 Example {
 description:
@@ -127,26 +127,16 @@ fn into_binary(
 let cell_paths = call.rest(engine_state, stack, 0)?;
 let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);

-match input {
-PipelineData::ExternalStream { stdout: None, .. } => {
-Ok(Value::binary(vec![], head).into_pipeline_data())
-}
-PipelineData::ExternalStream {
-stdout: Some(stream),
-..
-} => {
+if let PipelineData::ByteStream(stream, ..) = input {
 // TODO: in the future, we may want this to stream out, converting each to bytes
-let output = stream.into_bytes()?;
-Ok(Value::binary(output.item, head).into_pipeline_data())
-}
-_ => {
+Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
+} else {
 let args = Arguments {
 cell_paths,
 compact: call.has_flag(engine_state, stack, "compact")?,
 };
 operate(action, args, input, call.head, engine_state.ctrlc.clone())
 }
-}
 }

 pub fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
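The `into binary` change above, and the `into glob` and `into string` hunks below, all collapse the old `ExternalStream` match into a single `ByteStream` branch. A minimal sketch of that shared shape, assuming the `PipelineData::ByteStream` variant and `ByteStream::into_bytes` used in this diff (`head`, `args`, and `action` come from the surrounding command):

    // Sketch only: collect a byte stream into a single binary Value; any other
    // input falls back to the cell-path based operate helper.
    if let PipelineData::ByteStream(stream, ..) = input {
        Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
    } else {
        operate(action, args, input, call.head, engine_state.ctrlc.clone())
    }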
@@ -101,11 +101,11 @@ fn into_cell_path(call: &Call, input: PipelineData) -> Result<PipelineData, ShellError> {
 let list: Vec<_> = stream.into_iter().collect();
 Ok(list_to_cell_path(&list, head)?.into_pipeline_data())
 }
-PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
+PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
 exp_input_type: "list, int".into(),
-wrong_type: "raw data".into(),
+wrong_type: "byte stream".into(),
 dst_span: head,
-src_span: span,
+src_span: stream.span(),
 }),
 PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: head }),
 }
@@ -82,20 +82,12 @@ fn glob_helper(
 let head = call.head;
 let cell_paths = call.rest(engine_state, stack, 0)?;
 let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
-let args = Arguments { cell_paths };
-match input {
-PipelineData::ExternalStream { stdout: None, .. } => {
-Ok(Value::glob(String::new(), false, head).into_pipeline_data())
-}
-PipelineData::ExternalStream {
-stdout: Some(stream),
-..
-} => {
+if let PipelineData::ByteStream(stream, ..) = input {
 // TODO: in the future, we may want this to stream out, converting each to bytes
-let output = stream.into_string()?;
-Ok(Value::glob(output.item, false, head).into_pipeline_data())
-}
-_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
+Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data())
+} else {
+let args = Arguments { cell_paths };
+operate(action, args, input, head, engine_state.ctrlc.clone())
 }
 }

@@ -108,7 +108,7 @@ fn into_record(
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let input = input.into_value(call.head);
+let input = input.into_value(call.head)?;
 let input_type = input.get_type();
 let span = input.span();
 let res = match input {
@@ -155,26 +155,18 @@ fn string_helper(
 }
 let cell_paths = call.rest(engine_state, stack, 0)?;
 let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);

+if let PipelineData::ByteStream(stream, ..) = input {
+// TODO: in the future, we may want this to stream out, converting each to bytes
+Ok(Value::string(stream.into_string()?, head).into_pipeline_data())
+} else {
 let config = engine_state.get_config().clone();
 let args = Arguments {
 decimals_value,
 cell_paths,
 config,
 };
-match input {
-PipelineData::ExternalStream { stdout: None, .. } => {
-Ok(Value::string(String::new(), head).into_pipeline_data())
-}
-PipelineData::ExternalStream {
-stdout: Some(stream),
-..
-} => {
-// TODO: in the future, we may want this to stream out, converting each to bytes
-let output = stream.into_string()?;
-Ok(Value::string(output.item, head).into_pipeline_data())
-}
-_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
+operate(action, args, input, head, engine_state.ctrlc.clone())
 }
 }

@@ -5,7 +5,7 @@ use commands::add_commands_decls;

 pub use values::{
 convert_sqlite_row_to_nu_value, convert_sqlite_value_to_nu_value, open_connection_in_memory,
-open_connection_in_memory_custom, SQLiteDatabase, MEMORY_DB,
+open_connection_in_memory_custom, values_to_sql, SQLiteDatabase, MEMORY_DB,
 };

 use nu_protocol::engine::StateWorkingSet;
@@ -3,5 +3,5 @@ pub mod sqlite;

 pub use sqlite::{
 convert_sqlite_row_to_nu_value, convert_sqlite_value_to_nu_value, open_connection_in_memory,
-open_connection_in_memory_custom, SQLiteDatabase, MEMORY_DB,
+open_connection_in_memory_custom, values_to_sql, SQLiteDatabase, MEMORY_DB,
 };
@@ -91,7 +91,7 @@ impl SQLiteDatabase {
 }

 pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
-let value = input.into_value(span);
+let value = input.into_value(span)?;
 Self::try_from_value(value)
 }

@@ -29,7 +29,7 @@ impl Command for Inspect {
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
 let input_metadata = input.metadata();
-let input_val = input.into_value(call.head);
+let input_val = input.into_value(call.head)?;
 if input_val.is_nothing() {
 return Err(ShellError::PipelineEmpty {
 dst_span: call.head,
@@ -53,13 +53,12 @@ impl Command for TimeIt {
 eval_block(engine_state, stack, block, input)?
 } else {
 let eval_expression_with_input = get_eval_expression_with_input(engine_state);
-eval_expression_with_input(engine_state, stack, command_to_run, input)
-.map(|res| res.0)?
+eval_expression_with_input(engine_state, stack, command_to_run, input)?.0
 }
 } else {
 PipelineData::empty()
 }
-.into_value(call.head);
+.into_value(call.head)?;

 let end_time = Instant::now();

10
crates/nu-command/src/env/with_env.rs
vendored
@@ -90,10 +90,7 @@ fn with_env(
 return Err(ShellError::CantConvert {
 to_type: "record".into(),
 from_type: x.get_type().to_string(),
-span: call
-.positional_nth(1)
-.expect("already checked through .req")
-.span,
+span: x.span(),
 help: None,
 });
 }
@@ -124,10 +121,7 @@ fn with_env(
 return Err(ShellError::CantConvert {
 to_type: "record".into(),
 from_type: x.get_type().to_string(),
-span: call
-.positional_nth(1)
-.expect("already checked through .req")
-.span,
+span: x.span(),
 help: None,
 });
 }
@@ -1,3 +1,4 @@
+use nu_cmd_base::util::get_init_cwd;
 use nu_engine::command_prelude::*;
 use nu_utils::filesystem::{have_permission, PermissionResult};

@@ -39,7 +40,10 @@ impl Command for Cd {
 ) -> Result<PipelineData, ShellError> {
 let physical = call.has_flag(engine_state, stack, "physical")?;
 let path_val: Option<Spanned<String>> = call.opt(engine_state, stack, 0)?;
-let cwd = engine_state.cwd(Some(stack))?;
+// If getting PWD failed, default to the initial directory. This way, the
+// user can use `cd` to recover PWD to a good state.
+let cwd = engine_state.cwd(Some(stack)).unwrap_or(get_init_cwd());

 let path_val = {
 if let Some(path) = path_val {
@@ -52,13 +56,13 @@ impl Command for Cd {
 }
 };

-let (path, span) = match path_val {
+let path = match path_val {
 Some(v) => {
 if v.item == "-" {
 if let Some(oldpwd) = stack.get_env_var(engine_state, "OLDPWD") {
-(oldpwd.to_path()?, v.span)
+oldpwd.to_path()?
 } else {
-(cwd, v.span)
+cwd
 }
 } else {
 // Trim whitespace from the end of path.
@@ -66,7 +70,7 @@ impl Command for Cd {
 &v.item.trim_end_matches(|x| matches!(x, '\x09'..='\x0d'));

 // If `--physical` is specified, canonicalize the path; otherwise expand the path.
-let path = if physical {
+if physical {
 if let Ok(path) = nu_path::canonicalize_with(path_no_whitespace, &cwd) {
 if !path.is_dir() {
 return Err(ShellError::NotADirectory { span: v.span });
@@ -90,18 +94,11 @@ impl Command for Cd {
 return Err(ShellError::NotADirectory { span: v.span });
 };
 path
+}
+}
+}
+None => nu_path::expand_tilde("~"),
 };
-(path, v.span)
-}
-}
-None => {
-let path = nu_path::expand_tilde("~");
-(path, call.head)
-}
-};
-
-// Strip the trailing slash from the new path. This is required for PWD.
-let path = nu_path::strip_trailing_slash(&path);

 // Set OLDPWD.
 // We're using `Stack::get_env_var()` instead of `EngineState::cwd()` to avoid a conversion roundtrip.
@@ -113,7 +110,7 @@ impl Command for Cd {
 //FIXME: this only changes the current scope, but instead this environment variable
 //should probably be a block that loads the information from the state in the overlay
 PermissionResult::PermissionOk => {
-stack.add_env_var("PWD".into(), Value::string(path.to_string_lossy(), span));
+stack.set_cwd(path)?;
 Ok(PipelineData::empty())
 }
 PermissionResult::PermissionDenied(reason) => Err(ShellError::IOError {
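Editorial note: after this hunk `cd` no longer strips the trailing slash and writes `PWD` by hand; both steps move behind a single call. A hedged sketch, assuming `Stack::set_cwd` as used above:

    // Sketch only.
    // Before: let path = nu_path::strip_trailing_slash(&path);
    //         stack.add_env_var("PWD".into(), Value::string(path.to_string_lossy(), span));
    // After: validation and storage happen inside set_cwd.
    stack.set_cwd(path)?;
    Ok(PipelineData::empty())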
@@ -1,8 +1,8 @@
 use super::util::get_rest_for_glob_pattern;
 #[allow(deprecated)]
 use nu_engine::{command_prelude::*, current_dir, get_eval_block};
-use nu_protocol::{BufferedReader, DataSource, NuGlob, PipelineMetadata, RawStream};
-use std::{io::BufReader, path::Path};
+use nu_protocol::{ByteStream, DataSource, NuGlob, PipelineMetadata};
+use std::path::Path;

 #[cfg(feature = "sqlite")]
 use crate::database::SQLiteDatabase;
@@ -143,23 +143,13 @@ impl Command for Open {
 }
 };

-let buf_reader = BufReader::new(file);
-
-let file_contents = PipelineData::ExternalStream {
-stdout: Some(RawStream::new(
-Box::new(BufferedReader::new(buf_reader)),
-ctrlc.clone(),
-call_span,
-None,
-)),
-stderr: None,
-exit_code: None,
-span: call_span,
-metadata: Some(PipelineMetadata {
+let stream = PipelineData::ByteStream(
+ByteStream::file(file, call_span, ctrlc.clone()),
+Some(PipelineMetadata {
 data_source: DataSource::FilePath(path.to_path_buf()),
 }),
-trim_end_newline: false,
-};
+);
 let exts_opt: Option<Vec<String>> = if raw {
 None
 } else {
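A short sketch of how `open` now builds its output, based on the `ByteStream::file` constructor and `PipelineData::ByteStream` variant used in this hunk (`file`, `call_span`, `ctrlc`, and `path` come from the surrounding code):

    // Sketch only: wrap the opened file in a ByteStream and tag it with the
    // source path so downstream converters know where the data came from.
    let stream = PipelineData::ByteStream(
        ByteStream::file(file, call_span, ctrlc.clone()),
        Some(PipelineMetadata {
            data_source: DataSource::FilePath(path.to_path_buf()),
        }),
    );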
@@ -184,9 +174,9 @@ impl Command for Open {
 let decl = engine_state.get_decl(converter_id);
 let command_output = if let Some(block_id) = decl.get_block_id() {
 let block = engine_state.get_block(block_id);
-eval_block(engine_state, stack, block, file_contents)
+eval_block(engine_state, stack, block, stream)
 } else {
-decl.run(engine_state, stack, &Call::new(call_span), file_contents)
+decl.run(engine_state, stack, &Call::new(call_span), stream)
 };
 output.push(command_output.map_err(|inner| {
 ShellError::GenericError{
@@ -198,7 +188,7 @@ impl Command for Open {
 }
 })?);
 }
-None => output.push(file_contents),
+None => output.push(stream),
 }
 }
 }
Some files were not shown because too many files have changed in this diff.