Mirror of https://github.com/nushell/nushell.git (synced 2025-03-13 06:58:47 +01:00)

Merge branch 'main' into blow-into-bits
This commit is contained in:
commit c20e490078

52  .github/workflows/beta-test.yml  (vendored, new file)
@@ -0,0 +1,52 @@
name: Test on Beta Toolchain
# This workflow is made to run our tests on the beta toolchain to validate that
# the beta toolchain works.
# We do not intend to test here that we are working correctly but rather that
# the beta toolchain works correctly.
# The ci.yml handles our actual testing with our guarantees.

on:
  schedule:
    # If this workflow fails, GitHub notifications will go to the last person
    # who edited this line.
    # See: https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/monitoring-workflows/notifications-for-workflow-runs
    - cron: '0 0 * * *' # Runs daily at midnight UTC

env:
  NUSHELL_CARGO_PROFILE: ci
  NU_LOG_LEVEL: DEBUG

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build-and-test:
    # this job is more for testing the beta toolchain and not our tests, so if
    # this fails but the tests of the regular ci pass, then this is fine
    continue-on-error: true

    strategy:
      fail-fast: true
      matrix:
        platform: [windows-latest, macos-latest, ubuntu-22.04]

    runs-on: ${{ matrix.platform }}

    steps:
      - uses: actions/checkout@v4

      - run: rustup update beta

      - name: Tests
        run: cargo +beta test --workspace --profile ci --exclude nu_plugin_*
      - name: Check for clean repo
        shell: bash
        run: |
          if [ -n "$(git status --porcelain)" ]; then
            echo "there are changes";
            git status --porcelain
            exit 1
          else
            echo "no changes in working directory";
          fi
25  .github/workflows/ci.yml  (vendored)
@@ -3,6 +3,7 @@ on:
  push:
    branches:
      - main
      - 'patch-release-*'

name: continuous-integration

@@ -21,14 +22,14 @@ jobs:
    strategy:
      fail-fast: true
      matrix:
        # Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu
        # Pinning to Ubuntu 22.04 because building on newer Ubuntu versions causes linux-gnu
        # builds to link against a too-new-for-many-Linux-installs glibc version. Consider
        # revisiting this when 20.04 is closer to EOL (April 2025)
        # revisiting this when 22.04 is closer to EOL (June 2027)
        #
        # Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
        # instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
        # removed and we're only building the `polars` plugin instead
        platform: [windows-latest, macos-13, ubuntu-20.04]
        platform: [windows-latest, macos-13, ubuntu-22.04]

    runs-on: ${{ matrix.platform }}

@@ -36,7 +37,7 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0

      - name: cargo fmt
        run: cargo fmt --all -- --check
@@ -56,7 +57,7 @@ jobs:
    strategy:
      fail-fast: true
      matrix:
        platform: [windows-latest, macos-latest, ubuntu-20.04]
        platform: [windows-latest, macos-latest, ubuntu-22.04]

    runs-on: ${{ matrix.platform }}

@@ -64,7 +65,7 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0

      - name: Tests
        run: cargo test --workspace --profile ci --exclude nu_plugin_*
@@ -83,7 +84,7 @@ jobs:
    strategy:
      fail-fast: true
      matrix:
        platform: [ubuntu-20.04, macos-latest, windows-latest]
        platform: [ubuntu-22.04, macos-latest, windows-latest]
        py:
          - py

@@ -93,10 +94,10 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0

      - name: Install Nushell
        run: cargo install --path . --locked --no-default-features --force
        run: cargo install --path . --locked --force

      - name: Standard library tests
        run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
@@ -136,7 +137,7 @@ jobs:
        # instead of 14 GB) which is too little for us right now.
        #
        # Failure occurring with clippy for rust 1.77.2
        platform: [windows-latest, macos-13, ubuntu-20.04]
        platform: [windows-latest, macos-13, ubuntu-22.04]

    runs-on: ${{ matrix.platform }}

@@ -144,7 +145,7 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0

      - name: Clippy
        run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
@@ -185,7 +186,7 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0

      - name: Add wasm32-unknown-unknown target
        run: rustup target add wasm32-unknown-unknown
2  .github/workflows/nightly-build.yml  (vendored)
@@ -131,7 +131,7 @@ jobs:
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
        with:
          rustflags: ''
2  .github/workflows/release.yml  (vendored)
@@ -80,7 +80,7 @@ jobs:
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain
        uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
        with:
          cache: false
2  .github/workflows/typos.yml  (vendored)
@@ -10,4 +10,4 @@ jobs:
        uses: actions/checkout@v4.1.7

      - name: Check spelling
        uses: crate-ci/typos@v1.29.5
        uses: crate-ci/typos@v1.29.10
244  Cargo.lock  (generated)
@@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4

[[package]]
name = "addr2line"
@@ -293,6 +293,15 @@ version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4790f9e8961209112beb783d85449b508673cf4a6a419c8449b210743ac4dbe9"

[[package]]
name = "atomic"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d818003e740b63afc82337e3160717f4f63078720a810b7b903e70a5d1d2994"
dependencies = [
 "bytemuck",
]

[[package]]
name = "atomic-waker"
version = "1.1.2"
@@ -752,9 +761,9 @@ dependencies = [

[[package]]
name = "bracoxide"
version = "0.1.4"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbc0bcb5424e8e1f29c21a00f2d222df5e8e9779732ff03f840315d8fbac708e"
checksum = "7f52991c481aa9d7518254cfb6ce5726d24ff8c5d383d6422cd3793729b0962a"

[[package]]
name = "brotli"
@@ -847,9 +856,9 @@ dependencies = [

[[package]]
name = "bytesize"
version = "1.3.0"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc"
checksum = "2d2c12f985c78475a6b8d629afd0c360260ef34cfef52efccdcfd31972f81c2e"

[[package]]
name = "calamine"
@@ -884,9 +893,9 @@ dependencies = [

[[package]]
name = "cc"
version = "1.2.3"
version = "1.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d"
checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c"
dependencies = [
 "jobserver",
 "libc",
@@ -1088,25 +1097,12 @@ version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24f165e7b643266ea80cb858aed492ad9280e3e05ce24d4a99d7d7b889b6a4d9"
dependencies = [
 "crossterm 0.28.1",
 "crossterm",
 "strum",
 "strum_macros",
 "unicode-width 0.2.0",
]

[[package]]
name = "compact_str"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f86b9c4c00838774a6d902ef931eff7470720c51d90c2e32cfe15dc304737b3f"
dependencies = [
 "castaway",
 "cfg-if",
 "itoa",
 "ryu",
 "static_assertions",
]

[[package]]
name = "compact_str"
version = "0.8.0"
@@ -1283,22 +1279,6 @@ version = "0.8.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"

[[package]]
name = "crossterm"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df"
dependencies = [
 "bitflags 2.6.0",
 "crossterm_winapi",
 "libc",
 "mio 0.8.11",
 "parking_lot",
 "signal-hook",
 "signal-hook-mio",
 "winapi",
]

[[package]]
name = "crossterm"
version = "0.28.1"
@@ -1426,10 +1406,45 @@ dependencies = [
]

[[package]]
name = "data-encoding"
version = "2.7.0"
name = "darling"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e60eed09d8c01d3cee5b7d30acb059b76614c918fa0f992e0dd6eeb10daad6f"
checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989"
dependencies = [
 "darling_core",
 "darling_macro",
]

[[package]]
name = "darling_core"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5"
dependencies = [
 "fnv",
 "ident_case",
 "proc-macro2",
 "quote",
 "strsim",
 "syn 2.0.90",
]

[[package]]
name = "darling_macro"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [
 "darling_core",
 "quote",
 "syn 2.0.90",
]

[[package]]
name = "data-encoding"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "575f75dfd25738df5b91b8e43e14d44bda14637a58fae779fd2b064f8bf3e010"

[[package]]
name = "deranged"
@@ -2639,6 +2654,12 @@ dependencies = [
 "syn 2.0.90",
]

[[package]]
name = "ident_case"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"

[[package]]
name = "idna"
version = "1.0.3"
@@ -2684,6 +2705,12 @@ dependencies = [
 "web-time",
]

[[package]]
name = "indoc"
version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5"

[[package]]
name = "inotify"
version = "0.9.6"
@@ -2704,6 +2731,19 @@ dependencies = [
 "libc",
]

[[package]]
name = "instability"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf9fed6d91cfb734e7476a06bde8300a1b94e217e1b523b6f0cd1a01998c71d"
dependencies = [
 "darling",
 "indoc",
 "proc-macro2",
 "quote",
 "syn 2.0.90",
]

[[package]]
name = "interprocess"
version = "2.2.2"
@@ -2784,15 +2824,6 @@ dependencies = [
 "either",
]

[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
dependencies = [
 "either",
]

[[package]]
name = "itertools"
version = "0.13.0"
@@ -3074,9 +3105,9 @@ dependencies = [

[[package]]
name = "lsp-textdocument"
version = "0.4.1"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a17dcde15cae78fb2e54166da22cd6c53f48033a0391cc392b22f2437805792"
checksum = "2d564d595f4e3dcd3c071bf472dbd2cac53bc3665ae7222d2abfecd18feaed2c"
dependencies = [
 "lsp-types",
 "serde_json",
@@ -3438,7 +3469,7 @@ name = "nu"
version = "0.102.1"
dependencies = [
 "assert_cmd",
 "crossterm 0.28.1",
 "crossterm",
 "ctrlc",
 "dirs",
 "fancy-regex",
@@ -3493,7 +3524,7 @@ name = "nu-cli"
version = "0.102.1"
dependencies = [
 "chrono",
 "crossterm 0.28.1",
 "crossterm",
 "fancy-regex",
 "is_executable",
 "log",
@@ -3516,6 +3547,7 @@ dependencies = [
 "percent-encoding",
 "reedline",
 "rstest",
 "strum",
 "sysinfo",
 "tempfile",
 "unicode-segmentation",
@@ -3570,6 +3602,8 @@ dependencies = [
 "nu-parser",
 "nu-protocol",
 "nu-utils",
 "quickcheck",
 "quickcheck_macros",
 "shadow-rs",
]

@@ -3611,7 +3645,7 @@ dependencies = [
 "chrono",
 "chrono-humanize",
 "chrono-tz",
 "crossterm 0.28.1",
 "crossterm",
 "csv",
 "data-encoding",
 "devicons",
@@ -3666,8 +3700,6 @@ dependencies = [
 "print-positions",
 "procfs",
 "quick-xml 0.37.1",
 "quickcheck",
 "quickcheck_macros",
 "rand",
 "rand_chacha",
 "rayon",
@@ -3716,7 +3748,7 @@ name = "nu-derive-value"
version = "0.102.1"
dependencies = [
 "heck",
 "proc-macro-error",
 "proc-macro-error2",
 "proc-macro2",
 "quote",
 "syn 2.0.90",
@@ -3739,7 +3771,7 @@ version = "0.102.1"
dependencies = [
 "ansi-str",
 "anyhow",
 "crossterm 0.28.1",
 "crossterm",
 "log",
 "lscolors",
 "nu-ansi-term",
@@ -3762,6 +3794,7 @@ name = "nu-glob"
version = "0.102.1"
dependencies = [
 "doc-comment",
 "nu-protocol",
]

[[package]]
@@ -3790,6 +3823,7 @@ dependencies = [
 "nu-cli",
 "nu-cmd-lang",
 "nu-command",
 "nu-engine",
 "nu-glob",
 "nu-parser",
 "nu-protocol",
@@ -4017,7 +4051,7 @@ dependencies = [
name = "nu-utils"
version = "0.102.1"
dependencies = [
 "crossterm 0.28.1",
 "crossterm",
 "crossterm_winapi",
 "fancy-regex",
 "log",
@@ -5350,7 +5384,7 @@ dependencies = [
 "bincode",
 "bytemuck",
 "bytes",
 "compact_str 0.8.0",
 "compact_str",
 "flate2",
 "hashbrown 0.15.2",
 "indexmap",
@@ -5452,26 +5486,25 @@ dependencies = [
]

[[package]]
name = "proc-macro-error"
version = "1.0.4"
name = "proc-macro-error-attr2"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
dependencies = [
 "proc-macro-error-attr",
 "proc-macro2",
 "quote",
 "version_check",
]

[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
name = "proc-macro-error2"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
dependencies = [
 "proc-macro-error-attr2",
 "proc-macro2",
 "quote",
 "version_check",
 "syn 2.0.90",
]

[[package]]
@@ -5714,22 +5747,23 @@ dependencies = [

[[package]]
name = "ratatui"
version = "0.26.3"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f44c9e68fd46eda15c646fbb85e1040b657a58cdc8c98db1d97a55930d991eef"
checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b"
dependencies = [
 "bitflags 2.6.0",
 "cassowary",
 "compact_str 0.7.1",
 "crossterm 0.27.0",
 "itertools 0.12.1",
 "compact_str",
 "crossterm",
 "indoc",
 "instability",
 "itertools 0.13.0",
 "lru",
 "paste",
 "stability",
 "strum",
 "unicode-segmentation",
 "unicode-truncate",
 "unicode-width 0.1.11",
 "unicode-width 0.2.0",
]

[[package]]
@@ -5810,14 +5844,13 @@ dependencies = [
[[package]]
name = "reedline"
version = "0.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bfa8cb0ad84c396c936d8abb814703d7042a433d2da75a0c7060cbdc89109f3"
source = "git+https://github.com/nushell/reedline?branch=main#f12c4f16aaeff9fd62ca8b2c75606b40e32489c7"
dependencies = [
 "arboard",
 "chrono",
 "crossterm 0.28.1",
 "crossterm",
 "fd-lock",
 "itertools 0.12.1",
 "itertools 0.13.0",
 "nu-ansi-term",
 "rusqlite",
 "serde",
@@ -5956,15 +5989,14 @@ dependencies = [

[[package]]
name = "ring"
version = "0.17.8"
version = "0.17.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d"
checksum = "70ac5d832aa16abd7d1def883a8545280c20a60f523a370aa3a9617c2b8550ee"
dependencies = [
 "cc",
 "cfg-if",
 "getrandom",
 "libc",
 "spin",
 "untrusted",
 "windows-sys 0.52.0",
]
@@ -6058,9 +6090,9 @@ dependencies = [

[[package]]
name = "rust-embed"
version = "8.5.0"
version = "8.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa66af4a4fdd5e7ebc276f115e895611a34739a9c1c01028383d612d550953c0"
checksum = "0b3aba5104622db5c9fc61098de54708feb732e7763d7faa2fa625899f00bf6f"
dependencies = [
 "rust-embed-impl",
 "rust-embed-utils",
@@ -6069,9 +6101,9 @@ dependencies = [

[[package]]
name = "rust-embed-impl"
version = "8.5.0"
version = "8.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6125dbc8867951125eec87294137f4e9c2c96566e61bf72c45095a7c77761478"
checksum = "1f198c73be048d2c5aa8e12f7960ad08443e56fd39cc26336719fdb4ea0ebaae"
dependencies = [
 "proc-macro2",
 "quote",
@@ -6082,9 +6114,9 @@ dependencies = [

[[package]]
name = "rust-embed-utils"
version = "8.5.0"
version = "8.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e5347777e9aacb56039b0e1f28785929a8a3b709e87482e7442c72e7c12529d"
checksum = "5a2fcdc9f40c8dc2922842ca9add611ad19f332227fc651d015881ad1552bd9a"
dependencies = [
 "sha2",
 "walkdir",
@@ -6272,9 +6304,9 @@ dependencies = [

[[package]]
name = "scc"
version = "2.2.5"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed"
checksum = "ea091f6cac2595aa38993f04f4ee692ed43757035c36e67c180b6828356385b1"
dependencies = [
 "sdd",
]
@@ -6346,9 +6378,9 @@ dependencies = [

[[package]]
name = "sdd"
version = "3.0.4"
version = "3.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49c1eeaf4b6a87c7479688c6d52b9f1153cedd3c489300564f932b065c6eab95"
checksum = "b07779b9b918cc05650cb30f404d4d7835d26df37c235eded8a6832e2fb82cca"

[[package]]
name = "security-framework"
@@ -6523,6 +6555,12 @@ dependencies = [
 "stable_deref_trait",
]

[[package]]
name = "sha1_smol"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d"

[[package]]
name = "sha2"
version = "0.10.8"
@@ -6574,7 +6612,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd"
dependencies = [
 "libc",
 "mio 0.8.11",
 "mio 1.0.3",
 "signal-hook",
]
@@ -6701,12 +6738,6 @@ dependencies = [
 "windows-sys 0.52.0",
]

[[package]]
name = "spin"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"

[[package]]
name = "sqlparser"
version = "0.53.0"
@@ -6716,16 +6747,6 @@ dependencies = [
 "log",
]

[[package]]
name = "stability"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d904e7009df136af5297832a3ace3370cd14ff1546a232f4f185036c2736fcac"
dependencies = [
 "quote",
 "syn 2.0.90",
]

[[package]]
name = "stable_deref_trait"
version = "1.2.0"
@@ -7698,8 +7719,11 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "744018581f9a3454a9e15beb8a33b017183f1e7c0cd170232a2d1453b23a51c4"
dependencies = [
 "atomic",
 "getrandom",
 "md-5",
 "serde",
 "sha1_smol",
]

[[package]]
20  Cargo.toml
@@ -10,7 +10,7 @@ homepage = "https://www.nushell.sh"
license = "MIT"
name = "nu"
repository = "https://github.com/nushell/nushell"
rust-version = "1.82.0"
rust-version = "1.83.0"
version = "0.102.1"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -66,11 +66,11 @@ alphanumeric-sort = "1.5"
ansi-str = "0.8"
anyhow = "1.0.82"
base64 = "0.22.1"
bracoxide = "0.1.4"
bracoxide = "0.1.5"
brotli = "7.0"
byteorder = "1.5"
bytes = "1"
bytesize = "1.3"
bytesize = "1.3.1"
calamine = "0.26.1"
chardetng = "0.1.17"
chrono = { default-features = false, version = "0.4.34" }
@@ -104,7 +104,7 @@ lru = "0.12"
lscolors = { version = "0.17", default-features = false }
lsp-server = "0.7.8"
lsp-types = { version = "0.97.0", features = ["proposed"] }
lsp-textdocument = "0.4.1"
lsp-textdocument = "0.4.2"
mach2 = "0.4"
md5 = { version = "0.10", package = "md-5" }
miette = "7.5"
@@ -127,7 +127,7 @@ pathdiff = "0.2"
percent-encoding = "2"
pretty_assertions = "1.4"
print-positions = "0.6"
proc-macro-error = { version = "1.0", default-features = false }
proc-macro-error2 = "2.0"
proc-macro2 = "1.0"
procfs = "0.17.0"
pwd = "1.3"
@@ -138,7 +138,7 @@ quote = "1.0"
rand = "0.8"
getrandom = "0.2" # pick same version that rand requires
rand_chacha = "0.3.1"
ratatui = "0.26"
ratatui = "0.29"
rayon = "1.10"
reedline = "0.38.0"
rmp = "0.8"
@@ -147,7 +147,7 @@ roxmltree = "0.20"
rstest = { version = "0.23", default-features = false }
rstest_reuse = "0.7"
rusqlite = "0.31"
rust-embed = "8.5.0"
rust-embed = "8.6.0"
scopeguard = { version = "1.2.0" }
serde = { version = "1.0" }
serde_json = "1.0"
@@ -155,6 +155,8 @@ serde_urlencoded = "0.7.1"
serde_yaml = "0.9.33"
sha2 = "0.10"
strip-ansi-escapes = "0.2.0"
strum = "0.26"
strum_macros = "0.26"
syn = "2.0"
sysinfo = "0.33"
tabled = { version = "0.17.0", default-features = false }
@@ -294,7 +296,7 @@ system-clipboard = [
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]

# SQLite commands for nushell
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite", "nu-std/sqlite"]

[profile.release]
opt-level = "s" # Optimize for size
@@ -324,7 +326,7 @@ bench = false
# To use a development version of a dependency please use a global override here
# changing versions in each sub-crate of the workspace is tedious
[patch.crates-io]
# reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}

# Run all benchmarks with `cargo bench`
@@ -1,7 +1,6 @@
use nu_cli::{eval_source, evaluate_commands};
use nu_plugin_core::{Encoder, EncodingType};
use nu_plugin_protocol::{PluginCallResponse, PluginOutput};

use nu_protocol::{
    engine::{EngineState, Stack},
    PipelineData, Signals, Span, Spanned, Value,
@@ -9,12 +8,11 @@ use nu_protocol::{
use nu_std::load_standard_library;
use nu_utils::{get_default_config, get_default_env};
use std::{
    fmt::Write,
    hint::black_box,
    rc::Rc,
    sync::{atomic::AtomicBool, Arc},
};

use std::hint::black_box;

use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, IntoBenchmarks};

fn load_bench_commands() -> EngineState {
@@ -141,19 +139,16 @@ fn bench_load_standard_lib() -> impl IntoBenchmarks {
    })]
}

fn create_flat_record_string(n: i32) -> String {
    let mut s = String::from("let record = {");
fn create_flat_record_string(n: usize) -> String {
    let mut s = String::from("let record = { ");
    for i in 0..n {
        s.push_str(&format!("col_{}: {}", i, i));
        if i < n - 1 {
            s.push_str(", ");
        }
        write!(s, "col_{i}: {i}, ").unwrap();
    }
    s.push('}');
    s
}

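For orientation, a small sketch of what the reworked helper above now generates (not part of the diff; it assumes `create_flat_record_string` is in scope and uses n = 3). Note the trailing ", " kept before the closing brace, which Nushell accepts in record literals:

    // Hypothetical check of the generated setup source for n = 3.
    let src = create_flat_record_string(3);
    assert_eq!(src, "let record = { col_0: 0, col_1: 1, col_2: 2, }");
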
fn create_nested_record_string(depth: i32) -> String {
fn create_nested_record_string(depth: usize) -> String {
    let mut s = String::from("let record = {");
    for _ in 0..depth {
        s.push_str("col: {");
@@ -166,7 +161,7 @@ fn create_nested_record_string(depth: i32) -> String {
    s
}

fn create_example_table_nrows(n: i32) -> String {
fn create_example_table_nrows(n: usize) -> String {
    let mut s = String::from("let table = [[foo bar baz]; ");
    for i in 0..n {
        s.push_str(&format!("[0, 1, {i}]"));
@@ -178,7 +173,7 @@ fn create_example_table_nrows(n: i32) -> String {
    s
}

fn bench_record_create(n: i32) -> impl IntoBenchmarks {
fn bench_record_create(n: usize) -> impl IntoBenchmarks {
    bench_command(
        &format!("record_create_{n}"),
        &create_flat_record_string(n),
@@ -187,7 +182,7 @@ fn bench_record_create(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_record_flat_access(n: i32) -> impl IntoBenchmarks {
fn bench_record_flat_access(n: usize) -> impl IntoBenchmarks {
    let setup_command = create_flat_record_string(n);
    let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
    bench_command(
@@ -198,10 +193,10 @@ fn bench_record_flat_access(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_record_nested_access(n: i32) -> impl IntoBenchmarks {
fn bench_record_nested_access(n: usize) -> impl IntoBenchmarks {
    let setup_command = create_nested_record_string(n);
    let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
    let nested_access = ".col".repeat(n as usize);
    let nested_access = ".col".repeat(n);
    bench_command(
        &format!("record_nested_access_{n}"),
        &format!("$record{} | ignore", nested_access),
@@ -210,7 +205,18 @@ fn bench_record_nested_access(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_table_create(n: i32) -> impl IntoBenchmarks {
fn bench_record_insert(n: usize, m: usize) -> impl IntoBenchmarks {
    let setup_command = create_flat_record_string(n);
    let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
    let mut insert = String::from("$record");
    for i in n..(n + m) {
        write!(insert, " | insert col_{i} {i}").unwrap();
    }
    insert.push_str(" | ignore");
    bench_command(&format!("record_insert_{n}_{m}"), &insert, stack, engine)
}

fn bench_table_create(n: usize) -> impl IntoBenchmarks {
    bench_command(
        &format!("table_create_{n}"),
        &create_example_table_nrows(n),
@@ -219,7 +225,7 @@ fn bench_table_create(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_table_get(n: i32) -> impl IntoBenchmarks {
fn bench_table_get(n: usize) -> impl IntoBenchmarks {
    let setup_command = create_example_table_nrows(n);
    let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
    bench_command(
@@ -230,7 +236,7 @@ fn bench_table_get(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_table_select(n: i32) -> impl IntoBenchmarks {
fn bench_table_select(n: usize) -> impl IntoBenchmarks {
    let setup_command = create_example_table_nrows(n);
    let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
    bench_command(
@@ -241,7 +247,29 @@ fn bench_table_select(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
fn bench_table_insert_row(n: usize, m: usize) -> impl IntoBenchmarks {
    let setup_command = create_example_table_nrows(n);
    let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
    let mut insert = String::from("$table");
    for i in n..(n + m) {
        write!(insert, " | insert {i} {{ foo: 0, bar: 1, baz: {i} }}").unwrap();
    }
    insert.push_str(" | ignore");
    bench_command(&format!("table_insert_row_{n}_{m}"), &insert, stack, engine)
}

fn bench_table_insert_col(n: usize, m: usize) -> impl IntoBenchmarks {
    let setup_command = create_example_table_nrows(n);
    let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
    let mut insert = String::from("$table");
    for i in 0..m {
        write!(insert, " | insert col_{i} {i}").unwrap();
    }
    insert.push_str(" | ignore");
    bench_command(&format!("table_insert_col_{n}_{m}"), &insert, stack, engine)
}

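As a rough guide (not stated in the diff itself), these are the Nushell pipelines the new insert benchmarks end up timing, written out for small example arguments:

    // Illustrative expansions (hypothetical arguments):
    //   bench_record_insert(2, 2)    times "$record | insert col_2 2 | insert col_3 3 | ignore"
    //   bench_table_insert_row(2, 1) times "$table | insert 2 { foo: 0, bar: 1, baz: 2 } | ignore"
    //   bench_table_insert_col(2, 1) times "$table | insert col_0 0 | ignore"
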
fn bench_eval_interleave(n: usize) -> impl IntoBenchmarks {
    let engine = setup_engine();
    let stack = Stack::new();
    bench_command(
@@ -252,7 +280,7 @@ fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
fn bench_eval_interleave_with_interrupt(n: usize) -> impl IntoBenchmarks {
    let mut engine = setup_engine();
    engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
    let stack = Stack::new();
@@ -264,7 +292,7 @@ fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_eval_for(n: i32) -> impl IntoBenchmarks {
fn bench_eval_for(n: usize) -> impl IntoBenchmarks {
    let engine = setup_engine();
    let stack = Stack::new();
    bench_command(
@@ -275,7 +303,7 @@ fn bench_eval_for(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_eval_each(n: i32) -> impl IntoBenchmarks {
fn bench_eval_each(n: usize) -> impl IntoBenchmarks {
    let engine = setup_engine();
    let stack = Stack::new();
    bench_command(
@@ -286,7 +314,7 @@ fn bench_eval_each(n: i32) -> impl IntoBenchmarks {
    )
}

fn bench_eval_par_each(n: i32) -> impl IntoBenchmarks {
fn bench_eval_par_each(n: usize) -> impl IntoBenchmarks {
    let engine = setup_engine();
    let stack = Stack::new();
    bench_command(
@@ -427,6 +455,14 @@ tango_benchmarks!(
    bench_record_nested_access(32),
    bench_record_nested_access(64),
    bench_record_nested_access(128),
    bench_record_insert(1, 1),
    bench_record_insert(10, 1),
    bench_record_insert(100, 1),
    bench_record_insert(1000, 1),
    bench_record_insert(1, 10),
    bench_record_insert(10, 10),
    bench_record_insert(100, 10),
    bench_record_insert(1000, 10),
    // Table
    bench_table_create(1),
    bench_table_create(10),
@@ -440,6 +476,22 @@ tango_benchmarks!(
    bench_table_select(10),
    bench_table_select(100),
    bench_table_select(1_000),
    bench_table_insert_row(1, 1),
    bench_table_insert_row(10, 1),
    bench_table_insert_row(100, 1),
    bench_table_insert_row(1000, 1),
    bench_table_insert_row(1, 10),
    bench_table_insert_row(10, 10),
    bench_table_insert_row(100, 10),
    bench_table_insert_row(1000, 10),
    bench_table_insert_col(1, 1),
    bench_table_insert_col(10, 1),
    bench_table_insert_col(100, 1),
    bench_table_insert_col(1000, 1),
    bench_table_insert_col(1, 10),
    bench_table_insert_col(10, 10),
    bench_table_insert_col(100, 10),
    bench_table_insert_col(1000, 10),
    // Eval
    // Interleave
    bench_eval_interleave(100),
@@ -40,6 +40,7 @@ miette = { workspace = true, features = ["fancy-no-backtrace"] }
nucleo-matcher = { workspace = true }
percent-encoding = { workspace = true }
sysinfo = { workspace = true }
strum = { workspace = true }
unicode-segmentation = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
which = { workspace = true }
@@ -49,4 +50,4 @@ plugin = ["nu-plugin-engine"]
system-clipboard = ["reedline/system_clipboard"]

[lints]
workspace = true
workspace = true
@@ -29,7 +29,7 @@ impl Command for SubCommand {
            .required(
                "str",
                SyntaxShape::String,
                "the string to perform the operation with",
                "The string to perform the operation with.",
            )
            .category(Category::Core)
    }
@@ -18,7 +18,7 @@ impl Command for SubCommand {
                "set the current cursor position to the end of the buffer",
                Some('e'),
            )
            .optional("pos", SyntaxShape::Int, "Cursor position to be set")
            .optional("pos", SyntaxShape::Int, "Cursor position to be set.")
            .category(Category::Core)
    }

@@ -21,7 +21,7 @@ impl Command for HistoryImport {
    }

    fn description(&self) -> &str {
        "Import command line history"
        "Import command line history."
    }

    fn extra_description(&self) -> &str {
87  crates/nu-cli/src/completions/attribute_completions.rs  (new file)
@@ -0,0 +1,87 @@
use super::{completion_options::NuMatcher, SemanticSuggestion};
use crate::{
    completions::{Completer, CompletionOptions},
    SuggestionKind,
};
use nu_protocol::{
    engine::{Stack, StateWorkingSet},
    Span,
};
use reedline::Suggestion;

pub struct AttributeCompletion;
pub struct AttributableCompletion;

impl Completer for AttributeCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        _stack: &Stack,
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let mut matcher = NuMatcher::new(prefix, options);

        let attr_commands =
            working_set.find_commands_by_predicate(|s| s.starts_with(b"attr "), true);

        for (name, desc, ty) in attr_commands {
            let name = name.strip_prefix(b"attr ").unwrap_or(&name);
            matcher.add_semantic_suggestion(SemanticSuggestion {
                suggestion: Suggestion {
                    value: String::from_utf8_lossy(name).into_owned(),
                    description: desc,
                    style: None,
                    extra: None,
                    span: reedline::Span {
                        start: span.start - offset,
                        end: span.end - offset,
                    },
                    append_whitespace: false,
                },
                kind: Some(SuggestionKind::Command(ty)),
            });
        }

        matcher.results()
    }
}

impl Completer for AttributableCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        _stack: &Stack,
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let mut matcher = NuMatcher::new(prefix, options);

        for s in ["def", "extern", "export def", "export extern"] {
            let decl_id = working_set
                .find_decl(s.as_bytes())
                .expect("internal error, builtin declaration not found");
            let cmd = working_set.get_decl(decl_id);
            matcher.add_semantic_suggestion(SemanticSuggestion {
                suggestion: Suggestion {
                    value: cmd.name().into(),
                    description: Some(cmd.description().into()),
                    style: None,
                    extra: None,
                    span: reedline::Span {
                        start: span.start - offset,
                        end: span.end - offset,
                    },
                    append_whitespace: false,
                },
                kind: Some(SuggestionKind::Command(cmd.command_type())),
            });
        }

        matcher.results()
    }
}
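As a sketch of the intent (an assumption drawn from the code above, not stated elsewhere in the diff): any declaration registered under an `attr `-prefixed name is offered with that prefix stripped, which is what makes it completable in an attribute position.

    // Hypothetical name, only to illustrate the prefix stripping performed above.
    let name: &[u8] = b"attr example";
    let shown = name.strip_prefix(b"attr ").unwrap_or(name);
    assert_eq!(shown, b"example");
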
@@ -12,10 +12,9 @@ pub trait Completer {
        &mut self,
        working_set: &StateWorkingSet,
        stack: &Stack,
        prefix: &[u8],
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion>;
}
@@ -30,8 +29,14 @@ pub struct SemanticSuggestion {
#[derive(Clone, Debug, PartialEq)]
pub enum SuggestionKind {
    Command(nu_protocol::engine::CommandType),
    Type(nu_protocol::Type),
    Value(nu_protocol::Type),
    CellPath,
    Directory,
    File,
    Flag,
    Module,
    Operator,
    Variable,
}

impl From<Suggestion> for SemanticSuggestion {
137  crates/nu-cli/src/completions/cell_path_completions.rs  (new file)
@@ -0,0 +1,137 @@
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{
    ast::{Expr, Expression, FullCellPath, PathMember},
    engine::{Stack, StateWorkingSet},
    eval_const::eval_constant,
    ShellError, Span, Value,
};
use reedline::Suggestion;

use super::completion_options::NuMatcher;

pub struct CellPathCompletion<'a> {
    pub full_cell_path: &'a FullCellPath,
    pub position: usize,
}

fn prefix_from_path_member(member: &PathMember, pos: usize) -> (String, Span) {
    let (prefix_str, start) = match member {
        PathMember::String { val, span, .. } => (val.clone(), span.start),
        PathMember::Int { val, span, .. } => (val.to_string(), span.start),
    };
    let prefix_str = prefix_str
        .get(..pos + 1 - start)
        .map(str::to_string)
        .unwrap_or(prefix_str);
    (prefix_str, Span::new(start, pos + 1))
}

impl Completer for CellPathCompletion<'_> {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        stack: &Stack,
        _prefix: impl AsRef<str>,
        _span: Span,
        offset: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let mut prefix_str = String::new();
        // position at dots, e.g. `$env.config.<TAB>`
        let mut span = Span::new(self.position + 1, self.position + 1);
        let mut path_member_num_before_pos = 0;
        for member in self.full_cell_path.tail.iter() {
            if member.span().end <= self.position {
                path_member_num_before_pos += 1;
            } else if member.span().contains(self.position) {
                (prefix_str, span) = prefix_from_path_member(member, self.position);
                break;
            }
        }

        let current_span = reedline::Span {
            start: span.start - offset,
            end: span.end - offset,
        };

        let mut matcher = NuMatcher::new(prefix_str, options);
        let path_members = self
            .full_cell_path
            .tail
            .get(0..path_member_num_before_pos)
            .unwrap_or_default();
        let value = eval_cell_path(
            working_set,
            stack,
            &self.full_cell_path.head,
            path_members,
            span,
        )
        .unwrap_or_default();

        for suggestion in get_suggestions_by_value(&value, current_span) {
            matcher.add_semantic_suggestion(suggestion);
        }
        matcher.results()
    }
}

/// Follow cell path to get the value
/// NOTE: This is a relatively lightweight implementation,
/// so it may fail to get the exact value when the expression is complicated.
/// One failing example would be `[$foo].0`
pub(crate) fn eval_cell_path(
    working_set: &StateWorkingSet,
    stack: &Stack,
    head: &Expression,
    path_members: &[PathMember],
    span: Span,
) -> Result<Value, ShellError> {
    // evaluate the head expression to get its value
    let head_value = if let Expr::Var(var_id) = head.expr {
        working_set
            .get_variable(var_id)
            .const_val
            .to_owned()
            .map_or_else(
                || eval_variable(working_set.permanent_state, stack, var_id, span),
                Ok,
            )
    } else {
        eval_constant(working_set, head)
    }?;
    head_value.follow_cell_path(path_members, false)
}

fn get_suggestions_by_value(
    value: &Value,
    current_span: reedline::Span,
) -> Vec<SemanticSuggestion> {
    let to_suggestion = |s: String, v: Option<&Value>| SemanticSuggestion {
        suggestion: Suggestion {
            value: s,
            span: current_span,
            description: v.map(|v| v.get_type().to_string()),
            ..Suggestion::default()
        },
        kind: Some(SuggestionKind::CellPath),
    };
    match value {
        Value::Record { val, .. } => val
            .columns()
            .map(|s| to_suggestion(s.to_string(), val.get(s)))
            .collect(),
        Value::List { vals, .. } => get_columns(vals.as_slice())
            .into_iter()
            .map(|s| {
                let sub_val = vals
                    .first()
                    .and_then(|v| v.as_record().ok())
                    .and_then(|rv| rv.get(&s));
                to_suggestion(s, sub_val)
            })
            .collect(),
        _ => vec![],
    }
}
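A rough picture of the data flow (hypothetical input, not part of the diff): for a line like `$env.config.ta<TAB>`, the completer takes "ta" as the prefix via `prefix_from_path_member`, evaluates `$env.config` through `eval_cell_path`, and then `get_suggestions_by_value` turns each column of the resulting record into a `CellPath` suggestion described by that column's type. A test-style sketch, assuming it lives inside this module so the private helper is visible:

    // Hypothetical sketch: record columns become CellPath suggestions, in order.
    use nu_protocol::{record, Value};
    let value = Value::test_record(record! {
        "table" => Value::test_record(record! {}),
        "history" => Value::test_record(record! {}),
    });
    let span = reedline::Span { start: 0, end: 0 };
    let names: Vec<String> = get_suggestions_by_value(&value, span)
        .into_iter()
        .map(|s| s.suggestion.value)
        .collect();
    assert_eq!(names, vec!["table".to_string(), "history".to_string()]);
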
@@ -4,9 +4,8 @@ use crate::{
    completions::{Completer, CompletionOptions},
    SuggestionKind,
};
use nu_parser::FlatShape;
use nu_protocol::{
    engine::{CachedFile, Stack, StateWorkingSet},
    engine::{CommandType, Stack, StateWorkingSet},
    Span,
};
use reedline::Suggestion;
@@ -14,24 +13,13 @@ use reedline::Suggestion;
use super::{completion_options::NuMatcher, SemanticSuggestion};

pub struct CommandCompletion {
    flattened: Vec<(Span, FlatShape)>,
    flat_shape: FlatShape,
    force_completion_after_space: bool,
    /// Whether to include internal commands
    pub internals: bool,
    /// Whether to include external commands
    pub externals: bool,
}

impl CommandCompletion {
    pub fn new(
        flattened: Vec<(Span, FlatShape)>,
        flat_shape: FlatShape,
        force_completion_after_space: bool,
    ) -> Self {
        Self {
            flattened,
            flat_shape,
            force_completion_after_space,
        }
    }

    fn external_command_completion(
        &self,
        working_set: &StateWorkingSet,
@@ -71,6 +59,9 @@ impl CommandCompletion {
            if suggs.contains_key(&value) {
                continue;
            }
            // TODO: check name matching before a relative heavy IO involved
            // `is_executable` for performance consideration, should avoid
            // duplicated `match_aux` call for matched items in the future
            if matcher.matches(&name) && is_executable::is_executable(item.path()) {
                // If there's an internal command with the same name, adds ^cmd to the
                // matcher so that both the internal and external command are included
@@ -84,8 +75,7 @@ impl CommandCompletion {
                        append_whitespace: true,
                        ..Default::default()
                    },
                    // TODO: is there a way to create a test?
                    kind: None,
                    kind: Some(SuggestionKind::Command(CommandType::External)),
                },
            );
        }
@@ -97,46 +87,50 @@ impl CommandCompletion {

        suggs
    }
}

    fn complete_commands(
        &self,
impl Completer for CommandCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        _stack: &Stack,
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        find_externals: bool,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let partial = working_set.get_span_contents(span);
        let mut matcher = NuMatcher::new(String::from_utf8_lossy(partial), options.clone());
        let mut matcher = NuMatcher::new(prefix, options);

        let sugg_span = reedline::Span::new(span.start - offset, span.end - offset);

        let mut internal_suggs = HashMap::new();
        let filtered_commands = working_set.find_commands_by_predicate(
            |name| {
                let name = String::from_utf8_lossy(name);
                matcher.add(&name, name.to_string())
            },
            true,
        );
        for (name, description, typ) in filtered_commands {
            let name = String::from_utf8_lossy(&name);
            internal_suggs.insert(
                name.to_string(),
                SemanticSuggestion {
                    suggestion: Suggestion {
                        value: name.to_string(),
                        description,
                        span: sugg_span,
                        append_whitespace: true,
                        ..Suggestion::default()
                    },
                    kind: Some(SuggestionKind::Command(typ)),
        if self.internals {
            let filtered_commands = working_set.find_commands_by_predicate(
                |name| {
                    let name = String::from_utf8_lossy(name);
                    matcher.add(&name, name.to_string())
                },
                true,
            );
            for (name, description, typ) in filtered_commands {
                let name = String::from_utf8_lossy(&name);
                internal_suggs.insert(
                    name.to_string(),
                    SemanticSuggestion {
                        suggestion: Suggestion {
                            value: name.to_string(),
                            description,
                            span: sugg_span,
                            append_whitespace: true,
                            ..Suggestion::default()
                        },
                        kind: Some(SuggestionKind::Command(typ)),
                    },
                );
            }
        }

        let mut external_suggs = if find_externals {
        let mut external_suggs = if self.externals {
            self.external_command_completion(
                working_set,
                sugg_span,
@@ -159,179 +153,3 @@ impl CommandCompletion {
        res
    }
}

impl Completer for CommandCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        _stack: &Stack,
        _prefix: &[u8],
        span: Span,
        offset: usize,
        pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let last = self
            .flattened
            .iter()
            .rev()
            .skip_while(|x| x.0.end > pos)
            .take_while(|x| {
                matches!(
                    x.1,
                    FlatShape::InternalCall(_)
                        | FlatShape::External
                        | FlatShape::ExternalArg
                        | FlatShape::Literal
                        | FlatShape::String
                )
            })
            .last();

        // The last item here would be the earliest shape that could possible by part of this subcommand
        let subcommands = if let Some(last) = last {
            self.complete_commands(
                working_set,
                Span::new(last.0.start, pos),
                offset,
                false,
                options,
            )
        } else {
            vec![]
        };

        if !subcommands.is_empty() {
            return subcommands;
        }

        let config = working_set.get_config();
        if matches!(self.flat_shape, nu_parser::FlatShape::External)
            || matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
            || ((span.end - span.start) == 0)
            || is_passthrough_command(working_set.delta.get_file_contents())
        {
            // we're in a gap or at a command
            if working_set.get_span_contents(span).is_empty() && !self.force_completion_after_space
            {
                return vec![];
            }
            self.complete_commands(
                working_set,
                span,
                offset,
                config.completions.external.enable,
                options,
            )
        } else {
            vec![]
        }
    }
}

pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
    match contents.get(start..) {
        Some(contents) => {
            contents
                .iter()
                .take_while(|x| x.is_ascii_whitespace())
                .count()
                + start
        }
        None => start,
    }
}

pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool {
    for cached_file in working_set_file_contents {
        let contents = &cached_file.content;
        let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
        let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);

        let cur_pos = find_non_whitespace_index(contents, last_pipe_pos);

        let result = match contents.get(cur_pos..) {
            Some(contents) => contents.starts_with(b"sudo ") || contents.starts_with(b"doas "),
            None => false,
        };
        if result {
            return true;
        }
    }
    false
}

#[cfg(test)]
mod command_completions_tests {
    use super::*;
    use nu_protocol::engine::EngineState;
    use std::sync::Arc;

    #[test]
    fn test_find_non_whitespace_index() {
        let commands = [
            (" hello", 4),
            ("sudo ", 0),
            (" sudo ", 2),
            (" sudo ", 2),
            (" hello ", 1),
            (" hello ", 3),
            (" hello | sudo ", 4),
            (" sudo|sudo", 5),
            ("sudo | sudo ", 0),
            (" hello sud", 1),
        ];
        for (idx, ele) in commands.iter().enumerate() {
            let index = find_non_whitespace_index(ele.0.as_bytes(), 0);
            assert_eq!(index, ele.1, "Failed on index {}", idx);
        }
    }

    #[test]
    fn test_is_last_command_passthrough() {
        let commands = [
            (" hello", false),
            (" sudo ", true),
            ("sudo ", true),
            (" hello", false),
            (" sudo", false),
            (" sudo ", true),
            (" sudo ", true),
            (" sudo ", true),
            (" hello ", false),
            (" hello | sudo ", true),
            (" sudo|sudo", false),
            ("sudo | sudo ", true),
            (" hello sud", false),
            (" sudo | sud ", false),
            (" sudo|sudo ", true),
            (" sudo | sudo ls | sudo ", true),
        ];
        for (idx, ele) in commands.iter().enumerate() {
            let input = ele.0.as_bytes();

            let mut engine_state = EngineState::new();
            engine_state.add_file("test.nu".into(), Arc::new([]));

            let delta = {
                let mut working_set = StateWorkingSet::new(&engine_state);
                let _ = working_set.add_file("child.nu".into(), input);
                working_set.render()
            };

            let result = engine_state.merge_delta(delta);
            assert!(
                result.is_ok(),
                "Merge delta has failed: {}",
                result.err().unwrap()
            );

            let is_passthrough_command = is_passthrough_command(engine_state.get_file_contents());
            assert_eq!(
                is_passthrough_command, ele.1,
                "index for '{}': {}",
                ele.0, idx
            );
        }
    }
}
@@ -1,22 +1,26 @@
use crate::completions::{
    CommandCompletion, Completer, CompletionOptions, CustomCompletion, DirectoryCompletion,
    DotNuCompletion, FileCompletion, FlagCompletion, OperatorCompletion, VariableCompletion,
    AttributableCompletion, AttributeCompletion, CellPathCompletion, CommandCompletion, Completer,
    CompletionOptions, CustomCompletion, DirectoryCompletion, DotNuCompletion, FileCompletion,
    FlagCompletion, OperatorCompletion, VariableCompletion,
};
use log::debug;
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
use nu_engine::eval_block;
use nu_parser::{flatten_expression, parse, FlatShape};
use nu_parser::{flatten_expression, parse};
use nu_protocol::{
    ast::{Expr, Expression, FindMapResult, Traverse},
    ast::{Argument, Block, Expr, Expression, FindMapResult, Traverse},
    debugger::WithoutDebug,
    engine::{Closure, EngineState, Stack, StateWorkingSet},
    PipelineData, Span, Value,
    PipelineData, Span, Type, Value,
};
use reedline::{Completer as ReedlineCompleter, Suggestion};
use std::{str, sync::Arc};

use super::base::{SemanticSuggestion, SuggestionKind};

/// Used as the function `f` in find_map Traverse
///
/// returns the inner-most pipeline_element of interest
/// i.e. the one that contains given position and needs completion
fn find_pipeline_element_by_position<'a>(
    expr: &'a Expression,
    working_set: &'a StateWorkingSet,
@@ -36,7 +40,6 @@ fn find_pipeline_element_by_position<'a>(
            .or(Some(expr))
            .map(FindMapResult::Found)
            .unwrap_or_default(),
        // TODO: clear separation of internal/external completion logic
        Expr::ExternalCall(head, arguments) => arguments
            .iter()
            .find_map(|arg| arg.expr().find_map(working_set, &closure))
@@ -58,16 +61,88 @@ fn find_pipeline_element_by_position<'a>(
            .map(FindMapResult::Found)
            .unwrap_or_default(),
        Expr::Var(_) => FindMapResult::Found(expr),
        Expr::AttributeBlock(ab) => ab
            .attributes
            .iter()
            .map(|attr| &attr.expr)
            .chain(Some(ab.item.as_ref()))
            .find_map(|expr| expr.find_map(working_set, &closure))
            .or(Some(expr))
            .map(FindMapResult::Found)
            .unwrap_or_default(),
        _ => FindMapResult::Continue,
    }
}

/// Before completion, an additional character `a` is added to the source as a placeholder for correct parsing results.
/// This function helps to strip it
fn strip_placeholder_if_any<'a>(
    working_set: &'a StateWorkingSet,
    span: &Span,
    strip: bool,
) -> (Span, &'a [u8]) {
    let new_span = if strip {
        let new_end = std::cmp::max(span.end - 1, span.start);
        Span::new(span.start, new_end)
    } else {
        span.to_owned()
    };
    let prefix = working_set.get_span_contents(new_span);
    (new_span, prefix)
}

/// Given a span with noise,
/// 1. Call `rsplit` to get the last token
/// 2. Strip the last placeholder from the token
fn strip_placeholder_with_rsplit<'a>(
    working_set: &'a StateWorkingSet,
    span: &Span,
    predicate: impl FnMut(&u8) -> bool,
    strip: bool,
) -> (Span, &'a [u8]) {
    let span_content = working_set.get_span_contents(*span);
    let mut prefix = span_content
        .rsplit(predicate)
        .next()
        .unwrap_or(span_content);
    let start = span.end.saturating_sub(prefix.len());
    if strip && !prefix.is_empty() {
        prefix = &prefix[..prefix.len() - 1];
    }
    let end = start + prefix.len();
    (Span::new(start, end), prefix)
}

#[derive(Clone)]
|
||||
pub struct NuCompleter {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
}
|
||||
|
||||
/// Common arguments required for Completer
|
||||
struct Context<'a> {
|
||||
working_set: &'a StateWorkingSet<'a>,
|
||||
span: Span,
|
||||
prefix: &'a [u8],
|
||||
offset: usize,
|
||||
}
|
||||
|
||||
impl Context<'_> {
|
||||
fn new<'a>(
|
||||
working_set: &'a StateWorkingSet,
|
||||
span: Span,
|
||||
prefix: &'a [u8],
|
||||
offset: usize,
|
||||
) -> Context<'a> {
|
||||
Context {
|
||||
working_set,
|
||||
span,
|
||||
prefix,
|
||||
offset,
|
||||
}
|
||||
}
|
||||
}
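// A self-contained sketch of the placeholder-stripping idea used by the two helpers above,
// assuming a plain byte slice in place of a `StateWorkingSet` span: take the last token after
// `rsplit`, then drop the trailing placeholder byte when one was appended before parsing.
fn strip_placeholder_sketch(content: &[u8], strip: bool) -> &[u8] {
    let mut prefix = content.rsplit(|b| *b == b' ').next().unwrap_or(content);
    if strip && !prefix.is_empty() {
        prefix = &prefix[..prefix.len() - 1];
    }
    prefix
}

#[test]
fn placeholder_is_stripped_from_the_last_token() {
    // "open fia" is "open fi" with the placeholder `a` appended.
    assert_eq!(strip_placeholder_sketch(b"open fia", true), &b"fi"[..]);
    assert_eq!(strip_placeholder_sketch(b"open fi", false), &b"fi"[..]);
}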

impl NuCompleter {
    pub fn new(engine_state: Arc<EngineState>, stack: Arc<Stack>) -> Self {
        Self {
@ -76,19 +151,391 @@ impl NuCompleter {
        }
    }

    pub fn fetch_completions_at(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
        self.completion_helper(line, pos)
    pub fn fetch_completions_at(&self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
        let mut working_set = StateWorkingSet::new(&self.engine_state);
        let offset = working_set.next_span_start();
        // TODO: Callers should be trimming the line themselves
        let line = if line.len() > pos { &line[..pos] } else { line };
        let block = parse(
            &mut working_set,
            Some("completer"),
            // Add a placeholder `a` to the end
            format!("{}a", line).as_bytes(),
            false,
        );
        self.fetch_completions_by_block(block, &working_set, pos, offset, line, true)
    }

    /// For completion in LSP server.
    /// We don't truncate the contents in order
    /// to complete the definitions after the cursor.
    ///
    /// And we avoid the placeholder to reuse the parsed blocks
    /// cached while handling other LSP requests, e.g. diagnostics
    pub fn fetch_completions_within_file(
        &self,
        filename: &str,
        pos: usize,
        contents: &str,
    ) -> Vec<SemanticSuggestion> {
        let mut working_set = StateWorkingSet::new(&self.engine_state);
        let block = parse(&mut working_set, Some(filename), contents.as_bytes(), false);
        let Some(file_span) = working_set.get_span_for_filename(filename) else {
            return vec![];
        };
        let offset = file_span.start;
        self.fetch_completions_by_block(block.clone(), &working_set, pos, offset, contents, false)
    }

    fn fetch_completions_by_block(
        &self,
        block: Arc<Block>,
        working_set: &StateWorkingSet,
        pos: usize,
        offset: usize,
        contents: &str,
        extra_placeholder: bool,
    ) -> Vec<SemanticSuggestion> {
        // Adjust offset so that the spans of the suggestions will start at the right
        // place even with `only_buffer_difference: true`
        let mut pos_to_search = pos + offset;
        if !extra_placeholder {
            pos_to_search = pos_to_search.saturating_sub(1);
        }
        let Some(element_expression) = block.find_map(working_set, &|expr: &Expression| {
            find_pipeline_element_by_position(expr, working_set, pos_to_search)
        }) else {
            return vec![];
        };

        // text of element_expression
        let start_offset = element_expression.span.start - offset;
        let Some(text) = contents.get(start_offset..pos) else {
            return vec![];
        };
        self.complete_by_expression(
            working_set,
            element_expression,
            offset,
            pos_to_search,
            text,
            extra_placeholder,
        )
    }
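// A hedged usage sketch of the entry point defined above: build a `NuCompleter` from an engine
// state and a stack, then ask for completions at the cursor. This mirrors the `completer_tests`
// module further down; with a bare `EngineState::new()` no commands are registered, so the
// resulting list may well be empty.
fn fetch_at_cursor_sketch() -> Vec<String> {
    let engine_state = EngineState::new();
    let completer = NuCompleter::new(Arc::new(engine_state), Arc::new(Stack::new()));
    let line = "ls | get na";
    completer
        .fetch_completions_at(line, line.len())
        .into_iter()
        .map(|s| s.suggestion.value)
        .collect()
}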
|
||||
|
||||
/// Complete given the expression of interest
|
||||
/// Usually, the expression is get from `find_pipeline_element_by_position`
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `offset` - start offset of current working_set span
|
||||
/// * `pos` - cursor position, should be > offset
|
||||
/// * `prefix_str` - all the text before the cursor, within the `element_expression`
|
||||
/// * `strip` - whether to strip the extra placeholder from a span
|
||||
fn complete_by_expression(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
element_expression: &Expression,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
prefix_str: &str,
|
||||
strip: bool,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut suggestions: Vec<SemanticSuggestion> = vec![];
|
||||
|
||||
match &element_expression.expr {
|
||||
Expr::Var(_) => {
|
||||
return self.variable_names_completion_helper(
|
||||
working_set,
|
||||
element_expression.span,
|
||||
offset,
|
||||
strip,
|
||||
);
|
||||
}
|
||||
Expr::FullCellPath(full_cell_path) => {
|
||||
// e.g. `$e<tab>` parsed as FullCellPath
|
||||
// but `$e.<tab>` without placeholder should be taken as cell_path
|
||||
if full_cell_path.tail.is_empty() && !prefix_str.ends_with('.') {
|
||||
return self.variable_names_completion_helper(
|
||||
working_set,
|
||||
element_expression.span,
|
||||
offset,
|
||||
strip,
|
||||
);
|
||||
} else {
|
||||
let mut cell_path_completer = CellPathCompletion {
|
||||
full_cell_path,
|
||||
position: if strip { pos - 1 } else { pos },
|
||||
};
|
||||
let ctx = Context::new(working_set, Span::unknown(), &[], offset);
|
||||
return self.process_completion(&mut cell_path_completer, &ctx);
|
||||
}
|
||||
}
|
||||
Expr::BinaryOp(lhs, op, _) => {
|
||||
if op.span.contains(pos) {
|
||||
let mut operator_completions = OperatorCompletion {
|
||||
left_hand_side: lhs.as_ref(),
|
||||
};
|
||||
let (new_span, prefix) = strip_placeholder_if_any(working_set, &op.span, strip);
|
||||
let ctx = Context::new(working_set, new_span, prefix, offset);
|
||||
let results = self.process_completion(&mut operator_completions, &ctx);
|
||||
if !results.is_empty() {
|
||||
return results;
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::AttributeBlock(ab) => {
|
||||
if let Some(span) = ab.attributes.iter().find_map(|attr| {
|
||||
let span = attr.expr.span;
|
||||
span.contains(pos).then_some(span)
|
||||
}) {
|
||||
let (new_span, prefix) = strip_placeholder_if_any(working_set, &span, strip);
|
||||
let ctx = Context::new(working_set, new_span, prefix, offset);
|
||||
return self.process_completion(&mut AttributeCompletion, &ctx);
|
||||
};
|
||||
let span = ab.item.span;
|
||||
if span.contains(pos) {
|
||||
let (new_span, prefix) = strip_placeholder_if_any(working_set, &span, strip);
|
||||
let ctx = Context::new(working_set, new_span, prefix, offset);
|
||||
return self.process_completion(&mut AttributableCompletion, &ctx);
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: user defined internal commands can have any length
|
||||
// e.g. `def "foo -f --ff bar"`, complete by line text
|
||||
// instead of relying on the parsing result in that case
|
||||
Expr::Call(_) | Expr::ExternalCall(_, _) => {
|
||||
let need_externals = !prefix_str.contains(' ');
|
||||
let need_internals = !prefix_str.starts_with('^');
|
||||
let mut span = element_expression.span;
|
||||
if !need_internals {
|
||||
span.start += 1;
|
||||
};
|
||||
suggestions.extend(self.command_completion_helper(
|
||||
working_set,
|
||||
span,
|
||||
offset,
|
||||
need_internals,
|
||||
need_externals,
|
||||
strip,
|
||||
))
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
// unfinished argument completion for commands
|
||||
match &element_expression.expr {
|
||||
Expr::Call(call) => {
|
||||
// NOTE: the argument to complete is not necessarily the last one
|
||||
// for lsp completion, we don't trim the text,
|
||||
// so that `def`s after pos can be completed
|
||||
for arg in call.arguments.iter() {
|
||||
let span = arg.span();
|
||||
if span.contains(pos) {
|
||||
// if customized completion specified, it has highest priority
|
||||
if let Some(decl_id) = arg.expr().and_then(|e| e.custom_completion) {
|
||||
// for `--foo <tab>` and `--foo=<tab>`, the arg span should be trimmed
|
||||
let (new_span, prefix) = if matches!(arg, Argument::Named(_)) {
|
||||
strip_placeholder_with_rsplit(
|
||||
working_set,
|
||||
&span,
|
||||
|b| *b == b'=' || *b == b' ',
|
||||
strip,
|
||||
)
|
||||
} else {
|
||||
strip_placeholder_if_any(working_set, &span, strip)
|
||||
};
|
||||
let ctx = Context::new(working_set, new_span, prefix, offset);
|
||||
|
||||
let mut completer = CustomCompletion::new(
|
||||
decl_id,
|
||||
prefix_str.into(),
|
||||
pos - offset,
|
||||
FileCompletion,
|
||||
);
|
||||
|
||||
suggestions.extend(self.process_completion(&mut completer, &ctx));
|
||||
break;
|
||||
}
|
||||
|
||||
// normal arguments completion
|
||||
let (new_span, prefix) =
|
||||
strip_placeholder_if_any(working_set, &span, strip);
|
||||
let ctx = Context::new(working_set, new_span, prefix, offset);
|
||||
let flag_completion_helper = || {
|
||||
let mut flag_completions = FlagCompletion {
|
||||
decl_id: call.decl_id,
|
||||
};
|
||||
self.process_completion(&mut flag_completions, &ctx)
|
||||
};
|
||||
suggestions.extend(match arg {
|
||||
// flags
|
||||
Argument::Named(_) | Argument::Unknown(_)
|
||||
if prefix.starts_with(b"-") =>
|
||||
{
|
||||
flag_completion_helper()
|
||||
}
|
||||
// only when `strip` == false
|
||||
Argument::Positional(_) if prefix == b"-" => flag_completion_helper(),
|
||||
// complete according to expression type and command head
|
||||
Argument::Positional(expr) => {
|
||||
let command_head = working_set.get_span_contents(call.head);
|
||||
self.argument_completion_helper(
|
||||
command_head,
|
||||
expr,
|
||||
&ctx,
|
||||
suggestions.is_empty(),
|
||||
)
|
||||
}
|
||||
_ => vec![],
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::ExternalCall(head, arguments) => {
|
||||
for (i, arg) in arguments.iter().enumerate() {
|
||||
let span = arg.expr().span;
|
||||
if span.contains(pos) {
|
||||
// e.g. `sudo l<tab>`
|
||||
// HACK: judge by index 0 is not accurate
|
||||
if i == 0 {
|
||||
let external_cmd = working_set.get_span_contents(head.span);
|
||||
if external_cmd == b"sudo" || external_cmd == b"doas" {
|
||||
let commands = self.command_completion_helper(
|
||||
working_set,
|
||||
span,
|
||||
offset,
|
||||
true,
|
||||
true,
|
||||
strip,
|
||||
);
|
||||
// flags of sudo/doas can still be completed by external completer
|
||||
if !commands.is_empty() {
|
||||
return commands;
|
||||
}
|
||||
}
|
||||
}
|
||||
// resort to external completer set in config
|
||||
let config = self.engine_state.get_config();
|
||||
if let Some(closure) = config.completions.external.completer.as_ref() {
|
||||
let mut text_spans: Vec<String> =
|
||||
flatten_expression(working_set, element_expression)
|
||||
.iter()
|
||||
.map(|(span, _)| {
|
||||
let bytes = working_set.get_span_contents(*span);
|
||||
String::from_utf8_lossy(bytes).to_string()
|
||||
})
|
||||
.collect();
|
||||
let mut new_span = span;
|
||||
// strip the placeholder
|
||||
if strip {
|
||||
if let Some(last) = text_spans.last_mut() {
|
||||
last.pop();
|
||||
new_span = Span::new(span.start, span.end.saturating_sub(1));
|
||||
}
|
||||
}
|
||||
if let Some(external_result) =
|
||||
self.external_completion(closure, &text_spans, offset, new_span)
|
||||
{
|
||||
suggestions.extend(external_result);
|
||||
return suggestions;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
// if no suggestions yet, fallback to file completion
|
||||
if suggestions.is_empty() {
|
||||
let (new_span, prefix) = strip_placeholder_with_rsplit(
|
||||
working_set,
|
||||
&element_expression.span,
|
||||
|c| *c == b' ',
|
||||
strip,
|
||||
);
|
||||
let ctx = Context::new(working_set, new_span, prefix, offset);
|
||||
suggestions.extend(self.process_completion(&mut FileCompletion, &ctx));
|
||||
}
|
||||
suggestions
|
||||
}
|
||||
|
||||
fn variable_names_completion_helper(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
strip: bool,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let (new_span, prefix) = strip_placeholder_if_any(working_set, &span, strip);
|
||||
if !prefix.starts_with(b"$") {
|
||||
return vec![];
|
||||
}
|
||||
let ctx = Context::new(working_set, new_span, prefix, offset);
|
||||
self.process_completion(&mut VariableCompletion, &ctx)
|
||||
}
|
||||
|
||||
fn command_completion_helper(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
internals: bool,
|
||||
externals: bool,
|
||||
strip: bool,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut command_completions = CommandCompletion {
|
||||
internals,
|
||||
externals,
|
||||
};
|
||||
let (new_span, prefix) = strip_placeholder_if_any(working_set, &span, strip);
|
||||
let ctx = Context::new(working_set, new_span, prefix, offset);
|
||||
self.process_completion(&mut command_completions, &ctx)
|
||||
}
|
||||
|
||||
fn argument_completion_helper(
|
||||
&self,
|
||||
command_head: &[u8],
|
||||
expr: &Expression,
|
||||
ctx: &Context,
|
||||
need_fallback: bool,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
// special commands
|
||||
match command_head {
|
||||
// complete module file/directory
|
||||
// TODO: if module file already specified,
|
||||
// should parse it to get modules/commands/consts to complete
|
||||
b"use" | b"export use" | b"overlay use" | b"source-env" => {
|
||||
return self.process_completion(&mut DotNuCompletion, ctx);
|
||||
}
|
||||
b"which" => {
|
||||
let mut completer = CommandCompletion {
|
||||
internals: true,
|
||||
externals: true,
|
||||
};
|
||||
return self.process_completion(&mut completer, ctx);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
// general positional arguments
|
||||
let file_completion_helper = || self.process_completion(&mut FileCompletion, ctx);
|
||||
match &expr.expr {
|
||||
Expr::Directory(_, _) => self.process_completion(&mut DirectoryCompletion, ctx),
|
||||
Expr::Filepath(_, _) | Expr::GlobPattern(_, _) => file_completion_helper(),
|
||||
// fallback to file completion if necessary
|
||||
_ if need_fallback => file_completion_helper(),
|
||||
_ => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
// Process the completion for a given completer
|
||||
fn process_completion<T: Completer>(
|
||||
&self,
|
||||
completer: &mut T,
|
||||
working_set: &StateWorkingSet,
|
||||
prefix: &[u8],
|
||||
new_span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
ctx: &Context,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let config = self.engine_state.get_config();
|
||||
|
||||
@ -99,18 +546,12 @@ impl NuCompleter {
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
debug!(
|
||||
"process_completion: prefix: {}, new_span: {new_span:?}, offset: {offset}, pos: {pos}",
|
||||
String::from_utf8_lossy(prefix)
|
||||
);
|
||||
|
||||
completer.fetch(
|
||||
working_set,
|
||||
ctx.working_set,
|
||||
&self.stack,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
pos,
|
||||
String::from_utf8_lossy(ctx.prefix),
|
||||
ctx.span,
|
||||
ctx.offset,
|
||||
&options,
|
||||
)
|
||||
}
|
||||
@ -170,360 +611,17 @@ impl NuCompleter {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
|
||||
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
||||
let offset = working_set.next_span_start();
|
||||
// TODO: Callers should be trimming the line themselves
|
||||
let line = if line.len() > pos { &line[..pos] } else { line };
|
||||
// Adjust offset so that the spans of the suggestions will start at the right
|
||||
// place even with `only_buffer_difference: true`
|
||||
let fake_offset = offset + line.len() - pos;
|
||||
let pos = offset + line.len();
|
||||
let initial_line = line.to_string();
|
||||
let mut line = line.to_string();
|
||||
line.push('a');
|
||||
|
||||
let config = self.engine_state.get_config();
|
||||
|
||||
let block = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
|
||||
let Some(element_expression) = block.find_map(&working_set, &|expr: &Expression| {
|
||||
find_pipeline_element_by_position(expr, &working_set, pos)
|
||||
}) else {
|
||||
return vec![];
|
||||
};
|
||||
|
||||
let flattened = flatten_expression(&working_set, element_expression);
|
||||
let mut spans: Vec<String> = vec![];
|
||||
|
||||
for (flat_idx, (span, shape)) in flattened.iter().enumerate() {
|
||||
let is_passthrough_command = spans
|
||||
.first()
|
||||
.filter(|content| content.as_str() == "sudo" || content.as_str() == "doas")
|
||||
.is_some();
|
||||
|
||||
// Read the current span to string
|
||||
let current_span = working_set.get_span_contents(*span);
|
||||
let current_span_str = String::from_utf8_lossy(current_span);
|
||||
let is_last_span = span.contains(pos);
|
||||
|
||||
// Skip the last 'a' as span item
|
||||
if is_last_span {
|
||||
let offset = pos - span.start;
|
||||
if offset == 0 {
|
||||
spans.push(String::new())
|
||||
} else {
|
||||
let mut current_span_str = current_span_str.to_string();
|
||||
current_span_str.remove(offset);
|
||||
spans.push(current_span_str);
|
||||
}
|
||||
} else {
|
||||
spans.push(current_span_str.to_string());
|
||||
}
|
||||
|
||||
// Complete based on the last span
|
||||
if is_last_span {
|
||||
// Context variables
|
||||
let most_left_var = most_left_variable(flat_idx, &working_set, flattened.clone());
|
||||
|
||||
// Create a new span
|
||||
let new_span = Span::new(span.start, span.end - 1);
|
||||
|
||||
// Parses the prefix. Completion should look up to the cursor position, not after.
|
||||
let index = pos - span.start;
|
||||
let prefix = ¤t_span[..index];
|
||||
|
||||
// Variables completion
|
||||
if prefix.starts_with(b"$") || most_left_var.is_some() {
|
||||
let mut variable_names_completer =
|
||||
VariableCompletion::new(most_left_var.unwrap_or((vec![], vec![])));
|
||||
|
||||
let mut variable_completions = self.process_completion(
|
||||
&mut variable_names_completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
let mut variable_operations_completer =
|
||||
OperatorCompletion::new(element_expression.clone());
|
||||
|
||||
let mut variable_operations_completions = self.process_completion(
|
||||
&mut variable_operations_completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
variable_completions.append(&mut variable_operations_completions);
|
||||
return variable_completions;
|
||||
}
|
||||
|
||||
// Flags completion
|
||||
if prefix.starts_with(b"-") {
|
||||
// Try to complete flag internally
|
||||
let mut completer = FlagCompletion::new(element_expression.clone());
|
||||
let result = self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
if !result.is_empty() {
|
||||
return result;
|
||||
}
|
||||
|
||||
// We got no results for internal completion
|
||||
// now we can check if external completer is set and use it
|
||||
if let Some(closure) = config.completions.external.completer.as_ref() {
|
||||
if let Some(external_result) =
|
||||
self.external_completion(closure, &spans, fake_offset, new_span)
|
||||
{
|
||||
return external_result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// specially check if it is currently empty - always complete commands
|
||||
if (is_passthrough_command && flat_idx == 1)
|
||||
|| (flat_idx == 0 && working_set.get_span_contents(new_span).is_empty())
|
||||
{
|
||||
let mut completer = CommandCompletion::new(
|
||||
flattened.clone(),
|
||||
// flat_idx,
|
||||
FlatShape::String,
|
||||
true,
|
||||
);
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
|
||||
// Completions that depends on the previous expression (e.g: use, source-env)
|
||||
if (is_passthrough_command && flat_idx > 1) || flat_idx > 0 {
|
||||
if let Some(previous_expr) = flattened.get(flat_idx - 1) {
|
||||
// Read the content for the previous expression
|
||||
let prev_expr_str = working_set.get_span_contents(previous_expr.0).to_vec();
|
||||
|
||||
// Completion for .nu files
|
||||
if prev_expr_str == b"use"
|
||||
|| prev_expr_str == b"overlay use"
|
||||
|| prev_expr_str == b"source-env"
|
||||
{
|
||||
let mut completer = DotNuCompletion::new();
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
} else if prev_expr_str == b"ls" {
|
||||
let mut completer = FileCompletion::new();
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
} else if matches!(
|
||||
previous_expr.1,
|
||||
FlatShape::Float
|
||||
| FlatShape::Int
|
||||
| FlatShape::String
|
||||
| FlatShape::List
|
||||
| FlatShape::Bool
|
||||
| FlatShape::Variable(_)
|
||||
) {
|
||||
let mut completer = OperatorCompletion::new(element_expression.clone());
|
||||
|
||||
let operator_suggestion = self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
if !operator_suggestion.is_empty() {
|
||||
return operator_suggestion;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Match other types
|
||||
match shape {
|
||||
FlatShape::Custom(decl_id) => {
|
||||
let mut completer = CustomCompletion::new(
|
||||
self.stack.clone(),
|
||||
*decl_id,
|
||||
initial_line,
|
||||
FileCompletion::new(),
|
||||
);
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
FlatShape::Directory => {
|
||||
let mut completer = DirectoryCompletion::new();
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
FlatShape::Filepath | FlatShape::GlobPattern => {
|
||||
let mut completer = FileCompletion::new();
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
flat_shape => {
|
||||
let mut completer = CommandCompletion::new(
|
||||
flattened.clone(),
|
||||
// flat_idx,
|
||||
flat_shape.clone(),
|
||||
false,
|
||||
);
|
||||
|
||||
let mut out: Vec<_> = self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
if !out.is_empty() {
|
||||
return out;
|
||||
}
|
||||
|
||||
// Try to complete using an external completer (if set)
|
||||
if let Some(closure) = config.completions.external.completer.as_ref() {
|
||||
if let Some(external_result) =
|
||||
self.external_completion(closure, &spans, fake_offset, new_span)
|
||||
{
|
||||
return external_result;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for file completion
|
||||
let mut completer = FileCompletion::new();
|
||||
out = self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
if !out.is_empty() {
|
||||
return out;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
||||
impl ReedlineCompleter for NuCompleter {
|
||||
fn complete(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
|
||||
self.completion_helper(line, pos)
|
||||
self.fetch_completions_at(line, pos)
|
||||
.into_iter()
|
||||
.map(|s| s.suggestion)
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
// reads the most left variable returning it's name (e.g: $myvar)
|
||||
// and the depth (a.b.c)
|
||||
fn most_left_variable(
|
||||
idx: usize,
|
||||
working_set: &StateWorkingSet<'_>,
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
) -> Option<(Vec<u8>, Vec<Vec<u8>>)> {
|
||||
// Reverse items to read the list backwards and truncate
|
||||
// because the only items that matters are the ones before the current index
|
||||
let mut rev = flattened;
|
||||
rev.truncate(idx);
|
||||
rev = rev.into_iter().rev().collect();
|
||||
|
||||
// Store the variables and sub levels found and reverse to correct order
|
||||
let mut variables_found: Vec<Vec<u8>> = vec![];
|
||||
let mut found_var = false;
|
||||
for item in rev.clone() {
|
||||
let result = working_set.get_span_contents(item.0).to_vec();
|
||||
|
||||
match item.1 {
|
||||
FlatShape::Variable(_) => {
|
||||
variables_found.push(result);
|
||||
found_var = true;
|
||||
|
||||
break;
|
||||
}
|
||||
FlatShape::String => {
|
||||
variables_found.push(result);
|
||||
}
|
||||
_ => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If most left var was not found
|
||||
if !found_var {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Reverse the order back
|
||||
variables_found = variables_found.into_iter().rev().collect();
|
||||
|
||||
// Extract the variable and the sublevels
|
||||
let var = variables_found.first().unwrap_or(&vec![]).to_vec();
|
||||
let sublevels: Vec<Vec<u8>> = variables_found.into_iter().skip(1).collect();
|
||||
|
||||
Some((var, sublevels))
|
||||
}
|
||||
|
||||
pub fn map_value_completions<'a>(
|
||||
list: impl Iterator<Item = &'a Value>,
|
||||
span: Span,
|
||||
@ -541,7 +639,7 @@ pub fn map_value_completions<'a>(
|
||||
},
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Type(x.get_type())),
|
||||
kind: Some(SuggestionKind::Value(x.get_type())),
|
||||
});
|
||||
}
|
||||
|
||||
@ -555,41 +653,41 @@ pub fn map_value_completions<'a>(
|
||||
},
|
||||
..Suggestion::default()
|
||||
};
|
||||
let mut value_type = Type::String;
|
||||
|
||||
// Iterate the cols looking for `value` and `description`
|
||||
record.iter().for_each(|it| {
|
||||
// Match `value` column
|
||||
if it.0 == "value" {
|
||||
// Convert the value to string
|
||||
if let Ok(val_str) = it.1.coerce_string() {
|
||||
// Update the suggestion value
|
||||
suggestion.value = val_str;
|
||||
record.iter().for_each(|(key, value)| {
|
||||
match key.as_str() {
|
||||
"value" => {
|
||||
value_type = value.get_type();
|
||||
// Convert the value to string
|
||||
if let Ok(val_str) = value.coerce_string() {
|
||||
// Update the suggestion value
|
||||
suggestion.value = val_str;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Match `description` column
|
||||
if it.0 == "description" {
|
||||
// Convert the value to string
|
||||
if let Ok(desc_str) = it.1.coerce_string() {
|
||||
// Update the suggestion value
|
||||
suggestion.description = Some(desc_str);
|
||||
"description" => {
|
||||
// Convert the value to string
|
||||
if let Ok(desc_str) = value.coerce_string() {
|
||||
// Update the suggestion value
|
||||
suggestion.description = Some(desc_str);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Match `style` column
|
||||
if it.0 == "style" {
|
||||
// Convert the value to string
|
||||
suggestion.style = match it.1 {
|
||||
Value::String { val, .. } => Some(lookup_ansi_color_style(val)),
|
||||
Value::Record { .. } => Some(color_record_to_nustyle(it.1)),
|
||||
_ => None,
|
||||
};
|
||||
"style" => {
|
||||
// Convert the value to string
|
||||
suggestion.style = match value {
|
||||
Value::String { val, .. } => Some(lookup_ansi_color_style(val)),
|
||||
Value::Record { .. } => Some(color_record_to_nustyle(value)),
|
||||
_ => None,
|
||||
};
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
});
|
||||
|
||||
return Some(SemanticSuggestion {
|
||||
suggestion,
|
||||
kind: Some(SuggestionKind::Type(x.get_type())),
|
||||
kind: Some(SuggestionKind::Value(value_type)),
|
||||
});
|
||||
}
|
||||
|
||||
@ -620,7 +718,7 @@ mod completer_tests {
|
||||
result.err().unwrap()
|
||||
);
|
||||
|
||||
let mut completer = NuCompleter::new(engine_state.into(), Arc::new(Stack::new()));
|
||||
let completer = NuCompleter::new(engine_state.into(), Arc::new(Stack::new()));
|
||||
let dataset = [
|
||||
("1 bit-sh", true, "b", vec!["bit-shl", "bit-shr"]),
|
||||
("1.0 bit-sh", false, "b", vec![]),
|
||||
@ -640,7 +738,7 @@ mod completer_tests {
|
||||
("ls | sudo m", true, "m", vec!["mv", "mut", "move"]),
|
||||
];
|
||||
for (line, has_result, begins_with, expected_values) in dataset {
|
||||
let result = completer.completion_helper(line, line.len());
|
||||
let result = completer.fetch_completions_at(line, line.len());
|
||||
// Test whether the result is empty or not
|
||||
assert_eq!(!result.is_empty(), has_result, "line: {}", line);
|
||||
|
||||
|
@ -51,7 +51,7 @@ fn complete_rec(
    }

    let prefix = partial.first().unwrap_or(&"");
    let mut matcher = NuMatcher::new(prefix, options.clone());
    let mut matcher = NuMatcher::new(prefix, options);

    for built in built_paths {
        let mut path = built.cwd.clone();
@ -65,10 +65,11 @@ fn complete_rec(

        for entry in result.filter_map(|e| e.ok()) {
            let entry_name = entry.file_name().to_string_lossy().into_owned();
            let entry_isdir = entry.path().is_dir() && !entry.path().is_symlink();
            let entry_isdir = entry.path().is_dir();
            let mut built = built.clone();
            built.parts.push(entry_name.clone());
            built.isdir = entry_isdir;
            // Symlinks to directories shouldn't have a trailing slash (#13275)
            built.isdir = entry_isdir && !entry.path().is_symlink();

            if !want_directory || entry_isdir {
                matcher.add(entry_name.clone(), (entry_name, built));
@ -157,6 +158,7 @@ pub struct FileSuggestion {
    pub span: nu_protocol::Span,
    pub path: String,
    pub style: Option<Style>,
    pub is_dir: bool,
}

/// # Parameters
@ -260,6 +262,7 @@ pub fn complete_item(
            if should_collapse_dots {
                p = collapse_ndots(p);
            }
            let is_dir = p.isdir;
            let path = original_cwd.apply(p, path_separator);
            let style = ls_colors.as_ref().map(|lsc| {
                lsc.style_for_path_with_metadata(
@ -275,6 +278,7 @@ pub fn complete_item(
                span,
                path: escape_path(path, want_directory),
                style,
                is_dir,
            }
        })
        .collect()
@ -315,12 +319,12 @@ pub struct AdjustView {
}

pub fn adjust_if_intermediate(
    prefix: &[u8],
    prefix: &str,
    working_set: &StateWorkingSet,
    mut span: nu_protocol::Span,
) -> AdjustView {
    let span_contents = String::from_utf8_lossy(working_set.get_span_contents(span)).to_string();
    let mut prefix = String::from_utf8_lossy(prefix).to_string();
    let mut prefix = prefix.to_string();

    // A difference of 1 because of the cursor's unicode code point in between.
    // Using .chars().count() because unicode and Windows.
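// A small, self-contained sketch of the symlink rule introduced above, using only `std::path`:
// a symlink that resolves to a directory still counts as a directory match, but it should not
// get the trailing-slash treatment (see #13275).
use std::path::Path;

fn dir_flags_sketch(path: &Path) -> (bool, bool) {
    let entry_isdir = path.is_dir(); // follows symlinks
    let add_trailing_slash = entry_isdir && !path.is_symlink();
    (entry_isdir, add_trailing_slash)
}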
@ -25,8 +25,8 @@ pub enum MatchAlgorithm {
    Fuzzy,
}

pub struct NuMatcher<T> {
    options: CompletionOptions,
pub struct NuMatcher<'a, T> {
    options: &'a CompletionOptions,
    needle: String,
    state: State<T>,
}
@ -45,11 +45,11 @@ enum State<T> {
}

/// Filters and sorts suggestions
impl<T> NuMatcher<T> {
impl<T> NuMatcher<'_, T> {
    /// # Arguments
    ///
    /// * `needle` - The text to search for
    pub fn new(needle: impl AsRef<str>, options: CompletionOptions) -> NuMatcher<T> {
    pub fn new(needle: impl AsRef<str>, options: &CompletionOptions) -> NuMatcher<T> {
        let needle = trim_quotes_str(needle.as_ref());
        match options.match_algorithm {
            MatchAlgorithm::Prefix => {
@ -184,7 +184,7 @@ impl<T> NuMatcher<T> {
    }
}

impl NuMatcher<SemanticSuggestion> {
impl NuMatcher<'_, SemanticSuggestion> {
    pub fn add_semantic_suggestion(&mut self, sugg: SemanticSuggestion) -> bool {
        let value = sugg.suggestion.value.to_string();
        self.add(value, sugg)
@ -271,7 +271,7 @@ mod test {
            match_algorithm,
            ..Default::default()
        };
        let mut matcher = NuMatcher::new(needle, options);
        let mut matcher = NuMatcher::new(needle, &options);
        matcher.add(haystack, haystack);
        if should_match {
            assert_eq!(vec![haystack], matcher.results());
@ -286,7 +286,7 @@ mod test {
            match_algorithm: MatchAlgorithm::Fuzzy,
            ..Default::default()
        };
        let mut matcher = NuMatcher::new("fob", options);
        let mut matcher = NuMatcher::new("fob", &options);
        for item in ["foo/bar", "fob", "foo bar"] {
            matcher.add(item, item);
        }
@ -300,7 +300,7 @@ mod test {
            match_algorithm: MatchAlgorithm::Fuzzy,
            ..Default::default()
        };
        let mut matcher = NuMatcher::new("'love spaces' ", options);
        let mut matcher = NuMatcher::new("'love spaces' ", &options);
        for item in [
            "'i love spaces'",
            "'i love spaces' so much",
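// The lifetime added above turns `NuMatcher` into a borrower of its options rather than an
// owner. A toy sketch of the same pattern with illustrative types (not the nushell API): since
// the matcher only reads the options, borrowing lets callers reuse one options value for many
// matchers without cloning it each time.
struct SketchOptions {
    case_sensitive: bool,
}

struct SketchMatcher<'a> {
    options: &'a SketchOptions,
    needle: String,
}

impl<'a> SketchMatcher<'a> {
    fn new(needle: impl AsRef<str>, options: &'a SketchOptions) -> SketchMatcher<'a> {
        SketchMatcher {
            options,
            needle: needle.as_ref().to_string(),
        }
    }

    fn matches(&self, haystack: &str) -> bool {
        if self.options.case_sensitive {
            haystack.starts_with(&self.needle)
        } else {
            haystack.to_lowercase().starts_with(&self.needle.to_lowercase())
        }
    }
}

#[test]
fn one_options_value_serves_many_matchers() {
    let options = SketchOptions { case_sensitive: false };
    let a = SketchMatcher::new("fo", &options);
    let b = SketchMatcher::new("BA", &options);
    assert!(a.matches("Foo") && b.matches("bar"));
}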
@ -13,18 +13,18 @@ use std::collections::HashMap;
use super::completion_options::NuMatcher;

pub struct CustomCompletion<T: Completer> {
    stack: Stack,
    decl_id: DeclId,
    line: String,
    line_pos: usize,
    fallback: T,
}

impl<T: Completer> CustomCompletion<T> {
    pub fn new(stack: Stack, decl_id: DeclId, line: String, fallback: T) -> Self {
    pub fn new(decl_id: DeclId, line: String, line_pos: usize, fallback: T) -> Self {
        Self {
            stack,
            decl_id,
            line,
            line_pos,
            fallback,
        }
    }
@ -35,19 +35,16 @@ impl<T: Completer> Completer for CustomCompletion<T> {
        &mut self,
        working_set: &StateWorkingSet,
        stack: &Stack,
        prefix: &[u8],
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        pos: usize,
        orig_options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        // Line position
        let line_pos = pos - offset;

        // Call custom declaration
        let mut stack_mut = stack.clone();
        let result = eval_call::<WithoutDebug>(
            working_set.permanent_state,
            &mut self.stack,
            &mut stack_mut,
            &Call {
                decl_id: self.decl_id,
                head: span,
@ -58,7 +55,7 @@ impl<T: Completer> Completer for CustomCompletion<T> {
                    Type::String,
                )),
                Argument::Positional(Expression::new_unknown(
                    Expr::Int(line_pos as i64),
                    Expr::Int(self.line_pos as i64),
                    Span::unknown(),
                    Type::Int,
                )),
@ -120,7 +117,6 @@ impl<T: Completer> Completer for CustomCompletion<T> {
                prefix,
                span,
                offset,
                pos,
                orig_options,
            );
        }
@ -138,7 +134,7 @@ impl<T: Completer> Completer for CustomCompletion<T> {
            }
        };

        let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), completion_options);
        let mut matcher = NuMatcher::new(prefix, &completion_options);

        if should_sort {
            for sugg in suggestions {
@ -9,29 +9,22 @@ use nu_protocol::{
use reedline::Suggestion;
use std::path::Path;

use super::{completion_common::FileSuggestion, SemanticSuggestion};
use super::{completion_common::FileSuggestion, SemanticSuggestion, SuggestionKind};

#[derive(Clone, Default)]
pub struct DirectoryCompletion {}

impl DirectoryCompletion {
    pub fn new() -> Self {
        Self::default()
    }
}
pub struct DirectoryCompletion;

impl Completer for DirectoryCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        stack: &Stack,
        prefix: &[u8],
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        _pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let AdjustView { prefix, span, .. } = adjust_if_intermediate(prefix, working_set, span);
        let AdjustView { prefix, span, .. } =
            adjust_if_intermediate(prefix.as_ref(), working_set, span);

        // Filter only the folders
        #[allow(deprecated)]
@ -54,8 +47,7 @@ impl Completer for DirectoryCompletion {
                },
                ..Suggestion::default()
            },
            // TODO????
            kind: None,
            kind: Some(SuggestionKind::Directory),
        })
        .collect();
@ -5,75 +5,99 @@ use nu_protocol::{
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::path::{is_separator, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
path::{is_separator, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
|
||||
};
|
||||
|
||||
use super::{SemanticSuggestion, SuggestionKind};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct DotNuCompletion {}
|
||||
|
||||
impl DotNuCompletion {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
pub struct DotNuCompletion;
|
||||
|
||||
impl Completer for DotNuCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: &[u8],
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let prefix_str = String::from_utf8_lossy(prefix);
|
||||
let prefix_str = prefix.as_ref();
|
||||
let start_with_backquote = prefix_str.starts_with('`');
|
||||
let end_with_backquote = prefix_str.ends_with('`');
|
||||
let prefix_str = prefix_str.replace('`', "");
|
||||
// e.g. `./`, `..\`, `/`
|
||||
let not_lib_dirs = prefix_str
|
||||
.chars()
|
||||
.find(|c| *c != '.')
|
||||
.is_some_and(is_separator);
|
||||
let mut search_dirs: Vec<PathBuf> = vec![];
|
||||
|
||||
// If prefix_str is only a word we want to search in the current dir
|
||||
let (base, partial) = prefix_str
|
||||
.rsplit_once(is_separator)
|
||||
.unwrap_or((".", &prefix_str));
|
||||
let (base, partial) = if let Some((parent, remain)) = prefix_str.rsplit_once(is_separator) {
|
||||
// If prefix_str is only a word we want to search in the current dir.
|
||||
// "/xx" should be split to "/" and "xx".
|
||||
if parent.is_empty() {
|
||||
(MAIN_SEPARATOR_STR, remain)
|
||||
} else {
|
||||
(parent, remain)
|
||||
}
|
||||
} else {
|
||||
(".", prefix_str.as_str())
|
||||
};
|
||||
let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR);
|
||||
|
||||
// Fetch the lib dirs
|
||||
let lib_dirs: Vec<PathBuf> = working_set
|
||||
// NOTE: 2 ways to setup `NU_LIB_DIRS`
|
||||
// 1. `const NU_LIB_DIRS = [paths]`, equal to `nu -I paths`
|
||||
// 2. `$env.NU_LIB_DIRS = [paths]`
|
||||
let const_lib_dirs = working_set
|
||||
.find_variable(b"$NU_LIB_DIRS")
|
||||
.and_then(|vid| working_set.get_variable(vid).const_val.as_ref())
|
||||
.or(working_set.get_env_var("NU_LIB_DIRS"))
|
||||
.map(|lib_dirs| {
|
||||
.and_then(|vid| working_set.get_variable(vid).const_val.as_ref());
|
||||
let env_lib_dirs = working_set.get_env_var("NU_LIB_DIRS");
|
||||
let lib_dirs: HashSet<PathBuf> = [const_lib_dirs, env_lib_dirs]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.flat_map(|lib_dirs| {
|
||||
lib_dirs
|
||||
.as_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| it.iter().filter_map(|x| x.to_path().ok()))
|
||||
.map(expand_tilde)
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
.collect();
|
||||
|
||||
// Check if the base_dir is a folder
|
||||
// rsplit_once removes the separator
|
||||
let cwd = working_set.permanent_state.cwd(None);
|
||||
if base_dir != "." {
|
||||
// Search in base_dir as well as lib_dirs
|
||||
let expanded_base_dir = expand_tilde(&base_dir);
|
||||
let is_base_dir_relative = expanded_base_dir.is_relative();
|
||||
// Search in base_dir as well as lib_dirs.
|
||||
// After expanded, base_dir can be a relative path or absolute path.
|
||||
// If relative, we join "current working dir" with it to get subdirectory and add to search_dirs.
|
||||
// If absolute, we add it to search_dirs.
|
||||
if let Ok(mut cwd) = cwd {
|
||||
cwd.push(&base_dir);
|
||||
search_dirs.push(cwd.into_std_path_buf());
|
||||
if is_base_dir_relative {
|
||||
cwd.push(&base_dir);
|
||||
search_dirs.push(cwd.into_std_path_buf());
|
||||
} else {
|
||||
search_dirs.push(expanded_base_dir);
|
||||
}
|
||||
}
|
||||
if !not_lib_dirs {
|
||||
search_dirs.extend(lib_dirs.into_iter().map(|mut dir| {
|
||||
dir.push(&base_dir);
|
||||
dir
|
||||
}));
|
||||
}
|
||||
search_dirs.extend(lib_dirs.into_iter().map(|mut dir| {
|
||||
dir.push(&base_dir);
|
||||
dir
|
||||
}));
|
||||
} else {
|
||||
if let Ok(cwd) = cwd {
|
||||
search_dirs.push(cwd.into_std_path_buf());
|
||||
}
|
||||
search_dirs.extend(lib_dirs);
|
||||
if !not_lib_dirs {
|
||||
search_dirs.extend(lib_dirs);
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch the files filtering the ones that ends with .nu
|
||||
@ -104,7 +128,9 @@ impl Completer for DotNuCompletion {
|
||||
let mut span_offset = 0;
|
||||
let mut value = x.path.to_string();
|
||||
// Complete only the last path component
|
||||
if base_dir != "." {
|
||||
if base_dir == MAIN_SEPARATOR_STR {
|
||||
span_offset = base_dir.len()
|
||||
} else if base_dir != "." {
|
||||
span_offset = base_dir.len() + 1
|
||||
}
|
||||
// Retain only one '`'
|
||||
|
@ -9,33 +9,25 @@ use nu_protocol::{
use reedline::Suggestion;
use std::path::Path;

use super::{completion_common::FileSuggestion, SemanticSuggestion};
use super::{completion_common::FileSuggestion, SemanticSuggestion, SuggestionKind};

#[derive(Clone, Default)]
pub struct FileCompletion {}

impl FileCompletion {
    pub fn new() -> Self {
        Self::default()
    }
}
pub struct FileCompletion;

impl Completer for FileCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        stack: &Stack,
        prefix: &[u8],
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        _pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let AdjustView {
            prefix,
            span,
            readjusted,
        } = adjust_if_intermediate(prefix, working_set, span);
        } = adjust_if_intermediate(prefix.as_ref(), working_set, span);

        #[allow(deprecated)]
        let items: Vec<_> = complete_item(
@ -58,8 +50,11 @@ impl Completer for FileCompletion {
                },
                ..Suggestion::default()
            },
            // TODO????
            kind: None,
            kind: Some(if x.is_dir {
                SuggestionKind::Directory
            } else {
                SuggestionKind::File
            }),
        })
        .collect();
@ -1,22 +1,15 @@
use crate::completions::{completion_options::NuMatcher, Completer, CompletionOptions};
use nu_protocol::{
    ast::{Expr, Expression},
    engine::{Stack, StateWorkingSet},
    Span,
    DeclId, Span,
};
use reedline::Suggestion;

use super::SemanticSuggestion;
use super::{SemanticSuggestion, SuggestionKind};

#[derive(Clone)]
pub struct FlagCompletion {
    expression: Expression,
}

impl FlagCompletion {
    pub fn new(expression: Expression) -> Self {
        Self { expression }
    }
    pub decl_id: DeclId,
}

impl Completer for FlagCompletion {
@ -24,69 +17,42 @@ impl Completer for FlagCompletion {
        &mut self,
        working_set: &StateWorkingSet,
        _stack: &Stack,
        prefix: &[u8],
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        _pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        // Check if it's a flag
        if let Expr::Call(call) = &self.expression.expr {
            let decl = working_set.get_decl(call.decl_id);
            let sig = decl.signature();

            let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), options.clone());

            for named in &sig.named {
                let flag_desc = &named.desc;
                if let Some(short) = named.short {
                    let mut named = vec![0; short.len_utf8()];
                    short.encode_utf8(&mut named);
                    named.insert(0, b'-');

                    matcher.add_semantic_suggestion(SemanticSuggestion {
                        suggestion: Suggestion {
                            value: String::from_utf8_lossy(&named).to_string(),
                            description: Some(flag_desc.to_string()),
                            span: reedline::Span {
                                start: span.start - offset,
                                end: span.end - offset,
                            },
                            append_whitespace: true,
                            ..Suggestion::default()
                        },
                        // TODO????
                        kind: None,
                    });
                }

                if named.long.is_empty() {
                    continue;
                }

                let mut named = named.long.as_bytes().to_vec();
                named.insert(0, b'-');
                named.insert(0, b'-');

                matcher.add_semantic_suggestion(SemanticSuggestion {
                    suggestion: Suggestion {
                        value: String::from_utf8_lossy(&named).to_string(),
                        description: Some(flag_desc.to_string()),
                        span: reedline::Span {
                            start: span.start - offset,
                            end: span.end - offset,
                        },
                        append_whitespace: true,
                        ..Suggestion::default()
        let mut matcher = NuMatcher::new(prefix, options);
        let mut add_suggestion = |value: String, description: String| {
            matcher.add_semantic_suggestion(SemanticSuggestion {
                suggestion: Suggestion {
                    value,
                    description: Some(description),
                    span: reedline::Span {
                        start: span.start - offset,
                        end: span.end - offset,
                    },
                    // TODO????
                    kind: None,
                });
                    append_whitespace: true,
                    ..Suggestion::default()
                },
                kind: Some(SuggestionKind::Flag),
            });
        };

        let decl = working_set.get_decl(self.decl_id);
        let sig = decl.signature();
        for named in &sig.named {
            if let Some(short) = named.short {
                let mut name = String::from("-");
                name.push(short);
                add_suggestion(name, named.desc.clone());
            }

            return matcher.results();
            if named.long.is_empty() {
                continue;
            }
            add_suggestion(format!("--{}", named.long), named.desc.clone());
        }

        vec![]
        matcher.results()
    }
}
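// A self-contained sketch of the flag-name formatting performed by `add_suggestion` above,
// assuming a toy flag record in place of nushell's `Signature`: short flags render as "-x",
// long flags as "--name", and an empty long name is skipped.
struct SketchFlag {
    short: Option<char>,
    long: String,
}

fn flag_names_sketch(flags: &[SketchFlag]) -> Vec<String> {
    let mut out = Vec::new();
    for flag in flags {
        if let Some(short) = flag.short {
            let mut name = String::from("-");
            name.push(short);
            out.push(name);
        }
        if !flag.long.is_empty() {
            out.push(format!("--{}", flag.long));
        }
    }
    out
}

#[test]
fn short_and_long_flags_are_rendered() {
    let flags = [SketchFlag { short: Some('h'), long: "help".into() }];
    assert_eq!(flag_names_sketch(&flags), vec!["-h", "--help"]);
}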
@ -1,4 +1,6 @@
|
||||
mod attribute_completions;
|
||||
mod base;
|
||||
mod cell_path_completions;
|
||||
mod command_completions;
|
||||
mod completer;
|
||||
mod completion_common;
|
||||
@ -11,7 +13,9 @@ mod flag_completions;
|
||||
mod operator_completions;
|
||||
mod variable_completions;
|
||||
|
||||
pub use attribute_completions::{AttributableCompletion, AttributeCompletion};
|
||||
pub use base::{Completer, SemanticSuggestion, SuggestionKind};
|
||||
pub use cell_path_completions::CellPathCompletion;
|
||||
pub use command_completions::CommandCompletion;
|
||||
pub use completer::NuCompleter;
|
||||
pub use completion_options::{CompletionOptions, MatchAlgorithm};
|
||||
|
@ -2,169 +2,276 @@ use crate::completions::{
|
||||
completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
|
||||
};
|
||||
use nu_protocol::{
|
||||
ast::{Expr, Expression},
|
||||
ast::{self, Comparison, Expr, Expression},
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span, Type,
|
||||
Span, Type, Value, ENV_VARIABLE_ID,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use strum::{EnumMessage, IntoEnumIterator};
|
||||
|
||||
use super::cell_path_completions::eval_cell_path;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OperatorCompletion {
|
||||
previous_expr: Expression,
|
||||
pub struct OperatorCompletion<'a> {
|
||||
pub left_hand_side: &'a Expression,
|
||||
}
|
||||
|
||||
impl OperatorCompletion {
|
||||
pub fn new(previous_expr: Expression) -> Self {
|
||||
OperatorCompletion { previous_expr }
|
||||
struct OperatorItem {
|
||||
pub symbols: String,
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
fn operator_to_item<T: EnumMessage + AsRef<str>>(op: T) -> OperatorItem {
|
||||
OperatorItem {
|
||||
symbols: op.as_ref().into(),
|
||||
description: op.get_message().unwrap_or_default().into(),
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for OperatorCompletion {
|
||||
fn common_comparison_ops() -> Vec<OperatorItem> {
|
||||
vec![
|
||||
operator_to_item(Comparison::In),
|
||||
operator_to_item(Comparison::NotIn),
|
||||
operator_to_item(Comparison::Equal),
|
||||
operator_to_item(Comparison::NotEqual),
|
||||
]
|
||||
}
|
||||
|
||||
fn all_ops_for_immutable() -> Vec<OperatorItem> {
|
||||
ast::Comparison::iter()
|
||||
.map(operator_to_item)
|
||||
.chain(ast::Math::iter().map(operator_to_item))
|
||||
.chain(ast::Boolean::iter().map(operator_to_item))
|
||||
.chain(ast::Bits::iter().map(operator_to_item))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn collection_comparison_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = common_comparison_ops();
|
||||
ops.push(operator_to_item(Comparison::Has));
|
||||
ops.push(operator_to_item(Comparison::NotHas));
|
||||
ops
|
||||
}
|
||||
|
||||
fn number_comparison_ops() -> Vec<OperatorItem> {
|
||||
Comparison::iter()
|
||||
.filter(|op| {
|
||||
!matches!(
|
||||
op,
|
||||
Comparison::RegexMatch
|
||||
| Comparison::NotRegexMatch
|
||||
| Comparison::StartsWith
|
||||
| Comparison::EndsWith
|
||||
| Comparison::Has
|
||||
| Comparison::NotHas
|
||||
)
|
||||
})
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn math_ops() -> Vec<OperatorItem> {
|
||||
ast::Math::iter()
|
||||
.filter(|op| !matches!(op, ast::Math::Concatenate | ast::Math::Pow))
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn bit_ops() -> Vec<OperatorItem> {
|
||||
ast::Bits::iter().map(operator_to_item).collect()
|
||||
}
|
||||
|
||||
fn all_assignment_ops() -> Vec<OperatorItem> {
|
||||
ast::Assignment::iter().map(operator_to_item).collect()
|
||||
}
|
||||
|
||||
fn numeric_assignment_ops() -> Vec<OperatorItem> {
|
||||
ast::Assignment::iter()
|
||||
.filter(|op| !matches!(op, ast::Assignment::ConcatenateAssign))
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn concat_assignment_ops() -> Vec<OperatorItem> {
|
||||
vec![
|
||||
operator_to_item(ast::Assignment::Assign),
|
||||
operator_to_item(ast::Assignment::ConcatenateAssign),
|
||||
]
|
||||
}
|
||||
|
||||
fn valid_int_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = valid_float_ops();
|
||||
ops.extend(bit_ops());
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_float_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = valid_value_with_unit_ops();
|
||||
ops.push(operator_to_item(ast::Math::Pow));
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_string_ops() -> Vec<OperatorItem> {
|
||||
let mut ops: Vec<OperatorItem> = Comparison::iter().map(operator_to_item).collect();
|
||||
ops.push(operator_to_item(ast::Math::Concatenate));
|
||||
ops.push(OperatorItem {
|
||||
symbols: "like".into(),
|
||||
description: Comparison::RegexMatch
|
||||
.get_message()
|
||||
.unwrap_or_default()
|
||||
.into(),
|
||||
});
|
||||
ops.push(OperatorItem {
|
||||
symbols: "not-like".into(),
|
||||
description: Comparison::NotRegexMatch
|
||||
.get_message()
|
||||
.unwrap_or_default()
|
||||
.into(),
|
||||
});
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_list_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = collection_comparison_ops();
|
||||
ops.push(operator_to_item(ast::Math::Concatenate));
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_binary_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = number_comparison_ops();
|
||||
ops.extend(bit_ops());
|
||||
ops.push(operator_to_item(ast::Math::Concatenate));
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_bool_ops() -> Vec<OperatorItem> {
|
||||
let mut ops: Vec<OperatorItem> = ast::Boolean::iter().map(operator_to_item).collect();
|
||||
ops.extend(common_comparison_ops());
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_value_with_unit_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = number_comparison_ops();
|
||||
ops.extend(math_ops());
|
||||
ops
|
||||
}
|
||||
|
||||
fn ops_by_value(value: &Value, mutable: bool) -> Vec<OperatorItem> {
|
||||
let mut ops = match value {
|
||||
Value::Int { .. } => valid_int_ops(),
|
||||
Value::Float { .. } => valid_float_ops(),
|
||||
Value::String { .. } => valid_string_ops(),
|
||||
Value::Binary { .. } => valid_binary_ops(),
|
||||
Value::Bool { .. } => valid_bool_ops(),
|
||||
Value::Date { .. } => number_comparison_ops(),
|
||||
Value::Filesize { .. } | Value::Duration { .. } => valid_value_with_unit_ops(),
|
||||
Value::Range { .. } | Value::Record { .. } => collection_comparison_ops(),
|
||||
Value::List { .. } => valid_list_ops(),
|
||||
_ => all_ops_for_immutable(),
|
||||
};
|
||||
if mutable {
|
||||
ops.extend(match value {
|
||||
Value::Int { .. }
|
||||
| Value::Float { .. }
|
||||
| Value::Filesize { .. }
|
||||
| Value::Duration { .. } => numeric_assignment_ops(),
|
||||
Value::String { .. } | Value::Binary { .. } | Value::List { .. } => {
|
||||
concat_assignment_ops()
|
||||
}
|
||||
Value::Bool { .. }
|
||||
| Value::Date { .. }
|
||||
| Value::Range { .. }
|
||||
| Value::Record { .. } => vec![operator_to_item(ast::Assignment::Assign)],
|
||||
_ => all_assignment_ops(),
|
||||
})
|
||||
}
|
||||
ops
|
||||
}
|
||||
|
||||
fn is_expression_mutable(expr: &Expr, working_set: &StateWorkingSet) -> bool {
|
||||
let Expr::FullCellPath(path) = expr else {
|
||||
return false;
|
||||
};
|
||||
let Expr::Var(id) = path.head.expr else {
|
||||
return false;
|
||||
};
|
||||
if id == ENV_VARIABLE_ID {
|
||||
return true;
|
||||
}
|
||||
let var = working_set.get_variable(id);
|
||||
var.mutable
|
||||
}
|
||||
|
||||
impl Completer for OperatorCompletion<'_> {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
_prefix: &[u8],
|
||||
stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
//Check if int, float, or string
|
||||
let partial = std::str::from_utf8(working_set.get_span_contents(span)).unwrap_or("");
|
||||
let op = match &self.previous_expr.expr {
|
||||
Expr::BinaryOp(x, _, _) => &x.expr,
|
||||
_ => {
|
||||
return vec![];
|
||||
}
|
||||
};
|
||||
let possible_operations = match op {
|
||||
Expr::Int(_) => vec![
|
||||
("+", "Add (Plus)"),
|
||||
("-", "Subtract (Minus)"),
|
||||
("*", "Multiply"),
|
||||
("/", "Divide"),
|
||||
("==", "Equal to"),
|
||||
("!=", "Not equal to"),
|
||||
("//", "Floor division"),
|
||||
("<", "Less than"),
|
||||
(">", "Greater than"),
|
||||
("<=", "Less than or equal to"),
|
||||
(">=", "Greater than or equal to"),
|
||||
("mod", "Floor division remainder (Modulo)"),
|
||||
("**", "Power of"),
|
||||
("bit-or", "Bitwise OR"),
|
||||
("bit-xor", "Bitwise exclusive OR"),
|
||||
("bit-and", "Bitwise AND"),
|
||||
("bit-shl", "Bitwise shift left"),
|
||||
("bit-shr", "Bitwise shift right"),
|
||||
("in", "Is a member of (doesn't use regex)"),
|
||||
("not-in", "Is not a member of (doesn't use regex)"),
|
||||
],
|
||||
Expr::String(_) => vec![
|
||||
("=~", "Contains regex match"),
|
||||
("like", "Contains regex match"),
|
||||
("!~", "Does not contain regex match"),
|
||||
("not-like", "Does not contain regex match"),
|
||||
(
|
||||
"++",
|
||||
"Concatenates two lists, two strings, or two binary values",
|
||||
),
|
||||
("in", "Is a member of (doesn't use regex)"),
|
||||
("not-in", "Is not a member of (doesn't use regex)"),
|
||||
("starts-with", "Starts with"),
|
||||
("ends-with", "Ends with"),
|
||||
],
|
||||
Expr::Float(_) => vec![
|
||||
("+", "Add (Plus)"),
|
||||
("-", "Subtract (Minus)"),
|
||||
("*", "Multiply"),
|
||||
("/", "Divide"),
|
||||
("==", "Equal to"),
|
||||
("!=", "Not equal to"),
|
||||
("//", "Floor division"),
|
||||
("<", "Less than"),
|
||||
(">", "Greater than"),
|
||||
("<=", "Less than or equal to"),
|
||||
(">=", "Greater than or equal to"),
|
||||
("mod", "Floor division remainder (Modulo)"),
|
||||
("**", "Power of"),
|
||||
("in", "Is a member of (doesn't use regex)"),
|
||||
("not-in", "Is not a member of (doesn't use regex)"),
|
||||
],
|
||||
Expr::Bool(_) => vec![
|
||||
(
|
||||
"and",
|
||||
"Both values are true (short-circuits when first value is false)",
|
||||
),
|
||||
(
|
||||
"or",
|
||||
"Either value is true (short-circuits when first value is true)",
|
||||
),
|
||||
("xor", "One value is true and the other is false"),
|
||||
("not", "Negates a value or expression"),
|
||||
("in", "Is a member of (doesn't use regex)"),
|
||||
("not-in", "Is not a member of (doesn't use regex)"),
|
||||
],
|
||||
Expr::FullCellPath(path) => match path.head.expr {
|
||||
Expr::List(_) => vec![
|
||||
(
|
||||
"++",
|
||||
"Concatenates two lists, two strings, or two binary values",
|
||||
),
|
||||
("has", "Contains a value of (doesn't use regex)"),
|
||||
("not-has", "Does not contain a value of (doesn't use regex)"),
|
||||
],
|
||||
Expr::Var(id) => get_variable_completions(id, working_set),
|
||||
_ => vec![],
|
||||
        let mut needs_assignment_ops = true;
        // Complete according to the expression type
        // TODO: type inference on self.left_hand_side to get more accurate completions
        let mut possible_operations: Vec<OperatorItem> = match &self.left_hand_side.ty {
            Type::Int | Type::Number => valid_int_ops(),
            Type::Float => valid_float_ops(),
            Type::String => valid_string_ops(),
            Type::Binary => valid_binary_ops(),
            Type::Bool => valid_bool_ops(),
            Type::Date => number_comparison_ops(),
            Type::Filesize | Type::Duration => valid_value_with_unit_ops(),
            Type::Record(_) | Type::Range => collection_comparison_ops(),
            Type::List(_) | Type::Table(_) => valid_list_ops(),
            // Unknown type, resort to evaluated values
            Type::Any => match &self.left_hand_side.expr {
                Expr::FullCellPath(path) => {
                    // for `$ <tab>`
                    if matches!(path.head.expr, Expr::Garbage) {
                        return vec![];
                    }
                    let value =
                        eval_cell_path(working_set, stack, &path.head, &path.tail, path.head.span)
                            .unwrap_or_default();
                    let mutable = is_expression_mutable(&self.left_hand_side.expr, working_set);
                    // to avoid duplication
                    needs_assignment_ops = false;
                    ops_by_value(&value, mutable)
                }
                _ => all_ops_for_immutable(),
            },
            _ => vec![],
            _ => common_comparison_ops(),
        };
        // If the left hand side is a variable, add assignment operators if mutable
        if needs_assignment_ops && is_expression_mutable(&self.left_hand_side.expr, working_set) {
            possible_operations.extend(match &self.left_hand_side.ty {
                Type::Int | Type::Float | Type::Number => numeric_assignment_ops(),
                Type::Filesize | Type::Duration => numeric_assignment_ops(),
                Type::String | Type::Binary | Type::List(_) => concat_assignment_ops(),
                Type::Any => all_assignment_ops(),
                _ => vec![operator_to_item(ast::Assignment::Assign)],
            });
        }

|
||||
let mut matcher = NuMatcher::new(partial, options.clone());
|
||||
for (symbol, desc) in possible_operations.into_iter() {
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
for OperatorItem {
|
||||
symbols,
|
||||
description,
|
||||
} in possible_operations
|
||||
{
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: symbol.to_string(),
|
||||
description: Some(desc.to_string()),
|
||||
value: symbols.to_owned(),
|
||||
description: Some(description.to_owned()),
|
||||
span: reedline::Span::new(span.start - offset, span.end - offset),
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(
|
||||
nu_protocol::engine::CommandType::Builtin,
|
||||
)),
|
||||
kind: Some(SuggestionKind::Operator),
|
||||
});
|
||||
}
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_variable_completions<'a>(
    id: nu_protocol::Id<nu_protocol::marker::Var>,
    working_set: &StateWorkingSet,
) -> Vec<(&'a str, &'a str)> {
    let var = working_set.get_variable(id);
    if !var.mutable {
        return vec![];
    }

    match var.ty {
        Type::List(_) | Type::String | Type::Binary => vec![
            (
                "++=",
                "Concatenates two lists, two strings, or two binary values",
            ),
            ("=", "Assigns a value to a variable."),
        ],

        Type::Int | Type::Float => vec![
            ("=", "Assigns a value to a variable."),
            ("+=", "Adds a value to a variable."),
            ("-=", "Subtracts a value from a variable."),
            ("*=", "Multiplies a variable by a value"),
            ("/=", "Divides a variable by a value."),
        ],
        _ => vec![],
    }
}

|
@ -1,157 +1,67 @@
|
||||
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
|
||||
use nu_engine::{column::get_columns, eval_variable};
|
||||
use nu_protocol::{
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span, Value,
|
||||
Span, VarId,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::str;
|
||||
|
||||
use super::completion_options::NuMatcher;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct VariableCompletion {
|
||||
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
||||
}
|
||||
|
||||
impl VariableCompletion {
|
||||
pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
|
||||
Self { var_context }
|
||||
}
|
||||
}
|
||||
pub struct VariableCompletion;
|
||||
|
||||
impl Completer for VariableCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: &[u8],
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let builtins = ["$nu", "$in", "$env"];
|
||||
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
|
||||
let var_id = working_set.find_variable(&self.var_context.0);
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
let current_span = reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
};
|
||||
let sublevels_count = self.var_context.1.len();
|
||||
let prefix_str = String::from_utf8_lossy(prefix);
|
||||
let mut matcher = NuMatcher::new(prefix_str, options.clone());
|
||||
|
||||
// Completions for the given variable
|
||||
if !var_str.is_empty() {
|
||||
// Completion for $env.<tab>
|
||||
if var_str == "$env" {
|
||||
let env_vars = stack.get_env_vars(working_set.permanent_state);
|
||||
|
||||
// Return nested values
|
||||
if sublevels_count > 0 {
|
||||
// Extract the target var ($env.<target-var>)
|
||||
let target_var = self.var_context.1[0].clone();
|
||||
let target_var_str =
|
||||
str::from_utf8(&target_var).unwrap_or_default().to_string();
|
||||
|
||||
// Everything after the target var is the nested level ($env.<target-var>.<nested_levels>...)
|
||||
let nested_levels: Vec<Vec<u8>> =
|
||||
self.var_context.1.clone().into_iter().skip(1).collect();
|
||||
|
||||
if let Some(val) = env_vars.get(&target_var_str) {
|
||||
for suggestion in nested_suggestions(val, &nested_levels, current_span) {
|
||||
matcher.add_semantic_suggestion(suggestion);
|
||||
}
|
||||
|
||||
return matcher.results();
|
||||
}
|
||||
} else {
|
||||
// No nesting provided, return all env vars
|
||||
for env_var in env_vars {
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: env_var.0,
|
||||
span: current_span,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
|
||||
});
|
||||
}
|
||||
|
||||
return matcher.results();
|
||||
}
|
||||
}
|
||||
|
||||
// Completions for $nu.<tab>
|
||||
if var_str == "$nu" {
|
||||
// Eval nu var
|
||||
if let Ok(nuval) = eval_variable(
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
nu_protocol::NU_VARIABLE_ID,
|
||||
nu_protocol::Span::new(current_span.start, current_span.end),
|
||||
) {
|
||||
for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
|
||||
{
|
||||
matcher.add_semantic_suggestion(suggestion);
|
||||
}
|
||||
|
||||
return matcher.results();
|
||||
}
|
||||
}
|
||||
|
||||
// Completion other variable types
|
||||
if let Some(var_id) = var_id {
|
||||
// Extract the variable value from the stack
|
||||
let var = stack.get_var(var_id, Span::new(span.start, span.end));
|
||||
|
||||
// If the value exists and it's of type Record
|
||||
if let Ok(value) = var {
|
||||
for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
|
||||
{
|
||||
matcher.add_semantic_suggestion(suggestion);
|
||||
}
|
||||
|
||||
return matcher.results();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Variable completion (e.g: $en<tab> to complete $env)
|
||||
let builtins = ["$nu", "$in", "$env"];
|
||||
for builtin in builtins {
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: builtin.to_string(),
|
||||
span: current_span,
|
||||
description: Some("reserved".into()),
|
||||
..Suggestion::default()
|
||||
},
|
||||
// TODO is there a way to get the VarId to get the type???
|
||||
kind: None,
|
||||
kind: Some(SuggestionKind::Variable),
|
||||
});
|
||||
}
|
||||
|
||||
let mut add_candidate = |name, var_id: &VarId| {
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: String::from_utf8_lossy(name).to_string(),
|
||||
span: current_span,
|
||||
description: Some(working_set.get_variable(*var_id).ty.to_string()),
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Variable),
|
||||
})
|
||||
};
|
||||
|
||||
// TODO: The following can be refactored (see find_commands_by_predicate() used in
|
||||
// command_completions).
|
||||
let mut removed_overlays = vec![];
|
||||
// Working set scope vars
|
||||
for scope_frame in working_set.delta.scope.iter().rev() {
|
||||
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
|
||||
for v in &overlay_frame.vars {
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: String::from_utf8_lossy(v.0).to_string(),
|
||||
span: current_span,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Type(
|
||||
working_set.get_variable(*v.1).ty.clone(),
|
||||
)),
|
||||
});
|
||||
for (name, var_id) in &overlay_frame.vars {
|
||||
add_candidate(name, var_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Permanent state vars
|
||||
// for scope in &self.engine_state.scope {
|
||||
for overlay_frame in working_set
|
||||
@ -159,98 +69,11 @@ impl Completer for VariableCompletion {
|
||||
.active_overlays(&removed_overlays)
|
||||
.rev()
|
||||
{
|
||||
for v in &overlay_frame.vars {
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: String::from_utf8_lossy(v.0).to_string(),
|
||||
span: current_span,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Type(
|
||||
working_set.get_variable(*v.1).ty.clone(),
|
||||
)),
|
||||
});
|
||||
for (name, var_id) in &overlay_frame.vars {
|
||||
add_candidate(name, var_id);
|
||||
}
|
||||
}
|
||||
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
||||
// Find recursively the values for sublevels
|
||||
// if no sublevels are set it returns the current value
|
||||
fn nested_suggestions(
|
||||
val: &Value,
|
||||
sublevels: &[Vec<u8>],
|
||||
current_span: reedline::Span,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut output: Vec<SemanticSuggestion> = vec![];
|
||||
let value = recursive_value(val, sublevels).unwrap_or_else(Value::nothing);
|
||||
|
||||
let kind = SuggestionKind::Type(value.get_type());
|
||||
match value {
|
||||
Value::Record { val, .. } => {
|
||||
// Add all the columns as completion
|
||||
for col in val.columns() {
|
||||
output.push(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: col.clone(),
|
||||
span: current_span,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(kind.clone()),
|
||||
});
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
Value::List { vals, .. } => {
|
||||
for column_name in get_columns(vals.as_slice()) {
|
||||
output.push(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: column_name,
|
||||
span: current_span,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(kind.clone()),
|
||||
});
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
_ => output,
|
||||
}
|
||||
}
|
||||
|
||||
// Extracts the recursive value (e.g: $var.a.b.c)
|
||||
fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
|
||||
// Go to next sublevel
|
||||
if let Some((sublevel, next_sublevels)) = sublevels.split_first() {
|
||||
let span = val.span();
|
||||
match val {
|
||||
Value::Record { val, .. } => {
|
||||
if let Some((_, value)) = val.iter().find(|(key, _)| key.as_bytes() == sublevel) {
|
||||
// If matches try to fetch recursively the next
|
||||
recursive_value(value, next_sublevels)
|
||||
} else {
|
||||
// Current sublevel value not found
|
||||
Err(span)
|
||||
}
|
||||
}
|
||||
Value::List { vals, .. } => {
|
||||
for col in get_columns(vals.as_slice()) {
|
||||
if col.as_bytes() == *sublevel {
|
||||
let val = val.get_data_by_key(&col).ok_or(span)?;
|
||||
return recursive_value(&val, next_sublevels);
|
||||
}
|
||||
}
|
||||
|
||||
// Current sublevel value not found
|
||||
Err(span)
|
||||
}
|
||||
_ => Ok(val.clone()),
|
||||
}
|
||||
} else {
|
||||
Ok(val.clone())
|
||||
}
|
||||
}
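An aside, not part of the commit: a small sketch of how `recursive_value` resolves nested sublevels, assuming nu_protocol's `record!` macro and `Value::test_*` helpers.

#[test]
fn recursive_value_walks_nested_records() {
    use nu_protocol::{record, Value};

    // Build { foo: { bar: 1 } } and resolve the sublevels `foo.bar`.
    let var = Value::test_record(record! {
        "foo" => Value::test_record(record! { "bar" => Value::test_int(1) }),
    });
    let sublevels = vec![b"foo".to_vec(), b"bar".to_vec()];
    assert_eq!(recursive_value(&var, &sublevels), Ok(Value::test_int(1)));
}
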
|
||||
|
@ -8,7 +8,7 @@ use nu_protocol::{
    debugger::WithoutDebug,
    engine::{EngineState, Stack, StateWorkingSet},
    report_parse_error, report_parse_warning,
    shell_error::io::IoError,
    shell_error::io::*,
    PipelineData, ShellError, Span, Value,
};
use std::{path::PathBuf, sync::Arc};
@ -27,11 +27,11 @@ pub fn evaluate_file(
    let cwd = engine_state.cwd_as_string(Some(stack))?;

    let file_path = canonicalize_with(&path, cwd).map_err(|err| {
        IoError::new_with_additional_context(
            err.kind(),
            Span::unknown(),
            PathBuf::from(&path),
        IoError::new_internal_with_path(
            err.kind().not_found_as(NotFound::File),
            "Could not access file",
            nu_protocol::location!(),
            PathBuf::from(&path),
        )
    })?;

@ -46,21 +46,21 @@ pub fn evaluate_file(
    })?;

    let file = std::fs::read(&file_path).map_err(|err| {
        IoError::new_with_additional_context(
            err.kind(),
            Span::unknown(),
            file_path.clone(),
        IoError::new_internal_with_path(
            err.kind().not_found_as(NotFound::File),
            "Could not read file",
            nu_protocol::location!(),
            file_path.clone(),
        )
    })?;
    engine_state.file = Some(file_path.clone());

    let parent = file_path.parent().ok_or_else(|| {
        IoError::new_with_additional_context(
            std::io::ErrorKind::NotFound,
            Span::unknown(),
            file_path.clone(),
        IoError::new_internal_with_path(
            ErrorKind::DirectoryNotFound,
            "The file path does not have a parent",
            nu_protocol::location!(),
            file_path.clone(),
        )
    })?;

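All three call sites above switch from `IoError::new_with_additional_context` to `IoError::new_internal_with_path`. A generic sketch of that pattern, with a hypothetical `read_config` wrapper and message (not part of the commit):

use std::path::Path;
use nu_protocol::shell_error::io::*;

// Hypothetical helper: wrap a std::io error the same way evaluate_file now
// does, recording the call-site location and the offending path.
fn read_config(path: &Path) -> Result<Vec<u8>, IoError> {
    std::fs::read(path).map_err(|err| {
        IoError::new_internal_with_path(
            err.kind().not_found_as(NotFound::File),
            "Could not read config file",
            nu_protocol::location!(),
            path.to_path_buf(),
        )
    })
}
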
|
||||
|
@ -707,11 +707,6 @@ pub(crate) fn create_keybindings(config: &Config) -> Result<KeybindingsMode, She
|
||||
EditBindings::Vi => {
|
||||
add_menu_keybindings(&mut insert_keybindings);
|
||||
add_menu_keybindings(&mut normal_keybindings);
|
||||
normal_keybindings.add_binding(
|
||||
KeyModifiers::NONE,
|
||||
KeyCode::Char('/'),
|
||||
ReedlineEvent::Menu("history_menu".to_string()),
|
||||
);
|
||||
}
|
||||
}
|
||||
for keybinding in parsed_keybindings {
|
||||
@ -1003,41 +998,54 @@ fn event_from_record(
|
||||
) -> Result<ReedlineEvent, ShellError> {
|
||||
let event = match name {
|
||||
"none" => ReedlineEvent::None,
|
||||
"clearscreen" => ReedlineEvent::ClearScreen,
|
||||
"clearscrollback" => ReedlineEvent::ClearScrollback,
|
||||
"historyhintcomplete" => ReedlineEvent::HistoryHintComplete,
|
||||
"historyhintwordcomplete" => ReedlineEvent::HistoryHintWordComplete,
|
||||
"ctrld" => ReedlineEvent::CtrlD,
|
||||
"ctrlc" => ReedlineEvent::CtrlC,
|
||||
"clearscreen" => ReedlineEvent::ClearScreen,
|
||||
"clearscrollback" => ReedlineEvent::ClearScrollback,
|
||||
"enter" => ReedlineEvent::Enter,
|
||||
"submit" => ReedlineEvent::Submit,
|
||||
"submitornewline" => ReedlineEvent::SubmitOrNewline,
|
||||
"esc" | "escape" => ReedlineEvent::Esc,
|
||||
// Non-sensical for user configuration:
|
||||
//
|
||||
// `ReedlineEvent::Mouse` - itself a no-op
|
||||
// `ReedlineEvent::Resize` - requires size info specifically from the ANSI resize
|
||||
// event
|
||||
//
|
||||
// Handled above in `parse_event`:
|
||||
//
|
||||
// `ReedlineEvent::Edit`
|
||||
"repaint" => ReedlineEvent::Repaint,
|
||||
"previoushistory" => ReedlineEvent::PreviousHistory,
|
||||
"up" => ReedlineEvent::Up,
|
||||
"down" => ReedlineEvent::Down,
|
||||
"right" => ReedlineEvent::Right,
|
||||
"left" => ReedlineEvent::Left,
|
||||
"searchhistory" => ReedlineEvent::SearchHistory,
|
||||
"nexthistory" => ReedlineEvent::NextHistory,
|
||||
"previoushistory" => ReedlineEvent::PreviousHistory,
|
||||
"repaint" => ReedlineEvent::Repaint,
|
||||
"menudown" => ReedlineEvent::MenuDown,
|
||||
"menuup" => ReedlineEvent::MenuUp,
|
||||
"menuleft" => ReedlineEvent::MenuLeft,
|
||||
"menuright" => ReedlineEvent::MenuRight,
|
||||
"menunext" => ReedlineEvent::MenuNext,
|
||||
"menuprevious" => ReedlineEvent::MenuPrevious,
|
||||
"menupagenext" => ReedlineEvent::MenuPageNext,
|
||||
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
|
||||
"openeditor" => ReedlineEvent::OpenEditor,
|
||||
"searchhistory" => ReedlineEvent::SearchHistory,
|
||||
// Handled above in `parse_event`:
|
||||
//
|
||||
// `ReedlineEvent::Multiple`
|
||||
// `ReedlineEvent::UntilFound`
|
||||
"menu" => {
|
||||
let menu = extract_value("name", record, span)?;
|
||||
ReedlineEvent::Menu(menu.to_expanded_string("", config))
|
||||
}
|
||||
"menunext" => ReedlineEvent::MenuNext,
|
||||
"menuprevious" => ReedlineEvent::MenuPrevious,
|
||||
"menuup" => ReedlineEvent::MenuUp,
|
||||
"menudown" => ReedlineEvent::MenuDown,
|
||||
"menuleft" => ReedlineEvent::MenuLeft,
|
||||
"menuright" => ReedlineEvent::MenuRight,
|
||||
"menupagenext" => ReedlineEvent::MenuPageNext,
|
||||
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
|
||||
"executehostcommand" => {
|
||||
let cmd = extract_value("cmd", record, span)?;
|
||||
ReedlineEvent::ExecuteHostCommand(cmd.to_expanded_string("", config))
|
||||
}
|
||||
"openeditor" => ReedlineEvent::OpenEditor,
|
||||
str => {
|
||||
return Err(ShellError::InvalidValue {
|
||||
valid: "a reedline event".into(),
|
||||
@ -1067,7 +1075,6 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
|
||||
"movetoend" => EditCommand::MoveToEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
@ -1103,16 +1110,6 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightend" => EditCommand::MoveWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movebigwordrightend" => EditCommand::MoveBigWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightstart" => EditCommand::MoveWordRightStart {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
@ -1123,6 +1120,16 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightend" => EditCommand::MoveWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movebigwordrightend" => EditCommand::MoveBigWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movetoposition" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let select = extract_value("select", record, span)
|
||||
@ -1144,6 +1151,13 @@ fn edit_from_record(
|
||||
EditCommand::InsertString(value.to_expanded_string("", config))
|
||||
}
|
||||
"insertnewline" => EditCommand::InsertNewline,
|
||||
"replacechar" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::ReplaceChar(char)
|
||||
}
|
||||
// `EditCommand::ReplaceChars` - Internal hack not sanely implementable as a
|
||||
// standalone binding
|
||||
"backspace" => EditCommand::Backspace,
|
||||
"delete" => EditCommand::Delete,
|
||||
"cutchar" => EditCommand::CutChar,
|
||||
@ -1151,6 +1165,7 @@ fn edit_from_record(
|
||||
"deleteword" => EditCommand::DeleteWord,
|
||||
"clear" => EditCommand::Clear,
|
||||
"cleartolineend" => EditCommand::ClearToLineEnd,
|
||||
"complete" => EditCommand::Complete,
|
||||
"cutcurrentline" => EditCommand::CutCurrentLine,
|
||||
"cutfromstart" => EditCommand::CutFromStart,
|
||||
"cutfromlinestart" => EditCommand::CutFromLineStart,
|
||||
@ -1167,6 +1182,7 @@ fn edit_from_record(
|
||||
"uppercaseword" => EditCommand::UppercaseWord,
|
||||
"lowercaseword" => EditCommand::LowercaseWord,
|
||||
"capitalizechar" => EditCommand::CapitalizeChar,
|
||||
"switchcasechar" => EditCommand::SwitchcaseChar,
|
||||
"swapwords" => EditCommand::SwapWords,
|
||||
"swapgraphemes" => EditCommand::SwapGraphemes,
|
||||
"undo" => EditCommand::Undo,
|
||||
@ -1223,17 +1239,64 @@ fn edit_from_record(
|
||||
.unwrap_or(false);
|
||||
EditCommand::MoveLeftBefore { c: char, select }
|
||||
}
|
||||
"complete" => EditCommand::Complete,
|
||||
"selectall" => EditCommand::SelectAll,
|
||||
"cutselection" => EditCommand::CutSelection,
|
||||
"copyselection" => EditCommand::CopySelection,
|
||||
"paste" => EditCommand::Paste,
|
||||
"copyfromstart" => EditCommand::CopyFromStart,
|
||||
"copyfromlinestart" => EditCommand::CopyFromLineStart,
|
||||
"copytoend" => EditCommand::CopyToEnd,
|
||||
"copytolineend" => EditCommand::CopyToLineEnd,
|
||||
"copycurrentline" => EditCommand::CopyCurrentLine,
|
||||
"copywordleft" => EditCommand::CopyWordLeft,
|
||||
"copybigwordleft" => EditCommand::CopyBigWordLeft,
|
||||
"copywordright" => EditCommand::CopyWordRight,
|
||||
"copybigwordright" => EditCommand::CopyBigWordRight,
|
||||
"copywordrighttonext" => EditCommand::CopyWordRightToNext,
|
||||
"copybigwordrighttonext" => EditCommand::CopyBigWordRightToNext,
|
||||
"copyleft" => EditCommand::CopyLeft,
|
||||
"copyright" => EditCommand::CopyRight,
|
||||
"copyrightuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyRightUntil(char)
|
||||
}
|
||||
"copyrightbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyRightBefore(char)
|
||||
}
|
||||
"copyleftuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyLeftUntil(char)
|
||||
}
|
||||
"copyleftbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyLeftBefore(char)
|
||||
}
|
||||
"swapcursorandanchor" => EditCommand::SwapCursorAndAnchor,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"cutselectionsystem" => EditCommand::CutSelectionSystem,
|
||||
"copyselection" => EditCommand::CopySelection,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"copyselectionsystem" => EditCommand::CopySelectionSystem,
|
||||
"paste" => EditCommand::Paste,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"pastesystem" => EditCommand::PasteSystem,
|
||||
"selectall" => EditCommand::SelectAll,
|
||||
"cutinside" => {
|
||||
let value = extract_value("left", record, span)?;
|
||||
let left = extract_char(value)?;
|
||||
let value = extract_value("right", record, span)?;
|
||||
let right = extract_char(value)?;
|
||||
EditCommand::CutInside { left, right }
|
||||
}
|
||||
"yankinside" => {
|
||||
let value = extract_value("left", record, span)?;
|
||||
let left = extract_char(value)?;
|
||||
let value = extract_value("right", record, span)?;
|
||||
let right = extract_char(value)?;
|
||||
EditCommand::YankInside { left, right }
|
||||
}
|
||||
str => {
|
||||
return Err(ShellError::InvalidValue {
|
||||
valid: "a reedline EditCommand".into(),
|
||||
|
@ -20,6 +20,7 @@ use nu_cmd_base::util::get_editor;
|
||||
use nu_color_config::StyleComputer;
|
||||
#[allow(deprecated)]
|
||||
use nu_engine::env_to_strings;
|
||||
use nu_engine::exit::cleanup_exit;
|
||||
use nu_parser::{lex, parse, trim_quotes_str};
|
||||
use nu_protocol::shell_error::io::IoError;
|
||||
use nu_protocol::{
|
||||
@ -36,6 +37,7 @@ use reedline::{
|
||||
CursorConfig, CwdAwareHinter, DefaultCompleter, EditCommand, Emacs, FileBackedHistory,
|
||||
HistorySessionId, Reedline, SqliteBackedHistory, Vi,
|
||||
};
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
env::temp_dir,
|
||||
@ -692,7 +694,11 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
);
|
||||
|
||||
println!();
|
||||
return (false, stack, line_editor);
|
||||
|
||||
cleanup_exit((), engine_state, 0);
|
||||
|
||||
// if cleanup_exit didn't exit, we should keep running
|
||||
return (true, stack, line_editor);
|
||||
}
|
||||
Err(err) => {
|
||||
let message = err.to_string();
|
||||
@ -930,6 +936,9 @@ fn do_run_cmd(
|
||||
trace!("eval source: {}", s);
|
||||
|
||||
let mut cmds = s.split_whitespace();
|
||||
|
||||
let had_warning_before = engine_state.exit_warning_given.load(Ordering::SeqCst);
|
||||
|
||||
if let Some("exit") = cmds.next() {
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
let _ = parse(&mut working_set, None, s.as_bytes(), false);
|
||||
@ -938,13 +947,11 @@ fn do_run_cmd(
|
||||
match cmds.next() {
|
||||
Some(s) => {
|
||||
if let Ok(n) = s.parse::<i32>() {
|
||||
drop(line_editor);
|
||||
std::process::exit(n);
|
||||
return cleanup_exit(line_editor, engine_state, n);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
drop(line_editor);
|
||||
std::process::exit(0);
|
||||
return cleanup_exit(line_editor, engine_state, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -963,6 +970,14 @@ fn do_run_cmd(
|
||||
false,
|
||||
);
|
||||
|
||||
// if there was a warning before, and we got to this point, it means
|
||||
// the possible call to cleanup_exit did not occur.
|
||||
if had_warning_before && engine_state.is_interactive {
|
||||
engine_state
|
||||
.exit_warning_given
|
||||
.store(false, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
line_editor
|
||||
}
|
||||
|
||||
|
@ -309,6 +309,7 @@ fn find_matching_block_end_in_expr(
|
||||
.unwrap_or(expression.span.start);
|
||||
|
||||
return match &expression.expr {
|
||||
// TODO: Can't these be handled with an `_ => None` branch? Refactor
|
||||
Expr::Bool(_) => None,
|
||||
Expr::Int(_) => None,
|
||||
Expr::Float(_) => None,
|
||||
@ -335,6 +336,28 @@ fn find_matching_block_end_in_expr(
|
||||
Expr::Nothing => None,
|
||||
Expr::Garbage => None,
|
||||
|
||||
Expr::AttributeBlock(ab) => ab
|
||||
.attributes
|
||||
.iter()
|
||||
.find_map(|attr| {
|
||||
find_matching_block_end_in_expr(
|
||||
line,
|
||||
working_set,
|
||||
&attr.expr,
|
||||
global_span_offset,
|
||||
global_cursor_offset,
|
||||
)
|
||||
})
|
||||
.or_else(|| {
|
||||
find_matching_block_end_in_expr(
|
||||
line,
|
||||
working_set,
|
||||
&ab.item,
|
||||
global_span_offset,
|
||||
global_cursor_offset,
|
||||
)
|
||||
}),
|
||||
|
||||
Expr::Table(table) => {
|
||||
if expr_last == global_cursor_offset {
|
||||
// cursor is at table end
|
||||
|
File diff suppressed because it is too large
@ -14,7 +14,7 @@ fn create_default_context() -> EngineState {
|
||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||
}
|
||||
|
||||
// creates a new engine with the current path into the completions fixtures folder
|
||||
/// creates a new engine with the current path into the completions fixtures folder
|
||||
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("completions");
|
||||
@ -69,7 +69,26 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
(dir, dir_str, engine_state, stack)
|
||||
}
|
||||
|
||||
// creates a new engine with the current path into the completions fixtures folder
|
||||
/// Adds pseudo PATH env for external completion tests
|
||||
pub fn new_external_engine() -> EngineState {
|
||||
let mut engine = create_default_context();
|
||||
let dir = fs::fixtures().join("external_completions").join("path");
|
||||
let dir_str = dir.to_string_lossy().to_string();
|
||||
let internal_span = nu_protocol::Span::new(0, dir_str.len());
|
||||
engine.add_env_var(
|
||||
"PATH".to_string(),
|
||||
Value::List {
|
||||
vals: vec![Value::String {
|
||||
val: dir_str,
|
||||
internal_span,
|
||||
}],
|
||||
internal_span,
|
||||
},
|
||||
);
|
||||
engine
|
||||
}
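A possible way a test could consume this helper (illustrative only; it assumes `EngineState::get_env_var` is available in this test context):

#[test]
fn external_engine_exposes_pseudo_path() {
    // The helper above registers a single fixture directory under PATH.
    let engine_state = new_external_engine();
    assert!(engine_state.get_env_var("PATH").is_some());
}
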
|
||||
|
||||
/// creates a new engine with the current path into the completions fixtures folder
|
||||
pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("dotnu_completions");
|
||||
@ -86,6 +105,23 @@ pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Add $nu
|
||||
engine_state.generate_nu_constant();
|
||||
|
||||
// const $NU_LIB_DIRS
|
||||
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||
let var_id = working_set.add_variable(
|
||||
b"$NU_LIB_DIRS".into(),
|
||||
Span::unknown(),
|
||||
nu_protocol::Type::List(Box::new(nu_protocol::Type::String)),
|
||||
false,
|
||||
);
|
||||
working_set.set_variable_const_val(
|
||||
var_id,
|
||||
Value::test_list(vec![
|
||||
Value::string(file(dir.join("lib-dir1")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
]),
|
||||
);
|
||||
let _ = engine_state.merge_delta(working_set.render());
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
||||
|
||||
@ -95,17 +131,12 @@ pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
"TEST".to_string(),
|
||||
Value::string("NUSHELL".to_string(), dir_span),
|
||||
);
|
||||
|
||||
stack.add_env_var(
|
||||
"NU_LIB_DIRS".to_string(),
|
||||
Value::list(
|
||||
vec![
|
||||
Value::string(file(dir.join("lib-dir1")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir2")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
],
|
||||
dir_span,
|
||||
),
|
||||
"NU_LIB_DIRS".into(),
|
||||
Value::test_list(vec![
|
||||
Value::string(file(dir.join("lib-dir2")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
]),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
@ -185,8 +216,8 @@ pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
    (dir, dir_str, engine_state, stack)
}

// match a list of suggestions with the expected values
pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>) {
/// match a list of suggestions with the expected values
pub fn match_suggestions(expected: &Vec<&str>, suggestions: &Vec<Suggestion>) {
    let expected_len = expected.len();
    let suggestions_len = suggestions.len();
    if expected_len != suggestions_len {
@ -197,28 +228,34 @@ pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>)
        )
    }

    let suggestoins_str = suggestions
    let suggestions_str = suggestions
        .iter()
        .map(|it| it.value.clone())
        .map(|it| it.value.as_str())
        .collect::<Vec<_>>();

    assert_eq!(expected, &suggestoins_str);
    assert_eq!(expected, &suggestions_str);
}

// append the separator to the converted path
/// match a list of suggestions with the expected values
pub fn match_suggestions_by_string(expected: &[String], suggestions: &Vec<Suggestion>) {
    let expected = expected.iter().map(|it| it.as_str()).collect::<Vec<_>>();
    match_suggestions(&expected, suggestions);
}

/// append the separator to the converted path
pub fn folder(path: impl Into<PathBuf>) -> String {
    let mut converted_path = file(path);
    converted_path.push(MAIN_SEPARATOR);
    converted_path
}

// convert a given path to string
/// convert a given path to string
pub fn file(path: impl Into<PathBuf>) -> String {
    path.into().into_os_string().into_string().unwrap()
}

// merge_input executes the given input into the engine
// and merges the state
/// merge_input executes the given input into the engine
/// and merges the state
pub fn merge_input(
    input: &[u8],
    engine_state: &mut EngineState,

@ -1,3 +1,5 @@
pub mod completions_helpers;

pub use completions_helpers::{file, folder, match_suggestions, merge_input, new_engine};
pub use completions_helpers::{
    file, folder, match_suggestions, match_suggestions_by_string, merge_input, new_engine,
};

|
@ -26,7 +26,7 @@ impl Command for BitsAnd {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
|
@ -27,7 +27,7 @@ impl Command for BitsOr {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
|
@ -37,7 +37,7 @@ impl Command for BitsRol {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to rotate left")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to rotate left.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
|
@ -37,7 +37,7 @@ impl Command for BitsRor {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to rotate right")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to rotate right.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
|
@ -40,7 +40,7 @@ impl Command for BitsShl {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to shift left")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to shift left.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
|
@ -37,7 +37,7 @@ impl Command for BitsShr {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to shift right")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to shift right.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
|
@ -27,7 +27,7 @@ impl Command for BitsXor {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
|
@ -1,74 +0,0 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{report_parse_warning, ParseWarning};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Fmt;
|
||||
|
||||
impl Command for Fmt {
|
||||
fn name(&self) -> &str {
|
||||
"fmt"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Format a number."
|
||||
}
|
||||
|
||||
fn signature(&self) -> nu_protocol::Signature {
|
||||
Signature::build("fmt")
|
||||
.input_output_types(vec![(Type::Number, Type::record())])
|
||||
.category(Category::Deprecated)
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Get a record containing multiple formats for the number 42",
|
||||
example: "42 | fmt",
|
||||
result: Some(Value::test_record(record! {
|
||||
"binary" => Value::test_string("0b101010"),
|
||||
"debug" => Value::test_string("42"),
|
||||
"display" => Value::test_string("42"),
|
||||
"lowerexp" => Value::test_string("4.2e1"),
|
||||
"lowerhex" => Value::test_string("0x2a"),
|
||||
"octal" => Value::test_string("0o52"),
|
||||
"upperexp" => Value::test_string("4.2E1"),
|
||||
"upperhex" => Value::test_string("0x2A"),
|
||||
})),
|
||||
}]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
report_parse_warning(
|
||||
&StateWorkingSet::new(engine_state),
|
||||
&ParseWarning::DeprecatedWarning {
|
||||
old_command: "fmt".into(),
|
||||
new_suggestion: "use `format number`".into(),
|
||||
span: head,
|
||||
url: "`help format number`".into(),
|
||||
},
|
||||
);
|
||||
crate::extra::strings::format::format_number(engine_state, stack, call, input)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(Fmt {})
|
||||
}
|
||||
}
|
@ -1,3 +0,0 @@
|
||||
mod fmt;
|
||||
|
||||
pub(crate) use fmt::Fmt;
|
@ -26,7 +26,7 @@ impl Command for EachWhile {
|
||||
.required(
|
||||
"closure",
|
||||
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
|
||||
"the closure to run",
|
||||
"The closure to run.",
|
||||
)
|
||||
.category(Category::Filters)
|
||||
}
|
||||
|
@ -20,7 +20,7 @@ impl Command for Rotate {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::String,
|
||||
"the names to give columns once rotated",
|
||||
"The names to give columns once rotated.",
|
||||
)
|
||||
.category(Category::Filters)
|
||||
.allow_variants_without_examples(true)
|
||||
|
@ -16,7 +16,7 @@ impl Command for UpdateCells {
|
||||
.required(
|
||||
"closure",
|
||||
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
|
||||
"the closure to run an update for each cell",
|
||||
"The closure to run an update for each cell.",
|
||||
)
|
||||
.named(
|
||||
"columns",
|
||||
|
@ -1,5 +1,4 @@
|
||||
mod bits;
|
||||
mod conversions;
|
||||
mod filters;
|
||||
mod formats;
|
||||
mod math;
|
||||
@ -27,8 +26,6 @@ pub fn add_extra_command_context(mut engine_state: EngineState) -> EngineState {
|
||||
};
|
||||
}
|
||||
|
||||
bind_command!(conversions::Fmt);
|
||||
|
||||
bind_command!(
|
||||
filters::UpdateCells,
|
||||
filters::EachWhile,
|
||||
|
@ -38,7 +38,7 @@ impl Command for SubCommand {
|
||||
.rest(
|
||||
"cell path",
|
||||
SyntaxShape::CellPath,
|
||||
"for a data structure input, add a gradient to strings at the given cell paths",
|
||||
"For a data structure input, add a gradient to strings at the given cell paths.",
|
||||
)
|
||||
.input_output_types(vec![
|
||||
(Type::String, Type::String),
|
||||
|
@ -40,7 +40,7 @@ impl Command for FormatBits {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
"for a data structure input, convert data at the given cell paths",
|
||||
"For a data structure input, convert data at the given cell paths.",
|
||||
)
|
||||
.category(Category::Conversions)
|
||||
}
|
||||
|
@ -18,7 +18,7 @@ impl Command for FormatPattern {
|
||||
.required(
|
||||
"pattern",
|
||||
SyntaxShape::String,
|
||||
"the pattern to output. e.g.) \"{foo}: {bar}\"",
|
||||
"The pattern to output. e.g.) \"{foo}: {bar}\".",
|
||||
)
|
||||
.allow_variants_without_examples(true)
|
||||
.category(Category::Strings)
|
||||
|
@ -4,5 +4,4 @@ mod number;
|
||||
|
||||
pub(crate) use bits::FormatBits;
|
||||
pub(crate) use command::FormatPattern;
|
||||
// TODO remove `format_number` visibility after removal of into bits
|
||||
pub(crate) use number::{format_number, FormatNumber};
|
||||
pub(crate) use number::FormatNumber;
|
||||
|
@ -20,7 +20,7 @@ impl Command for FormatNumber {
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["display", "render", "format"]
|
||||
vec!["display", "render", "fmt"]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -25,7 +25,7 @@ impl Command for SubCommand {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
"For a data structure input, convert strings at the given cell paths",
|
||||
"For a data structure input, convert strings at the given cell paths.",
|
||||
)
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
@ -25,7 +25,7 @@ impl Command for SubCommand {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
"For a data structure input, convert strings at the given cell paths",
|
||||
"For a data structure input, convert strings at the given cell paths.",
|
||||
)
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
@ -25,7 +25,7 @@ impl Command for SubCommand {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
"For a data structure input, convert strings at the given cell paths",
|
||||
"For a data structure input, convert strings at the given cell paths.",
|
||||
)
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
@ -25,7 +25,7 @@ impl Command for SubCommand {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
"For a data structure input, convert strings at the given cell paths",
|
||||
"For a data structure input, convert strings at the given cell paths.",
|
||||
)
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
@ -25,7 +25,7 @@ impl Command for SubCommand {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
"For a data structure input, convert strings at the given cell paths",
|
||||
"For a data structure input, convert strings at the given cell paths.",
|
||||
)
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
@ -25,7 +25,7 @@ impl Command for SubCommand {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
"For a data structure input, convert strings at the given cell paths",
|
||||
"For a data structure input, convert strings at the given cell paths.",
|
||||
)
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
@ -26,6 +26,10 @@ shadow-rs = { version = "0.38", default-features = false }
|
||||
[build-dependencies]
|
||||
shadow-rs = { version = "0.38", default-features = false }
|
||||
|
||||
[dev-dependencies]
|
||||
quickcheck = { workspace = true }
|
||||
quickcheck_macros = { workspace = true }
|
||||
|
||||
[features]
|
||||
default = ["os"]
|
||||
os = [
|
||||
@ -42,4 +46,4 @@ mimalloc = []
|
||||
trash-support = []
|
||||
sqlite = []
|
||||
static-link-openssl = []
|
||||
system-clipboard = []
|
||||
system-clipboard = []
|
||||
|
61
crates/nu-cmd-lang/src/core_commands/attr/category.rs
Normal file
@ -0,0 +1,61 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct AttrCategory;

impl Command for AttrCategory {
    fn name(&self) -> &str {
        "attr category"
    }

    fn signature(&self) -> Signature {
        Signature::build("attr category")
            .input_output_type(Type::Nothing, Type::list(Type::String))
            .allow_variants_without_examples(true)
            .required(
                "category",
                SyntaxShape::String,
                "Category of the custom command.",
            )
            .category(Category::Core)
    }

    fn description(&self) -> &str {
        "Attribute for adding a category to custom commands."
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let arg: String = call.req(engine_state, stack, 0)?;
        Ok(Value::string(arg, call.head).into_pipeline_data())
    }

    fn run_const(
        &self,
        working_set: &StateWorkingSet,
        call: &Call,
        _input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let arg: String = call.req_const(working_set, 0)?;
        Ok(Value::string(arg, call.head).into_pipeline_data())
    }

    fn is_const(&self) -> bool {
        true
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Add a category to a custom command",
            example: r###"# Double numbers
@category math
def double []: [number -> number] { $in * 2 }"###,
            result: None,
        }]
    }
}

159
crates/nu-cmd-lang/src/core_commands/attr/example.rs
Normal file
@ -0,0 +1,159 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AttrExample;
|
||||
|
||||
impl Command for AttrExample {
|
||||
fn name(&self) -> &str {
|
||||
"attr example"
|
||||
}
|
||||
|
||||
// TODO: When const closure are available, switch to using them for the `example` argument
|
||||
// rather than a block. That should remove the need for `requires_ast_for_arguments` to be true
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("attr example")
|
||||
.input_output_types(vec![(
|
||||
Type::Nothing,
|
||||
Type::Record(
|
||||
[
|
||||
("description".into(), Type::String),
|
||||
("example".into(), Type::String),
|
||||
]
|
||||
.into(),
|
||||
),
|
||||
)])
|
||||
.allow_variants_without_examples(true)
|
||||
.required(
|
||||
"description",
|
||||
SyntaxShape::String,
|
||||
"Description of the example.",
|
||||
)
|
||||
.required(
|
||||
"example",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Block, SyntaxShape::String]),
|
||||
"Example code snippet.",
|
||||
)
|
||||
.named(
|
||||
"result",
|
||||
SyntaxShape::Any,
|
||||
"Expected output of example.",
|
||||
None,
|
||||
)
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Attribute for adding examples to custom commands."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let description: Spanned<String> = call.req(engine_state, stack, 0)?;
|
||||
let result: Option<Value> = call.get_flag(engine_state, stack, "result")?;
|
||||
|
||||
let example_string: Result<String, _> = call.req(engine_state, stack, 1);
|
||||
let example_expr = call
|
||||
.positional_nth(stack, 1)
|
||||
.ok_or(ShellError::MissingParameter {
|
||||
param_name: "example".into(),
|
||||
span: call.head,
|
||||
})?;
|
||||
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
attr_example_impl(
|
||||
example_expr,
|
||||
example_string,
|
||||
&working_set,
|
||||
call,
|
||||
description,
|
||||
result,
|
||||
)
|
||||
}
|
||||
|
||||
fn run_const(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let description: Spanned<String> = call.req_const(working_set, 0)?;
|
||||
let result: Option<Value> = call.get_flag_const(working_set, "result")?;
|
||||
|
||||
let example_string: Result<String, _> = call.req_const(working_set, 1);
|
||||
let example_expr =
|
||||
call.assert_ast_call()?
|
||||
.positional_nth(1)
|
||||
.ok_or(ShellError::MissingParameter {
|
||||
param_name: "example".into(),
|
||||
span: call.head,
|
||||
})?;
|
||||
|
||||
attr_example_impl(
|
||||
example_expr,
|
||||
example_string,
|
||||
working_set,
|
||||
call,
|
||||
description,
|
||||
result,
|
||||
)
|
||||
}
|
||||
|
||||
fn is_const(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn requires_ast_for_arguments(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Add examples to custom command",
|
||||
example: r###"# Double numbers
|
||||
@example "double an int" { 2 | double } --result 4
|
||||
@example "double a float" { 0.25 | double } --result 0.5
|
||||
def double []: [number -> number] { $in * 2 }"###,
|
||||
result: None,
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
fn attr_example_impl(
|
||||
example_expr: &nu_protocol::ast::Expression,
|
||||
example_string: Result<String, ShellError>,
|
||||
working_set: &StateWorkingSet<'_>,
|
||||
call: &Call<'_>,
|
||||
description: Spanned<String>,
|
||||
result: Option<Value>,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let example_content = match example_expr.as_block() {
|
||||
Some(block_id) => {
|
||||
let block = working_set.get_block(block_id);
|
||||
let contents =
|
||||
working_set.get_span_contents(block.span.expect("a block must have a span"));
|
||||
let contents = contents
|
||||
.strip_prefix(b"{")
|
||||
.and_then(|x| x.strip_suffix(b"}"))
|
||||
.unwrap_or(contents)
|
||||
.trim_ascii();
|
||||
String::from_utf8_lossy(contents).into_owned()
|
||||
}
|
||||
None => example_string?,
|
||||
};
|
||||
|
||||
let mut rec = record! {
|
||||
"description" => Value::string(description.item, description.span),
|
||||
"example" => Value::string(example_content, example_expr.span),
|
||||
};
|
||||
if let Some(result) = result {
|
||||
rec.push("result", result);
|
||||
}
|
||||
|
||||
Ok(Value::record(rec, call.head).into_pipeline_data())
|
||||
}
|
7
crates/nu-cmd-lang/src/core_commands/attr/mod.rs
Normal file
@ -0,0 +1,7 @@
|
||||
mod category;
|
||||
mod example;
|
||||
mod search_terms;
|
||||
|
||||
pub use category::AttrCategory;
|
||||
pub use example::AttrExample;
|
||||
pub use search_terms::AttrSearchTerms;
|
57
crates/nu-cmd-lang/src/core_commands/attr/search_terms.rs
Normal file
@ -0,0 +1,57 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AttrSearchTerms;
|
||||
|
||||
impl Command for AttrSearchTerms {
|
||||
fn name(&self) -> &str {
|
||||
"attr search-terms"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("attr search-terms")
|
||||
.input_output_type(Type::Nothing, Type::list(Type::String))
|
||||
.allow_variants_without_examples(true)
|
||||
.rest("terms", SyntaxShape::String, "Search terms.")
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Attribute for adding search terms to custom commands."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let args = call.rest(engine_state, stack, 0)?;
|
||||
Ok(Value::list(args, call.head).into_pipeline_data())
|
||||
}
|
||||
|
||||
fn run_const(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let args = call.rest_const(working_set, 0)?;
|
||||
Ok(Value::list(args, call.head).into_pipeline_data())
|
||||
}
|
||||
|
||||
fn is_const(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Add search terms to a custom command",
|
||||
example: r###"# Double numbers
|
||||
@search-terms multiply times
|
||||
def double []: [number -> number] { $in * 2 }"###,
|
||||
result: None,
|
||||
}]
|
||||
}
|
||||
}
|
@ -72,6 +72,19 @@ impl Command for Const {
}
}

fn run_const(
&self,
_working_set: &StateWorkingSet,
_call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
Ok(PipelineData::empty())
}

fn is_const(&self) -> bool {
true
}

fn examples(&self) -> Vec<Example> {
vec![
Example {
@ -1,4 +1,5 @@
mod alias;
mod attr;
mod break_;
mod collect;
mod const_;
@ -35,6 +36,7 @@ mod version;
mod while_;

pub use alias::Alias;
pub use attr::*;
pub use break_::Break;
pub use collect::Collect;
pub use const_::Const;
@ -119,31 +119,40 @@ impl Command for OverlayUse {

// Evaluate the export-env block (if any) and keep its environment
if let Some(block_id) = module.env_block {
let maybe_path = find_in_dirs_env(
let maybe_file_path_or_dir = find_in_dirs_env(
&name_arg.item,
engine_state,
caller_stack,
get_dirs_var_from_call(caller_stack, call),
)?;

let block = engine_state.get_block(block_id);
let mut callee_stack = caller_stack
.gather_captures(engine_state, &block.captures)
.reset_pipes();

if let Some(path) = &maybe_path {
if let Some(path) = &maybe_file_path_or_dir {
// Set the currently evaluated directory, if the argument is a valid path
let mut parent = path.clone();
parent.pop();

let parent = if path.is_dir() {
path.clone()
} else {
let mut parent = path.clone();
parent.pop();
parent
};
let file_pwd = Value::string(parent.to_string_lossy(), call.head);

callee_stack.add_env_var("FILE_PWD".to_string(), file_pwd);
}

if let Some(file_path) = &maybe_path {
let file_path = Value::string(file_path.to_string_lossy(), call.head);
callee_stack.add_env_var("CURRENT_FILE".to_string(), file_path);
if let Some(path) = &maybe_file_path_or_dir {
let module_file_path = if path.is_dir() {
// the existence of `mod.nu` is verified in parsing time
// so it's safe to use it here.
Value::string(path.join("mod.nu").to_string_lossy(), call.head)
} else {
Value::string(path.to_string_lossy(), call.head)
};
callee_stack.add_env_var("CURRENT_FILE".to_string(), module_file_path);
}

let eval_block = get_eval_block(engine_state);
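Note: the overlay-use change above makes `$env.CURRENT_FILE` point at the module's `mod.nu` when an overlay is loaded from a directory, while `$env.FILE_PWD` stays the directory itself. A hedged Nushell sketch (the module name and env var are hypothetical, not from this diff):

# hypothetical module ./my-overlay/mod.nu containing:
#   export-env { $env.OVERLAY_SOURCE = $env.CURRENT_FILE }
overlay use ./my-overlay/
$env.OVERLAY_SOURCE
# with this change the export-env block should see .../my-overlay/mod.nu
# rather than the bare .../my-overlay directory path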
@ -10,7 +10,7 @@ impl Command for Return {
}

fn description(&self) -> &str {
"Return early from a function."
"Return early from a custom command."
}

fn signature(&self) -> nu_protocol::Signature {
@ -16,6 +16,9 @@ pub fn create_default_context() -> EngineState {
// Core
bind_command! {
Alias,
AttrCategory,
AttrExample,
AttrSearchTerms,
Break,
Collect,
Const,
@ -4,6 +4,8 @@ mod core_commands;
mod default_context;
pub mod example_support;
mod example_test;
#[cfg(test)]
mod parse_const_test;

pub use core_commands::*;
pub use default_context::*;
19
crates/nu-cmd-lang/src/parse_const_test.rs
Normal file
@ -0,0 +1,19 @@
use nu_protocol::{engine::StateWorkingSet, Span};
use quickcheck_macros::quickcheck;

#[quickcheck]
fn quickcheck_parse(data: String) -> bool {
let (tokens, err) = nu_parser::lex(data.as_bytes(), 0, b"", b"", true);

if err.is_none() {
let context = crate::create_default_context();
{
let mut working_set = StateWorkingSet::new(&context);
let _ = working_set.add_file("quickcheck".into(), data.as_bytes());

let _ =
nu_parser::parse_block(&mut working_set, &tokens, Span::new(0, 0), false, false);
}
}
true
}
@ -33,7 +33,7 @@ impl Command for PluginAdd {
.required(
"filename",
SyntaxShape::String,
"Path to the executable for the plugin",
"Path to the executable for the plugin.",
)
.category(Category::Plugin)
}
@ -28,7 +28,7 @@ impl Command for PluginRm {
.required(
"name",
SyntaxShape::String,
"The name, or filename, of the plugin to remove",
"The name, or filename, of the plugin to remove.",
)
.category(Category::Plugin)
}
@ -16,7 +16,7 @@ impl Command for PluginStop {
.required(
"name",
SyntaxShape::String,
"The name, or filename, of the plugin to stop",
"The name, or filename, of the plugin to stop.",
)
.category(Category::Plugin)
}
@ -25,7 +25,7 @@ impl Command for PluginUse {
.required(
"name",
SyntaxShape::String,
"The name, or filename, of the plugin to load",
"The name, or filename, of the plugin to load.",
)
.category(Category::Plugin)
}
@ -240,7 +240,9 @@ fn test_computable_style_closure_errors() {
];
let actual_repl = nu!(nu_repl_code(&inp));
// Check that the error was printed
assert!(actual_repl.err.contains("type mismatch for operator"));
assert!(actual_repl
.err
.contains("nu::shell::operator_incompatible_types"));
// Check that the value was printed
assert!(actual_repl.out.contains("bell"));
}
@ -40,13 +40,19 @@ byteorder = { workspace = true }
bytesize = { workspace = true }
calamine = { workspace = true, features = ["dates"] }
chardetng = { workspace = true }
chrono = { workspace = true, features = ["std", "unstable-locales", "clock"], default-features = false }
chrono = { workspace = true, features = [
"std",
"unstable-locales",
"clock",
], default-features = false }
chrono-humanize = { workspace = true }
chrono-tz = { workspace = true }
crossterm = { workspace = true, optional = true }
csv = { workspace = true }
devicons = { workspace = true }
dialoguer = { workspace = true, default-features = false, features = ["fuzzy-select"] }
dialoguer = { workspace = true, default-features = false, features = [
"fuzzy-select",
] }
digest = { workspace = true, default-features = false }
dtparse = { workspace = true }
encoding_rs = { workspace = true }
@ -58,7 +64,9 @@ indexmap = { workspace = true }
indicatif = { workspace = true }
itertools = { workspace = true }
log = { workspace = true }
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
lscolors = { workspace = true, default-features = false, features = [
"nu-ansi-term",
] }
md5 = { workspace = true }
mime = { workspace = true }
mime_guess = { workspace = true }
@ -78,7 +86,11 @@ rand = { workspace = true, optional = true }
getrandom = { workspace = true, optional = true }
rayon = { workspace = true }
roxmltree = { workspace = true }
rusqlite = { workspace = true, features = ["bundled", "backup", "chrono"], optional = true }
rusqlite = { workspace = true, features = [
"bundled",
"backup",
"chrono",
], optional = true }
rmp = { workspace = true }
scopeguard = { workspace = true }
serde = { workspace = true, features = ["derive"] }
@ -92,7 +104,12 @@ titlecase = { workspace = true }
toml = { workspace = true, features = ["preserve_order"] }
unicode-segmentation = { workspace = true }
update-informer = { workspace = true, optional = true }
ureq = { workspace = true, default-features = false, features = ["charset", "gzip", "json", "native-tls"], optional = true }
ureq = { workspace = true, default-features = false, features = [
"charset",
"gzip",
"json",
"native-tls",
], optional = true }
url = { workspace = true }
uu_cp = { workspace = true, optional = true }
uu_mkdir = { workspace = true, optional = true }
@ -101,12 +118,18 @@ uu_mv = { workspace = true, optional = true }
uu_touch = { workspace = true, optional = true }
uu_uname = { workspace = true, optional = true }
uu_whoami = { workspace = true, optional = true }
uuid = { workspace = true, features = ["v4"], optional = true }
uuid = { workspace = true, features = [
"v1",
"v3",
"v4",
"v5",
"v7",
], optional = true }
v_htmlescape = { workspace = true }
wax = { workspace = true }
which = { workspace = true, optional = true }
unicode-width = { workspace = true }
data-encoding = { version = "2.7.0", features = ["alloc"] }
data-encoding = { version = "2.8.0", features = ["alloc"] }
web-time = { workspace = true }

[target.'cfg(windows)'.dependencies]
@ -117,7 +140,11 @@ uucore = { workspace = true, features = ["mode"] }

[target.'cfg(unix)'.dependencies]
umask = { workspace = true }
nix = { workspace = true, default-features = false, features = ["user", "resource", "pthread"] }
nix = { workspace = true, default-features = false, features = [
"user",
"resource",
"pthread",
] }

[target.'cfg(any(target_os = "linux", target_os = "android"))'.dependencies]
procfs = { workspace = true }
@ -164,12 +191,7 @@ os = [
# The dependencies listed below need 'getrandom'.
# They work with JS (usually with wasm-bindgen) or regular OS support.
# Hence they are also put under the 'os' feature to avoid repetition.
js = [
"getrandom",
"getrandom/js",
"rand",
"uuid",
]
js = ["getrandom", "getrandom/js", "rand", "uuid"]

# These dependencies require networking capabilities, especially the http
# interface requires openssl which is not easy to embed into wasm,
@ -182,10 +204,7 @@ network = [
"uuid",
]

plugin = [
"nu-parser/plugin",
"os",
]
plugin = ["nu-parser/plugin", "os"]
sqlite = ["rusqlite"]
trash-support = ["trash"]

@ -195,8 +214,6 @@ nu-test-support = { path = "../nu-test-support", version = "0.102.1" }

dirs = { workspace = true }
mockito = { workspace = true, default-features = false }
quickcheck = { workspace = true }
quickcheck_macros = { workspace = true }
rstest = { workspace = true, default-features = false }
rstest_reuse = { workspace = true }
pretty_assertions = { workspace = true }
@ -408,13 +408,10 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
Err(reason) => {
match NaiveDateTime::parse_from_str(val, &dt.0) {
Ok(d) => {
let local_offset = *Local::now().offset();
let dt_fixed =
TimeZone::from_local_datetime(&local_offset, &d)
.single()
.unwrap_or_default();
Local.from_local_datetime(&d).single().unwrap_or_default();

Value::date (dt_fixed,head)
Value::date(dt_fixed.into(),head)
}
Err(_) => {
Value::error (
@ -1,15 +1,15 @@
use std::sync::Arc;

use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
use nu_protocol::{shell_error::into_code, Config};
use nu_utils::get_system_locale;
use num_format::ToFormattedString;
use std::sync::Arc;

struct Arguments {
decimals_value: Option<i64>,
cell_paths: Option<Vec<CellPath>>,
config: Arc<Config>,
group_digits: bool,
}

impl CmdArgument for Arguments {
@ -52,6 +52,11 @@ impl Command for SubCommand {
SyntaxShape::CellPath,
"For a data structure input, convert data at the given cell paths.",
)
.switch(
"group-digits",
"group digits together by the locale specific thousands separator",
Some('g'),
)
.named(
"decimals",
SyntaxShape::Int,
@ -148,6 +153,7 @@ fn string_helper(
) -> Result<PipelineData, ShellError> {
let head = call.head;
let decimals_value: Option<i64> = call.get_flag(engine_state, stack, "decimals")?;
let group_digits = call.has_flag(engine_state, stack, "group-digits")?;
if let Some(decimal_val) = decimals_value {
if decimal_val.is_negative() {
return Err(ShellError::TypeMismatch {
@ -182,6 +188,7 @@ fn string_helper(
decimals_value,
cell_paths,
config,
group_digits,
};
operate(action, args, input, head, engine_state.signals())
}
@ -190,10 +197,12 @@ fn string_helper(
fn action(input: &Value, args: &Arguments, span: Span) -> Value {
let digits = args.decimals_value;
let config = &args.config;
let group_digits = args.group_digits;

match input {
Value::Int { val, .. } => {
let decimal_value = digits.unwrap_or(0) as usize;
let res = format_int(*val, false, decimal_value);
let res = format_int(*val, group_digits, decimal_value);
Value::string(res, span)
}
Value::Float { val, .. } => {
@ -206,11 +215,24 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
}
Value::Bool { val, .. } => Value::string(val.to_string(), span),
Value::Date { val, .. } => Value::string(val.format("%c").to_string(), span),
Value::String { val, .. } => Value::string(val.to_string(), span),
Value::String { val, .. } => {
if group_digits {
let number = val.parse::<i64>().unwrap_or_default();
let decimal_value = digits.unwrap_or(0) as usize;
Value::string(format_int(number, group_digits, decimal_value), span)
} else {
Value::string(val.to_string(), span)
}
}
Value::Glob { val, .. } => Value::string(val.to_string(), span),

Value::Filesize { val: _, .. } => {
Value::string(input.to_expanded_string(", ", config), span)
Value::Filesize { val, .. } => {
if group_digits {
let decimal_value = digits.unwrap_or(0) as usize;
Value::string(format_int(val.get(), group_digits, decimal_value), span)
} else {
Value::string(input.to_expanded_string(", ", config), span)
}
}
Value::Duration { val: _, .. } => Value::string(input.to_expanded_string("", config), span),
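Note: the new --group-digits switch above is threaded from string_helper into action, where integers, numeric strings, and file sizes are passed through format_int with locale-aware grouping. A hedged Nushell sketch (this hunk does not show the command name; assuming it is `into string`, and the separator depends on the system locale):

1234567 | into string --group-digits     # e.g. "1,234,567" under an en_US locale
"8500000" | into string --group-digits   # numeric strings are parsed, then grouped
2gb | into string --group-digits         # file sizes group their byte count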
@ -70,7 +70,6 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
ParEach,
ChunkBy,
Prepend,
Range,
Reduce,
Reject,
Rename,
@ -314,6 +313,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
Where,
ToXml,
ToYaml,
ToYml,
};

// Viewers
@ -448,8 +448,17 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
// Experimental
bind_command! {
IsAdmin,
JobSpawn,
JobList,
JobKill,
Job,
};

#[cfg(unix)]
bind_command! {
JobUnfreeze,
}

// Removed
bind_command! {
LetEnv,
3
crates/nu-command/src/env/config/config_.rs
vendored
@ -106,6 +106,7 @@ pub(super) fn start_editor(
let child = ForegroundChild::spawn(
command,
engine_state.is_interactive,
engine_state.is_background_job(),
&engine_state.pipeline_externals_state,
);

@ -119,7 +120,7 @@ pub(super) fn start_editor(
})?;

// Wrap the output into a `PipelineData::ByteStream`.
let child = nu_protocol::process::ChildProcess::new(child, None, false, call.head)?;
let child = nu_protocol::process::ChildProcess::new(child, None, false, call.head, None)?;
Ok(PipelineData::ByteStream(
ByteStream::child(child, call.head),
None,
@ -72,7 +72,7 @@ fn convert_string_to_value(
Err(x) => match x {
nu_json::Error::Syntax(_, row, col) => {
let label = x.to_string();
let label_span = convert_row_column_to_span(row, col, string_input);
let label_span = Span::from_row_column(row, col, string_input);
Err(ShellError::GenericError {
error: "Error while parsing JSON text".into(),
msg: "error parsing JSON text".into(),
@ -173,23 +173,3 @@ fn expand_closure(
_ => None,
}
}

// Converts row+column to a Span, assuming bytes (1-based rows)
fn convert_row_column_to_span(row: usize, col: usize, contents: &str) -> Span {
let mut cur_row = 1;
let mut cur_col = 1;

for (offset, curr_byte) in contents.bytes().enumerate() {
if curr_byte == b'\n' {
cur_row += 1;
cur_col = 1;
}
if cur_row >= row && cur_col >= col {
return Span::new(offset, offset);
} else {
cur_col += 1;
}
}

Span::new(contents.len(), contents.len())
}
10
crates/nu-command/src/env/config/config_reset.rs
vendored
@ -1,7 +1,5 @@
use chrono::Local;
use nu_engine::command_prelude::*;

use nu_protocol::shell_error::io::IoError;
use nu_utils::{get_scaffold_config, get_scaffold_env};
use std::{io::Write, path::PathBuf};

@ -61,7 +59,7 @@ impl Command for ConfigReset {
));
if let Err(err) = std::fs::rename(nu_config.clone(), &backup_path) {
return Err(ShellError::Io(IoError::new_with_additional_context(
err.kind(),
err.kind().not_found_as(NotFound::Directory),
span,
PathBuf::from(backup_path),
"config.nu could not be backed up",
@ -71,7 +69,7 @@ impl Command for ConfigReset {
if let Ok(mut file) = std::fs::File::create(&nu_config) {
if let Err(err) = writeln!(&mut file, "{config_file}") {
return Err(ShellError::Io(IoError::new_with_additional_context(
err.kind(),
err.kind().not_found_as(NotFound::File),
span,
PathBuf::from(nu_config),
"config.nu could not be written to",
@ -88,7 +86,7 @@ impl Command for ConfigReset {
backup_path.push(format!("oldenv-{}.nu", Local::now().format("%F-%H-%M-%S"),));
if let Err(err) = std::fs::rename(env_config.clone(), &backup_path) {
return Err(ShellError::Io(IoError::new_with_additional_context(
err.kind(),
err.kind().not_found_as(NotFound::Directory),
span,
PathBuf::from(backup_path),
"env.nu could not be backed up",
@ -98,7 +96,7 @@ impl Command for ConfigReset {
if let Ok(mut file) = std::fs::File::create(&env_config) {
if let Err(err) = writeln!(&mut file, "{config_file}") {
return Err(ShellError::Io(IoError::new_with_additional_context(
err.kind(),
err.kind().not_found_as(NotFound::File),
span,
PathBuf::from(env_config),
"env.nu could not be written to",
34
crates/nu-command/src/experimental/job.rs
Normal file
@ -0,0 +1,34 @@
use nu_engine::{command_prelude::*, get_full_help};

#[derive(Clone)]
pub struct Job;

impl Command for Job {
fn name(&self) -> &str {
"job"
}

fn signature(&self) -> Signature {
Signature::build("job")
.category(Category::Strings)
.input_output_types(vec![(Type::Nothing, Type::String)])
}

fn description(&self) -> &str {
"Various commands for working with background jobs."
}

fn extra_description(&self) -> &str {
"You must use one of the following subcommands. Using this command as-is will only produce this help message."
}

fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
Ok(Value::string(get_full_help(self, engine_state, stack), call.head).into_pipeline_data())
}
}
72
crates/nu-command/src/experimental/job_kill.rs
Normal file
@ -0,0 +1,72 @@
use nu_engine::command_prelude::*;
use nu_protocol::JobId;

#[derive(Clone)]
pub struct JobKill;

impl Command for JobKill {
fn name(&self) -> &str {
"job kill"
}

fn description(&self) -> &str {
"Kill a background job."
}

fn signature(&self) -> nu_protocol::Signature {
Signature::build("job kill")
.category(Category::Experimental)
.required("id", SyntaxShape::Int, "The id of the job to kill.")
.input_output_types(vec![(Type::Nothing, Type::Nothing)])
.allow_variants_without_examples(true)
}

fn search_terms(&self) -> Vec<&str> {
vec!["halt", "stop", "end", "close"]
}

fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;

let id_arg: Spanned<i64> = call.req(engine_state, stack, 0)?;

if id_arg.item < 0 {
return Err(ShellError::NeedsPositiveValue { span: id_arg.span });
}

let id: JobId = JobId::new(id_arg.item as usize);

let mut jobs = engine_state.jobs.lock().expect("jobs lock is poisoned!");

if jobs.lookup(id).is_none() {
return Err(ShellError::JobNotFound {
id: id.get(),
span: head,
});
}

jobs.kill_and_remove(id).map_err(|err| {
ShellError::Io(IoError::new_internal(
err.kind(),
"Failed to kill the requested job",
nu_protocol::location!(),
))
})?;

Ok(Value::nothing(head).into_pipeline_data())
}

fn examples(&self) -> Vec<Example> {
vec![Example {
example: "let id = job spawn { sleep 10sec }; job kill $id",
description: "Kill a newly spawned job",
result: None,
}]
}
}
75
crates/nu-command/src/experimental/job_list.rs
Normal file
@ -0,0 +1,75 @@
use nu_engine::command_prelude::*;
use nu_protocol::engine::{FrozenJob, Job};

#[derive(Clone)]
pub struct JobList;

impl Command for JobList {
fn name(&self) -> &str {
"job list"
}

fn description(&self) -> &str {
"List background jobs."
}

fn signature(&self) -> nu_protocol::Signature {
Signature::build("job list")
.category(Category::Experimental)
.input_output_types(vec![(Type::Nothing, Type::table())])
}

fn search_terms(&self) -> Vec<&str> {
vec!["background", "jobs"]
}

fn run(
&self,
engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;

let jobs = engine_state.jobs.lock().expect("jobs lock is poisoned!");

let values = jobs
.iter()
.map(|(id, job)| {
let record = record! {
"id" => Value::int(id.get() as i64, head),
"type" => match job {
Job::Thread(_) => Value::string("thread", head),
Job::Frozen(_) => Value::string("frozen", head),
},
"pids" => match job {
Job::Thread(job) => Value::list(
job.collect_pids()
.into_iter()
.map(|it| Value::int(it as i64, head))
.collect::<Vec<Value>>(),
head,
),

Job::Frozen(FrozenJob { unfreeze }) => {
Value::list(vec![ Value::int(unfreeze.pid() as i64, head) ], head)
}
}
};

Value::record(record, head)
})
.collect::<Vec<Value>>();

Ok(Value::list(values, head).into_pipeline_data())
}

fn examples(&self) -> Vec<Example> {
vec![Example {
example: "job list",
description: "List all background jobs",
result: None,
}]
}
}
126
crates/nu-command/src/experimental/job_spawn.rs
Normal file
@ -0,0 +1,126 @@
use std::{
sync::{
atomic::{AtomicBool, AtomicU32},
Arc,
},
thread,
};

use nu_engine::{command_prelude::*, ClosureEvalOnce};
use nu_protocol::{
engine::{Closure, Job, ThreadJob},
report_shell_error, Signals,
};

#[derive(Clone)]
pub struct JobSpawn;

impl Command for JobSpawn {
fn name(&self) -> &str {
"job spawn"
}

fn description(&self) -> &str {
"Spawn a background job and retrieve its ID."
}

fn signature(&self) -> nu_protocol::Signature {
Signature::build("job spawn")
.category(Category::Experimental)
.input_output_types(vec![(Type::Nothing, Type::Int)])
.required(
"closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The closure to run in another thread.",
)
}

fn search_terms(&self) -> Vec<&str> {
vec!["background", "bg", "&"]
}

fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;

let closure: Closure = call.req(engine_state, stack, 0)?;

let mut job_state = engine_state.clone();
job_state.is_interactive = false;

let job_stack = stack.clone();

// the new job should have its ctrl-c independent of foreground
let job_signals = Signals::new(Arc::new(AtomicBool::new(false)));
job_state.set_signals(job_signals.clone());

// the new job has a separate process group state for its processes
job_state.pipeline_externals_state = Arc::new((AtomicU32::new(0), AtomicU32::new(0)));

job_state.exit_warning_given = Arc::new(AtomicBool::new(false));

let jobs = job_state.jobs.clone();
let mut jobs = jobs.lock().expect("jobs lock is poisoned!");

let id = {
let thread_job = ThreadJob::new(job_signals);
job_state.current_thread_job = Some(thread_job.clone());
jobs.add_job(Job::Thread(thread_job))
};

let result = thread::Builder::new()
.name(format!("background job {}", id.get()))
.spawn(move || {
ClosureEvalOnce::new(&job_state, &job_stack, closure)
.run_with_input(Value::nothing(head).into_pipeline_data())
.and_then(|data| data.into_value(head))
.unwrap_or_else(|err| {
if !job_state.signals().interrupted() {
report_shell_error(&job_state, &err);
}

Value::nothing(head)
});

{
let mut jobs = job_state.jobs.lock().expect("jobs lock is poisoned!");

jobs.remove_job(id);
}
});

match result {
Ok(_) => Ok(Value::int(id.get() as i64, head).into_pipeline_data()),
Err(err) => {
jobs.remove_job(id);
Err(ShellError::Io(IoError::new_with_additional_context(
err.kind(),
call.head,
None,
"Failed to spawn thread for job",
)))
}
}
}

fn examples(&self) -> Vec<Example> {
vec![Example {
example: "job spawn { sleep 5sec; rm evidence.pdf }",
description: "Spawn a background job to do some time consuming work",
result: None,
}]
}

fn extra_description(&self) -> &str {
r#"Executes the provided closure in a background thread
and registers this task in the background job table, which can be retrieved with `job list`.

This command returns the job id of the newly created job.
"#
}
}
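Note: `job spawn` above returns an integer job id and runs the closure on its own thread with independent interrupt signals. A hedged Nushell sketch combining the examples from these files (returned ids and table contents are illustrative):

let id = job spawn { sleep 10sec }   # returns the new job's id, e.g. 1
job list                             # table with id, type ("thread"/"frozen"), and pids columns
job kill $id                         # removes the job and kills its processes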
163
crates/nu-command/src/experimental/job_unfreeze.rs
Normal file
@ -0,0 +1,163 @@
use nu_engine::command_prelude::*;
use nu_protocol::{
engine::{FrozenJob, Job, ThreadJob},
process::check_ok,
shell_error, JobId,
};
use nu_system::{kill_by_pid, ForegroundWaitStatus};

#[derive(Clone)]
pub struct JobUnfreeze;

impl Command for JobUnfreeze {
fn name(&self) -> &str {
"job unfreeze"
}

fn description(&self) -> &str {
"Unfreeze a frozen process job in foreground."
}

fn signature(&self) -> nu_protocol::Signature {
Signature::build("job unfreeze")
.category(Category::Experimental)
.optional("id", SyntaxShape::Int, "The process id to unfreeze.")
.input_output_types(vec![(Type::Nothing, Type::Nothing)])
.allow_variants_without_examples(true)
}

fn search_terms(&self) -> Vec<&str> {
vec!["fg"]
}

fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;

let option_id: Option<Spanned<i64>> = call.opt(engine_state, stack, 0)?;

let mut jobs = engine_state.jobs.lock().expect("jobs lock is poisoned!");

if let Some(id_arg) = option_id {
if id_arg.item < 0 {
return Err(ShellError::NeedsPositiveValue { span: id_arg.span });
}
}

let id = option_id
.map(|it| JobId::new(it.item as usize))
.or_else(|| jobs.most_recent_frozen_job_id())
.ok_or_else(|| ShellError::NoFrozenJob { span: head })?;

let job = match jobs.lookup(id) {
None => {
return Err(ShellError::JobNotFound {
id: id.get(),
span: head,
})
}
Some(Job::Thread(ThreadJob { .. })) => {
return Err(ShellError::JobNotFrozen {
id: id.get(),
span: head,
})
}
Some(Job::Frozen(FrozenJob { .. })) => jobs
.remove_job(id)
.expect("job was supposed to be in job list"),
};

drop(jobs);

unfreeze_job(engine_state, id, job, head)?;

Ok(Value::nothing(head).into_pipeline_data())
}

fn examples(&self) -> Vec<Example> {
vec![
Example {
example: "job unfreeze",
description: "Unfreeze the latest frozen job",
result: None,
},
Example {
example: "job unfreeze 4",
description: "Unfreeze a specific frozen job by its PID",
result: None,
},
]
}

fn extra_description(&self) -> &str {
r#"When a running process is frozen (with the SIGTSTP signal or with the Ctrl-Z key on unix),
a background job gets registered for this process, which can then be resumed using this command."#
}
}

fn unfreeze_job(
state: &EngineState,
old_id: JobId,
job: Job,
span: Span,
) -> Result<(), ShellError> {
match job {
Job::Thread(ThreadJob { .. }) => Err(ShellError::JobNotFrozen {
id: old_id.get(),
span,
}),

Job::Frozen(FrozenJob { unfreeze: handle }) => {
let pid = handle.pid();

if let Some(thread_job) = &state.current_thread_job {
if !thread_job.try_add_pid(pid) {
kill_by_pid(pid.into()).map_err(|err| {
ShellError::Io(IoError::new_internal(
err.kind(),
"job was interrupted; could not kill foreground process",
nu_protocol::location!(),
))
})?;
}
}

let result = handle.unfreeze(
state
.is_interactive
.then(|| state.pipeline_externals_state.clone()),
);

if let Some(thread_job) = &state.current_thread_job {
thread_job.remove_pid(pid);
}

match result {
Ok(ForegroundWaitStatus::Frozen(handle)) => {
let mut jobs = state.jobs.lock().expect("jobs lock is poisoned!");

jobs.add_job_with_id(old_id, Job::Frozen(FrozenJob { unfreeze: handle }))
.expect("job was supposed to be removed");

if state.is_interactive {
println!("\nJob {} is re-frozen", old_id.get());
}
Ok(())
}

Ok(ForegroundWaitStatus::Finished(status)) => check_ok(status, false, span),

Err(err) => Err(ShellError::Io(IoError::new_internal(
shell_error::io::ErrorKind::Std(err.kind()),
"Failed to unfreeze foreground process",
nu_protocol::location!(),
))),
}
}
}
}
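Note: the unfreeze flow above resumes the most recently frozen job unless an id is given, and re-registers the job under its old id if the process is suspended again. A hedged interactive Nushell sketch (unix only; `long_running_tool` is a placeholder external command):

long_running_tool        # press Ctrl-Z while it runs; the shell registers a frozen job
job list                 # shows the frozen job, e.g. id 1 with type "frozen"
job unfreeze             # resumes the most recent frozen job in the foreground
job unfreeze 1           # or resume a specific frozen job by id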
@ -1,3 +1,18 @@
mod is_admin;
mod job;
mod job_kill;
mod job_list;
mod job_spawn;

#[cfg(unix)]
mod job_unfreeze;

pub use is_admin::IsAdmin;
pub use job::Job;
pub use job_kill::JobKill;
pub use job_list::JobList;

pub use job_spawn::JobSpawn;

#[cfg(unix)]
pub use job_unfreeze::JobUnfreeze;
@ -77,7 +77,9 @@ impl Command for Cd {
if let Ok(path) = nu_path::canonicalize_with(path_no_whitespace, &cwd) {
if !path.is_dir() {
return Err(shell_error::io::IoError::new(
shell_error::io::ErrorKind::NotADirectory,
shell_error::io::ErrorKind::Std(
std::io::ErrorKind::NotADirectory,
),
v.span,
None,
)
@ -86,7 +88,7 @@ impl Command for Cd {
path
} else {
return Err(shell_error::io::IoError::new(
std::io::ErrorKind::NotFound,
ErrorKind::DirectoryNotFound,
v.span,
PathBuf::from(path_no_whitespace),
)
@ -96,7 +98,7 @@ impl Command for Cd {
let path = nu_path::expand_path_with(path_no_whitespace, &cwd, true);
if !path.exists() {
return Err(shell_error::io::IoError::new(
std::io::ErrorKind::NotFound,
ErrorKind::DirectoryNotFound,
v.span,
PathBuf::from(path_no_whitespace),
)
@ -104,7 +106,7 @@ impl Command for Cd {
};
if !path.is_dir() {
return Err(shell_error::io::IoError::new(
shell_error::io::ErrorKind::NotADirectory,
shell_error::io::ErrorKind::Std(std::io::ErrorKind::NotADirectory),
v.span,
path,
)
@ -118,7 +118,7 @@ impl Command for Du {
min_size,
};
Ok(
du_for_one_pattern(args, &current_dir, tag, engine_state.signals())?
du_for_one_pattern(args, &current_dir, tag, engine_state.signals().clone())?
.into_pipeline_data(tag, engine_state.signals().clone()),
)
}
@ -137,7 +137,7 @@ impl Command for Du {
args,
&current_dir,
tag,
engine_state.signals(),
engine_state.signals().clone(),
)?)
}

@ -163,9 +163,8 @@ fn du_for_one_pattern(
args: DuArgs,
current_dir: &Path,
span: Span,
signals: &Signals,
signals: Signals,
) -> Result<impl Iterator<Item = Value> + Send, ShellError> {
let signals_clone = signals.clone();
let exclude = args.exclude.map_or(Ok(None), move |x| {
Pattern::new(x.item.as_ref())
.map(Some)
@ -176,7 +175,8 @@ fn du_for_one_pattern(
})?;

let paths = match args.path {
Some(p) => nu_engine::glob_from(&p, current_dir, span, None),
Some(p) => nu_engine::glob_from(&p, current_dir, span, None, signals.clone()),

// The * pattern should never fail.
None => nu_engine::glob_from(
&Spanned {
@ -186,6 +186,7 @@ fn du_for_one_pattern(
current_dir,
span,
None,
signals.clone(),
),
}
.map(|f| f.1)?;
@ -206,7 +207,7 @@ fn du_for_one_pattern(
Ok(paths.filter_map(move |p| match p {
Ok(a) => {
if a.is_dir() {
match DirInfo::new(a, &params, max_depth, span, &signals_clone) {
match DirInfo::new(a, &params, max_depth, span, &signals) {
Ok(v) => Some(Value::from(v)),
Err(_) => None,
}
@ -285,7 +285,10 @@ fn ls_for_one_pattern(
nu_path::expand_path_with(pat.item.as_ref(), &cwd, pat.item.is_expand());
// Avoid checking and pushing "*" to the path when directory (do not show contents) flag is true
if !directory && tmp_expanded.is_dir() {
if read_dir(tmp_expanded, p_tag, use_threads)?.next().is_none() {
if read_dir(tmp_expanded, p_tag, use_threads, signals.clone())?
.next()
.is_none()
{
return Ok(Value::test_nothing().into_pipeline_data());
}
just_read_dir = !(pat.item.is_expand() && nu_glob::is_glob(pat.item.as_ref()));
@ -304,7 +307,10 @@ fn ls_for_one_pattern(
// Avoid pushing "*" to the default path when directory (do not show contents) flag is true
if directory {
(NuGlob::Expand(".".to_string()), false)
} else if read_dir(cwd.clone(), p_tag, use_threads)?.next().is_none() {
} else if read_dir(cwd.clone(), p_tag, use_threads, signals.clone())?
.next()
.is_none()
{
return Ok(Value::test_nothing().into_pipeline_data());
} else {
(NuGlob::Expand("*".to_string()), false)
@ -317,7 +323,7 @@ fn ls_for_one_pattern(
let path = pattern_arg.into_spanned(p_tag);
let (prefix, paths) = if just_read_dir {
let expanded = nu_path::expand_path_with(path.item.as_ref(), &cwd, path.item.is_expand());
let paths = read_dir(expanded.clone(), p_tag, use_threads)?;
let paths = read_dir(expanded.clone(), p_tag, use_threads, signals.clone())?;
// just need to read the directory, so prefix is path itself.
(Some(expanded), paths)
} else {
@ -330,11 +336,13 @@ fn ls_for_one_pattern(
};
Some(glob_options)
};
glob_from(&path, &cwd, call_span, glob_options)?
glob_from(&path, &cwd, call_span, glob_options, signals.clone())?
};

let mut paths_peek = paths.peekable();
if paths_peek.peek().is_none() {
let no_matches = paths_peek.peek().is_none();
signals.check(call_span)?;
if no_matches {
return Err(ShellError::GenericError {
error: format!("No matches found for {:?}", path.item),
msg: "Pattern, file or folder not found".into(),
@ -959,17 +967,21 @@ fn read_dir(
f: PathBuf,
span: Span,
use_threads: bool,
signals: Signals,
) -> Result<Box<dyn Iterator<Item = Result<PathBuf, ShellError>> + Send>, ShellError> {
let signals_clone = signals.clone();
let items = f
.read_dir()
.map_err(|err| IoError::new(err.kind(), span, f.clone()))?
.map(move |d| {
signals_clone.check(span)?;
d.map(|r| r.path())
.map_err(|err| IoError::new(err.kind(), span, f.clone()))
.map_err(ShellError::from)
});
if !use_threads {
let mut collected = items.collect::<Vec<_>>();
signals.check(span)?;
collected.sort_by(|a, b| match (a, b) {
(Ok(a), Ok(b)) => a.cmp(b),
(Ok(_), Err(_)) => Ordering::Greater,
@ -95,15 +95,17 @@ impl Command for Open {
let arg_span = path.span;
// let path_no_whitespace = &path.item.trim_end_matches(|x| matches!(x, '\x09'..='\x0d'));

for path in nu_engine::glob_from(&path, &cwd, call_span, None)
.map_err(|err| match err {
ShellError::Io(mut err) => {
err.span = arg_span;
err.into()
}
_ => err,
})?
.1
for path in
nu_engine::glob_from(&path, &cwd, call_span, None, engine_state.signals().clone())
.map_err(|err| match err {
ShellError::Io(mut err) => {
err.kind = err.kind.not_found_as(NotFound::File);
err.span = arg_span;
err.into()
}
_ => err,
})?
.1
{
let path = path?;
let path = Path::new(&path);
@ -151,7 +153,7 @@ impl Command for Open {
// At least under windows this check ensures that we don't get a
// permission denied error on directories
return Err(ShellError::Io(IoError::new(
shell_error::io::ErrorKind::IsADirectory,
shell_error::io::ErrorKind::Std(std::io::ErrorKind::IsADirectory),
arg_span,
PathBuf::from(path),
)));
@ -260,6 +260,7 @@ fn rm(
require_literal_leading_dot: true,
..Default::default()
}),
engine_state.signals().clone(),
) {
Ok(files) => {
for file in files.1 {
@ -424,7 +424,9 @@ fn open_file(path: &Path, span: Span, append: bool) -> Result<File, ShellError>
// A TOCTOU problem exists here, which may cause wrong error message to be shown
#[cfg(target_os = "windows")]
if path.is_dir() {
Err(nu_protocol::shell_error::io::ErrorKind::IsADirectory)
Err(nu_protocol::shell_error::io::ErrorKind::Std(
std::io::ErrorKind::IsADirectory,
))
} else {
std::fs::File::create(path).map_err(|err| err.kind().into())
}
@ -193,7 +193,7 @@ impl Command for UCp {
for mut p in paths {
p.item = p.item.strip_ansi_string_unlikely();
let exp_files: Vec<Result<PathBuf, ShellError>> =
nu_engine::glob_from(&p, &cwd, call.head, None)
nu_engine::glob_from(&p, &cwd, call.head, None, engine_state.signals().clone())
.map(|f| f.1)?
.collect();
if exp_files.is_empty() {
@ -134,7 +134,7 @@ impl Command for UMv {
for mut p in paths {
p.item = p.item.strip_ansi_string_unlikely();
let exp_files: Vec<Result<PathBuf, ShellError>> =
nu_engine::glob_from(&p, &cwd, call.head, None)
nu_engine::glob_from(&p, &cwd, call.head, None, engine_state.signals().clone())
.map(|f| f.1)?
.collect();
if exp_files.is_empty() {
@ -158,14 +158,17 @@ impl Command for UTouch {
continue;
}

let mut expanded_globs = glob(&file_path.to_string_lossy())
.unwrap_or_else(|_| {
panic!(
"Failed to process file path: {}",
&file_path.to_string_lossy()
)
})
.peekable();
let mut expanded_globs = glob(
&file_path.to_string_lossy(),
Some(engine_state.signals().clone()),
)
.unwrap_or_else(|_| {
panic!(
"Failed to process file path: {}",
&file_path.to_string_lossy()
)
})
.peekable();

if expanded_globs.peek().is_none() {
let file_name = file_path.file_name().unwrap_or_else(|| {
@ -25,7 +25,7 @@ impl Command for Chunks {
}

fn description(&self) -> &str {
"Divide a list or table into chunks of `chunk_size`."
"Divide a list, table or binary input into chunks of `chunk_size`."
}

fn extra_description(&self) -> &str {
@ -33,7 +33,7 @@ impl Command for Chunks {
}

fn search_terms(&self) -> Vec<&str> {
vec!["batch", "group"]
vec!["batch", "group", "split", "bytes"]
}

fn examples(&self) -> Vec<Example> {
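Note: with the description and search terms updated above, `chunks` also accepts binary input. A hedged Nushell sketch (the rendering of the binary chunks is illustrative):

0x[01 02 03 04 05] | chunks 2   # assumed to yield a list of binaries: 0x[0102], 0x[0304], 0x[05]
[1 2 3 4] | chunks 2            # existing behaviour on lists: [[1 2] [3 4]]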
@ -108,10 +108,15 @@ pub fn compact(
return false;
}
if compact_empties {
if let Value::String { val, .. } = x {
if val.is_empty() {
return false;
}
// check if the value is one of the empty value
if match x {
Value::String { val, .. } => val.is_empty(),
Value::Record { val, .. } => val.is_empty(),
Value::List { vals, .. } => vals.is_empty(),
_ => false,
} {
// one of the empty value found so skip now
return false;
}
}
}
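Note: the change above broadens the empty check so that empty records and lists are dropped alongside empty strings. A hedged Nushell sketch (assuming the flag is `--empty`, as the `compact_empties` variable suggests):

[1 "" {} [] 2] | compact --empty   # expected to keep only [1 2]
[1 null 2] | compact               # null removal is unchanged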
@ -20,7 +20,7 @@ iterate over each record, not necessarily each cell within it.

Avoid passing single records to this command. Since a record is a
one-row structure, 'each' will only run once, behaving similar to 'do'.
To iterate over a record's values, try converting it to a table
To iterate over a record's values, use 'items' or try converting it to a table
with 'transpose' first."#
}

@ -70,7 +70,7 @@ with 'transpose' first."#
},
Example {
example: r#"[1 2 3 2] | each {|e| if $e == 2 { "two" } }"#,
description: "Produce a list that has \"two\" for each 2 in the input",
description: "'null' items will be dropped from the result list. It has the same effect as 'filter_map' in other languages.",
result: Some(Value::test_list(vec![
Value::test_string("two"),
Value::test_string("two"),
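Note: the reworded help above points record users at `items` in addition to `transpose`. A hedged Nushell sketch of the two approaches (the column names in the transpose variant are chosen for the example):

{a: 1, b: 2} | items {|key, value| $"($key) = ($value)" }           # ["a = 1", "b = 2"]
{a: 1, b: 2} | transpose key value | each {|row| $row.value * 10 }  # [10, 20]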
Some files were not shown because too many files have changed in this diff