mirror of https://github.com/nushell/nushell.git
synced 2025-07-01 23:22:10 +02:00

Compare commits

3 commits:
  3d631490bc
  68211dea3e
  b8e9293c45

.github/.typos.toml (vendored, new file, 14 lines changed)

@@ -0,0 +1,14 @@
+[files]
+extend-exclude = ["crates/nu-command/tests/commands/table.rs", "*.tsv", "*.json", "*.txt"]
+
+[default.extend-words]
+# Ignore false-positives
+nd = "nd"
+fo = "fo"
+ons = "ons"
+ba = "ba"
+Plasticos = "Plasticos"
+IIF = "IIF"
+numer = "numer"
+ratatui = "ratatui"
+doas = "doas"

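This config is presumably consumed by the typos workflow that also appears in this compare (.github/workflows/typos.yml). A minimal sketch of running the same check locally against this config, assuming the `typos` CLI from the typos-cli crate is installed and supports its documented --config option:

    # run the spell check from the repository root with the repo's config
    typos --config .github/.typos.toml
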
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 16 lines changed)

@@ -13,7 +13,7 @@ body:
 id: repro
 attributes:
 label: How to reproduce
-description: Steps to reproduce the behavior (including succinct code examples or screenshots of the observed behavior)
+description: Steps to reproduce the behavior
 placeholder: |
 1.
 2.

@@ -28,6 +28,13 @@ body:
 placeholder: I expected nu to...
 validations:
 required: true
+- type: textarea
+id: screenshots
+attributes:
+label: Screenshots
+description: Please add any relevant screenshots here, if any
+validations:
+required: false
 - type: textarea
 id: config
 attributes:

@@ -48,3 +55,10 @@ body:
 | installed_plugins | binaryview, chart bar, chart line, fetch, from bson, from sqlite, inc, match, post, ps, query json, s3, selector, start, sys, textview, to bson, to sqlite, tree, xpath |
 validations:
 required: true
+- type: textarea
+id: context
+attributes:
+label: Additional context
+description: Add any other context about the problem here.
+validations:
+required: false

.github/ISSUE_TEMPLATE/standard-library-bug-or-feature-report.md (vendored, new file, 11 lines changed)

@@ -0,0 +1,11 @@
+---
+name: standard library bug or feature report
+about: Used to submit issues related to the nu standard library
+title: ''
+labels: ['needs-triage', 'std-library']
+assignees: ''
+
+---
+
+**Describe the bug or feature**
+A clear and concise description of what the bug is.

.github/dependabot.yml (vendored, 20 lines changed)

@@ -11,30 +11,10 @@ updates:
 directory: "/"
 schedule:
 interval: "weekly"
-# We release on Tuesdays and open dependabot PRs will rebase after the
-# version bump and thus consume unnecessary workers during release, thus
-# let's open new ones on Wednesday
-day: "wednesday"
 ignore:
 - dependency-name: "*"
 update-types: ["version-update:semver-patch"]
-groups:
-# Only update polars as a whole as there are many subcrates that need to
-# be updated at once. We explicitly depend on some of them, so batch their
-# updates to not take up dependabot PR slots with dysfunctional PRs
-polars:
-patterns:
-- "polars"
-- "polars-*"
-# uutils/coreutils also versions all their workspace crates the same at the moment
-# Most of them have bleeding edge version requirements (some not)
-# see: https://github.com/uutils/coreutils/blob/main/Cargo.toml
-uutils:
-patterns:
-- "uucore"
-- "uu_*"
 - package-ecosystem: "github-actions"
 directory: "/"
 schedule:
 interval: "weekly"
-day: "wednesday"

.github/pull_request_template.md (vendored, 2 lines changed)

@@ -26,7 +26,7 @@ Make sure you've run and fixed any issues with these commands:
 - `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
 - `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to check that you're using the standard code style
 - `cargo test --workspace` to check that all tests pass (on Windows make sure to [enable developer mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
-- `cargo run -- -c "use toolkit.nu; toolkit test stdlib"` to run the tests for the standard library
+- `cargo run -- -c "use std testing; testing run-tests --path crates/nu-std"` to run the tests for the standard library

 > **Note**
 > from `nushell` you can also use the `toolkit` as follows

.github/workflows/audit.yml (vendored, 4 lines changed)

@@ -19,7 +19,7 @@ jobs:
 # Prevent sudden announcement of a new advisory from failing ci:
 continue-on-error: true
 steps:
-- uses: actions/checkout@v4.1.7
-- uses: rustsec/audit-check@v2.0.0
+- uses: actions/checkout@v4
+- uses: rustsec/audit-check@v1.4.1
 with:
 token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/check-msrv.nu (vendored, deleted, 12 lines changed)

@@ -1,12 +0,0 @@
-let toolchain_spec = open rust-toolchain.toml | get toolchain.channel
-let msrv_spec = open Cargo.toml | get package.rust-version
-
-# This check is conservative in the sense that we use `rust-toolchain.toml`'s
-# override to ensure that this is the upper-bound for the minimum supported
-# rust version
-if $toolchain_spec != $msrv_spec {
-print -e "Mismatching rust compiler versions specified in `Cargo.toml` and `rust-toolchain.toml`"
-print -e $"Cargo.toml: ($msrv_spec)"
-print -e $"rust-toolchain.toml: ($toolchain_spec)"
-exit 1
-}

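The script removed above fails CI when the pinned toolchain and the declared MSRV drift apart. The two values it compares can be inspected directly in Nushell, using the same pipelines as the script (a minimal sketch, run from the repository root):

    # both should print the same version string if Cargo.toml and rust-toolchain.toml agree
    open rust-toolchain.toml | get toolchain.channel
    open Cargo.toml | get package.rust-version
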
.github/workflows/ci.yml (vendored, 153 lines changed)

@@ -10,11 +10,7 @@ env:
 NUSHELL_CARGO_PROFILE: ci
 NU_LOG_LEVEL: DEBUG
 # If changing these settings also change toolkit.nu
-CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used -D clippy::unchecked_duration_subtraction"
+CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used"

-concurrency:
-group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
-cancel-in-progress: true
-
 jobs:
 fmt-clippy:

@@ -24,60 +20,77 @@ jobs:
 # Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu
 # builds to link against a too-new-for-many-Linux-installs glibc version. Consider
 # revisiting this when 20.04 is closer to EOL (April 2025)
-#
-# Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
-# instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
-# removed and we're only building the `polars` plugin instead
-platform: [windows-latest, macos-13, ubuntu-20.04]
+platform: [windows-latest, macos-latest, ubuntu-20.04]
+feature: [default, dataframe, extra]
+include:
+- feature: default
+flags: ""
+- feature: dataframe
+flags: "--features=dataframe"
+- feature: extra
+flags: "--features=extra"
+exclude:
+- platform: windows-latest
+feature: dataframe
+- platform: macos-latest
+feature: dataframe

 runs-on: ${{ matrix.platform }}

 steps:
-- uses: actions/checkout@v4.1.7
+- uses: actions/checkout@v4

 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
+uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+with:
+rustflags: ""

 - name: cargo fmt
 run: cargo fmt --all -- --check

 # If changing these settings also change toolkit.nu
 - name: Clippy
-run: cargo clippy --workspace --exclude nu_plugin_* -- $CLIPPY_OPTIONS
+run: cargo clippy --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- $CLIPPY_OPTIONS

 # In tests we don't have to deny unwrap
 - name: Clippy of tests
-run: cargo clippy --tests --workspace --exclude nu_plugin_* -- -D warnings
+run: cargo clippy --tests --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings

-- name: Clippy of benchmarks
-run: cargo clippy --benches --workspace --exclude nu_plugin_* -- -D warnings
-
 tests:
 strategy:
 fail-fast: true
 matrix:
 platform: [windows-latest, macos-latest, ubuntu-20.04]
+feature: [default, dataframe, extra]
+include:
+- feature: default
+flags: ""
+- feature: dataframe
+flags: "--features=dataframe"
+- feature: extra
+flags: "--features=extra"
+exclude:
+- platform: windows-latest
+feature: dataframe
+- platform: macos-latest
+feature: dataframe
+- platform: windows-latest
+feature: extra
+- platform: macos-latest
+feature: extra

 runs-on: ${{ matrix.platform }}

 steps:
-- uses: actions/checkout@v4.1.7
+- uses: actions/checkout@v4

 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
+uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+with:
+rustflags: ""

 - name: Tests
-run: cargo test --workspace --profile ci --exclude nu_plugin_*
+run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}
-- name: Check for clean repo
-shell: bash
-run: |
-if [ -n "$(git status --porcelain)" ]; then
-echo "there are changes";
-git status --porcelain
-exit 1
-else
-echo "no changes in working directory";
-fi

 std-lib-and-python-virtualenv:
 strategy:

@@ -90,22 +103,21 @@ jobs:
 runs-on: ${{ matrix.platform }}

 steps:
-- uses: actions/checkout@v4.1.7
+- uses: actions/checkout@v4

 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
+uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+with:
+rustflags: ""

 - name: Install Nushell
 run: cargo install --path . --locked --no-default-features

 - name: Standard library tests
-run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
+run: nu -c 'use std testing; testing run-tests --path crates/nu-std'

-- name: Ensure that Cargo.toml MSRV and rust-toolchain.toml use the same version
-run: nu .github/workflows/check-msrv.nu
-
 - name: Setup Python
-uses: actions/setup-python@v5
+uses: actions/setup-python@v4
 with:
 python-version: "3.10"

@@ -117,79 +129,24 @@ jobs:
 run: nu scripts/test_virtualenv.nu
 shell: bash

-- name: Check for clean repo
-shell: bash
-run: |
-if [ -n "$(git status --porcelain)" ]; then
-echo "there are changes";
-git status --porcelain
-exit 1
-else
-echo "no changes in working directory";
-fi
-
 plugins:
 strategy:
 fail-fast: true
 matrix:
-# Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
-# instead of 14 GB) which is too little for us right now.
-#
-# Failure occurring with clippy for rust 1.77.2
-platform: [windows-latest, macos-13, ubuntu-20.04]
+platform: [windows-latest, macos-latest, ubuntu-20.04]

 runs-on: ${{ matrix.platform }}

 steps:
-- uses: actions/checkout@v4.1.7
+- uses: actions/checkout@v4

 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
+uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+with:
+rustflags: ""

 - name: Clippy
-run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
+run: cargo clippy --package nu_plugin_* ${{ matrix.flags }} -- $CLIPPY_OPTIONS

 - name: Tests
 run: cargo test --profile ci --package nu_plugin_*
-
-- name: Check for clean repo
-shell: bash
-run: |
-if [ -n "$(git status --porcelain)" ]; then
-echo "there are changes";
-git status --porcelain
-exit 1
-else
-echo "no changes in working directory";
-fi
-
-build-wasm:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v4.1.7
-
-- name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
-
-- name: Add wasm32-unknown-unknown target
-run: rustup target add wasm32-unknown-unknown
-
-- run: cargo build -p nu-cmd-base --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-cmd-extra --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-cmd-lang --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-color-config --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-command --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-derive-value --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-engine --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-glob --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-json --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-parser --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-path --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-pretty-hex --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-protocol --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-std --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-system --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-table --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-term-grid --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nu-utils --no-default-features --target wasm32-unknown-unknown
-- run: cargo build -p nuon --no-default-features --target wasm32-unknown-unknown

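Both sides of this CI diff drive the standard-library tests through the nu binary itself, and the two invocations shown above can be run as-is from a local checkout (a usage note, not part of the workflow):

    # invocation on the removed side of the diff
    nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
    # invocation on the added side of the diff
    nu -c 'use std testing; testing run-tests --path crates/nu-std'
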
.github/workflows/milestone.yml (vendored, deleted, 30 lines changed)

@@ -1,30 +0,0 @@
-# Description:
-# - Add milestone to a merged PR automatically
-# - Add milestone to a closed issue that has a merged PR fix (if any)
-
-name: Milestone Action
-on:
-issues:
-types: [closed]
-pull_request_target:
-types: [closed]
-
-jobs:
-update-milestone:
-runs-on: ubuntu-latest
-name: Milestone Update
-steps:
-- name: Set Milestone for PR
-uses: hustcer/milestone-action@main
-if: github.event.pull_request.merged == true
-env:
-GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-# Bind milestone to closed issue that has a merged PR fix
-- name: Set Milestone for Issue
-uses: hustcer/milestone-action@main
-if: github.event.issue.state == 'closed'
-with:
-action: bind-issue
-env:
-GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/nightly-build.yml (vendored, 178 lines changed)

@@ -36,10 +36,12 @@ jobs:
 token: ${{ secrets.WORKFLOW_TOKEN }}

 - name: Setup Nushell
-uses: hustcer/setup-nu@v3
+uses: hustcer/setup-nu@v3.8
 if: github.repository == 'nushell/nightly'
 with:
-version: 0.98.0
+version: 0.86.0
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

 # Synchronize the main branch of nightly repo with the main branch of Nushell official repo
 - name: Prepare for Nightly Release

@@ -65,7 +67,7 @@ jobs:
 }

 standard:
-name: Nu
+name: Std
 needs: prepare
 strategy:
 fail-fast: false

@@ -78,45 +80,47 @@ jobs:
 - x86_64-unknown-linux-gnu
 - x86_64-unknown-linux-musl
 - aarch64-unknown-linux-gnu
-- aarch64-unknown-linux-musl
 - armv7-unknown-linux-gnueabihf
-- armv7-unknown-linux-musleabihf
 - riscv64gc-unknown-linux-gnu
-- loongarch64-unknown-linux-gnu
 extra: ['bin']
 include:
 - target: aarch64-apple-darwin
 os: macos-latest
+target_rustflags: ''
 - target: x86_64-apple-darwin
 os: macos-latest
+target_rustflags: ''
 - target: x86_64-pc-windows-msvc
 extra: 'bin'
 os: windows-latest
+target_rustflags: ''
 - target: x86_64-pc-windows-msvc
 extra: msi
 os: windows-latest
+target_rustflags: ''
 - target: aarch64-pc-windows-msvc
 extra: 'bin'
 os: windows-latest
+target_rustflags: ''
 - target: aarch64-pc-windows-msvc
 extra: msi
 os: windows-latest
+target_rustflags: ''
 - target: x86_64-unknown-linux-gnu
-os: ubuntu-22.04
+os: ubuntu-20.04
+target_rustflags: ''
 - target: x86_64-unknown-linux-musl
-os: ubuntu-22.04
+os: ubuntu-20.04
+target_rustflags: ''
 - target: aarch64-unknown-linux-gnu
-os: ubuntu-22.04
-- target: aarch64-unknown-linux-musl
-os: ubuntu-22.04
+os: ubuntu-20.04
+target_rustflags: ''
 - target: armv7-unknown-linux-gnueabihf
-os: ubuntu-22.04
-- target: armv7-unknown-linux-musleabihf
-os: ubuntu-22.04
+os: ubuntu-20.04
+target_rustflags: ''
 - target: riscv64gc-unknown-linux-gnu
-os: ubuntu-latest
-- target: loongarch64-unknown-linux-gnu
-os: ubuntu-22.04
+os: ubuntu-20.04
+target_rustflags: ''

 runs-on: ${{matrix.os}}

@@ -131,28 +135,32 @@ jobs:
 echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

 - name: Setup Rust toolchain and cache
-uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
+uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
 # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
 with:
 rustflags: ''

 - name: Setup Nushell
-uses: hustcer/setup-nu@v3
+uses: hustcer/setup-nu@v3.8
 with:
-version: 0.98.0
+version: 0.86.0
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

 - name: Release Nu Binary
 id: nu
 run: nu .github/workflows/release-pkg.nu
 env:
+RELEASE_TYPE: standard
 OS: ${{ matrix.os }}
 REF: ${{ github.ref }}
 TARGET: ${{ matrix.target }}
 _EXTRA_: ${{ matrix.extra }}
+TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}

 - name: Create an Issue for Release Failure
 if: ${{ failure() }}
-uses: JasonEtco/create-an-issue@v2.9.2
+uses: JasonEtco/create-an-issue@v2.9.1
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:

@@ -170,7 +178,7 @@ jobs:
 # REF: https://github.com/marketplace/actions/gh-release
 # Create a release only in nushell/nightly repo
 - name: Publish Archive
-uses: softprops/action-gh-release@v2.0.9
+uses: softprops/action-gh-release@v0.1.15
 if: ${{ startsWith(github.repository, 'nushell/nightly') }}
 with:
 prerelease: true

@@ -180,6 +188,124 @@ jobs:
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

+full:
+name: Full
+needs: prepare
+strategy:
+fail-fast: false
+matrix:
+target:
+- aarch64-apple-darwin
+- x86_64-apple-darwin
+- x86_64-pc-windows-msvc
+- aarch64-pc-windows-msvc
+- x86_64-unknown-linux-gnu
+- x86_64-unknown-linux-musl
+- aarch64-unknown-linux-gnu
+extra: ['bin']
+include:
+- target: aarch64-apple-darwin
+os: macos-latest
+target_rustflags: '--features=dataframe,extra'
+- target: x86_64-apple-darwin
+os: macos-latest
+target_rustflags: '--features=dataframe,extra'
+- target: x86_64-pc-windows-msvc
+extra: 'bin'
+os: windows-latest
+target_rustflags: '--features=dataframe,extra'
+- target: x86_64-pc-windows-msvc
+extra: msi
+os: windows-latest
+target_rustflags: '--features=dataframe,extra'
+- target: aarch64-pc-windows-msvc
+extra: 'bin'
+os: windows-latest
+target_rustflags: '--features=dataframe,extra'
+- target: aarch64-pc-windows-msvc
+extra: msi
+os: windows-latest
+target_rustflags: '--features=dataframe,extra'
+- target: x86_64-unknown-linux-gnu
+os: ubuntu-20.04
+target_rustflags: '--features=dataframe,extra'
+- target: x86_64-unknown-linux-musl
+os: ubuntu-20.04
+target_rustflags: '--features=dataframe,extra'
+- target: aarch64-unknown-linux-gnu
+os: ubuntu-20.04
+target_rustflags: '--features=dataframe,extra'
+
+runs-on: ${{matrix.os}}
+
+steps:
+- uses: actions/checkout@v4
+with:
+ref: main
+fetch-depth: 0
+
+- name: Update Rust Toolchain Target
+run: |
+echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
+
+- name: Setup Rust toolchain and cache
+uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
+with:
+rustflags: ''
+
+- name: Setup Nushell
+uses: hustcer/setup-nu@v3.8
+with:
+version: 0.86.0
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+- name: Release Nu Binary
+id: nu
+run: nu .github/workflows/release-pkg.nu
+env:
+RELEASE_TYPE: full
+OS: ${{ matrix.os }}
+REF: ${{ github.ref }}
+TARGET: ${{ matrix.target }}
+_EXTRA_: ${{ matrix.extra }}
+TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}
+
+- name: Create an Issue for Release Failure
+if: ${{ failure() }}
+uses: JasonEtco/create-an-issue@v2.9.1
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+with:
+update_existing: true
+search_existing: open
+filename: .github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md
+
+- name: Set Outputs of Short SHA
+id: vars
+run: |
+echo "date=$(date -u +'%Y-%m-%d')" >> $GITHUB_OUTPUT
+sha_short=$(git rev-parse --short HEAD)
+echo "sha_short=${sha_short:0:7}" >> $GITHUB_OUTPUT
+
+# REF: https://github.com/marketplace/actions/gh-release
+# Create a release only in nushell/nightly repo
+- name: Publish Archive
+uses: softprops/action-gh-release@v0.1.15
+if: ${{ startsWith(github.repository, 'nushell/nightly') }}
+with:
+draft: false
+prerelease: true
+name: Nu-nightly-${{ steps.vars.outputs.date }}-${{ steps.vars.outputs.sha_short }}
+tag_name: nightly-${{ steps.vars.outputs.sha_short }}
+body: |
+This is a NIGHTLY build of Nushell.
+It is NOT recommended for production use.
+files: ${{ steps.nu.outputs.archive }}
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
 cleanup:
 name: Cleanup
 # Should only run in nushell/nightly repo

@@ -195,9 +321,11 @@ jobs:
 ref: main

 - name: Setup Nushell
-uses: hustcer/setup-nu@v3
+uses: hustcer/setup-nu@v3.8
 with:
-version: 0.98.0
+version: 0.86.0
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

 # Keep the last a few releases
 - name: Delete Older Releases

.github/workflows/release-pkg.nu (vendored, 147 lines changed)

@@ -9,6 +9,7 @@
 # Instructions for manually creating an MSI for Winget Releases when they fail
 # Added 2022-11-29 when Windows packaging wouldn't work
 # Updated again on 2023-02-23 because msis are still failing validation
+# Update on 2023-10-18 to use RELEASE_TYPE env var to determine if full or not
 # To run this manual for windows here are the steps I take
 # checkout the release you want to publish
 # 1. git checkout 0.86.0

@@ -16,26 +17,28 @@
 # 2. $env:CARGO_TARGET_DIR = ""
 # 2. hide-env CARGO_TARGET_DIR
 # 3. $env.TARGET = 'x86_64-pc-windows-msvc'
-# 4. $env.GITHUB_WORKSPACE = 'D:\nushell'
-# 5. $env.GITHUB_OUTPUT = 'D:\nushell\output\out.txt'
-# 6. $env.OS = 'windows-latest'
+# 4. $env.TARGET_RUSTFLAGS = ''
+# 5. $env.GITHUB_WORKSPACE = 'D:\nushell'
+# 6. $env.GITHUB_OUTPUT = 'D:\nushell\output\out.txt'
+# 7. $env.OS = 'windows-latest'
+# 8. $env.RELEASE_TYPE = '' # There is full and '' for normal releases
 # make sure 7z.exe is in your path https://www.7-zip.org/download.html
-# 7. $env.Path = ($env.Path | append 'c:\apps\7-zip')
+# 9. $env.Path = ($env.Path | append 'c:\apps\7-zip')
 # make sure aria2c.exe is in your path https://github.com/aria2/aria2
-# 8. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
+# 10. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
 # make sure you have the wixtools installed https://wixtoolset.org/
-# 9. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
+# 11. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
 # You need to run the release-pkg twice. The first pass, with _EXTRA_ as 'bin', makes the output
 # folder and builds everything. The second pass, that generates the msi file, with _EXTRA_ as 'msi'
-# 10. $env._EXTRA_ = 'bin'
-# 11. source .github\workflows\release-pkg.nu
-# 12. cd ..
-# 13. $env._EXTRA_ = 'msi'
-# 14. source .github\workflows\release-pkg.nu
+# 12. $env._EXTRA_ = 'bin'
+# 13. source .github\workflows\release-pkg.nu
+# 14. cd ..
+# 15. $env._EXTRA_ = 'msi'
+# 16. source .github\workflows\release-pkg.nu
 # After msi is generated, you have to update winget-pkgs repo, you'll need to patch the release
 # by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
 # on the winget-pkgs PR. To generate the hash, run this command
-# 15. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
+# 17. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
 # Then, just take the output and put it in the winget-pkgs PR for the hash on the msi


@@ -45,15 +48,31 @@ let os = $env.OS
 let target = $env.TARGET
 # Repo source dir like `/home/runner/work/nushell/nushell`
 let src = $env.GITHUB_WORKSPACE
+let flags = $env.TARGET_RUSTFLAGS
 let dist = $'($env.GITHUB_WORKSPACE)/output'
 let version = (open Cargo.toml | get package.version)

 print $'Debugging info:'
-print { version: $version, bin: $bin, os: $os, target: $target, src: $src, dist: $dist }; hr-line -b
+print { version: $version, bin: $bin, os: $os, releaseType: $env.RELEASE_TYPE, target: $target, src: $src, flags: $flags, dist: $dist }; hr-line -b

+# Rename the full release name so that we won't break the existing scripts for standard release downloading, such as:
+# curl -s https://api.github.com/repos/chmln/sd/releases/latest | grep browser_download_url | cut -d '"' -f 4 | grep x86_64-unknown-linux-musl
+const FULL_RLS_NAMING = {
+x86_64-apple-darwin: 'x86_64-darwin-full',
+aarch64-apple-darwin: 'aarch64-darwin-full',
+x86_64-unknown-linux-gnu: 'x86_64-linux-gnu-full',
+x86_64-pc-windows-msvc: 'x86_64-windows-msvc-full',
+x86_64-unknown-linux-musl: 'x86_64-linux-musl-full',
+aarch64-unknown-linux-gnu: 'aarch64-linux-gnu-full',
+aarch64-pc-windows-msvc: 'aarch64-windows-msvc-full',
+riscv64gc-unknown-linux-gnu: 'riscv64-linux-gnu-full',
+armv7-unknown-linux-gnueabihf: 'armv7-linux-gnueabihf-full',
+}
+
 # $env

-let USE_UBUNTU = $os starts-with ubuntu
+let USE_UBUNTU = 'ubuntu-20.04'
+let FULL_NAME = $FULL_RLS_NAMING | get -i $target | default 'unknown-target-full'

 print $'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
 if not ('Cargo.lock' | path exists) { cargo generate-lockfile }

@@ -63,8 +82,8 @@ print $'Start building ($bin)...'; hr-line
 # ----------------------------------------------------------------------------
 # Build for Ubuntu and macOS
 # ----------------------------------------------------------------------------
-if $os in ['macos-latest'] or $USE_UBUNTU {
-if $USE_UBUNTU {
+if $os in [$USE_UBUNTU, 'macos-latest'] {
+if $os == $USE_UBUNTU {
 sudo apt update
 sudo apt-get install libxcb-composite0-dev -y
 }

@@ -72,44 +91,23 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
 'aarch64-unknown-linux-gnu' => {
 sudo apt-get install gcc-aarch64-linux-gnu -y
 $env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
-cargo-build-nu
+cargo-build-nu $flags
 }
 'riscv64gc-unknown-linux-gnu' => {
 sudo apt-get install gcc-riscv64-linux-gnu -y
 $env.CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_LINKER = 'riscv64-linux-gnu-gcc'
-cargo-build-nu
+cargo-build-nu $flags
 }
 'armv7-unknown-linux-gnueabihf' => {
 sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
 $env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
-cargo-build-nu
+cargo-build-nu $flags
-}
-'aarch64-unknown-linux-musl' => {
-aria2c https://musl.cc/aarch64-linux-musl-cross.tgz
-tar -xf aarch64-linux-musl-cross.tgz -C $env.HOME
-$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/aarch64-linux-musl-cross/bin')
-$env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER = 'aarch64-linux-musl-gcc'
-cargo-build-nu
-}
-'armv7-unknown-linux-musleabihf' => {
-aria2c https://musl.cc/armv7r-linux-musleabihf-cross.tgz
-tar -xf armv7r-linux-musleabihf-cross.tgz -C $env.HOME
-$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/armv7r-linux-musleabihf-cross/bin')
-$env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER = 'armv7r-linux-musleabihf-gcc'
-cargo-build-nu
-}
-'loongarch64-unknown-linux-gnu' => {
-aria2c https://github.com/loongson/build-tools/releases/download/2024.08.08/x86_64-cross-tools-loongarch64-binutils_2.43-gcc_14.2.0-glibc_2.40.tar.xz
-tar xf x86_64-cross-tools-loongarch64-*.tar.xz
-$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.PWD)/cross-tools/bin')
-$env.CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_LINKER = 'loongarch64-unknown-linux-gnu-gcc'
-cargo-build-nu
 }
 _ => {
 # musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
 # Actually just for x86_64-unknown-linux-musl target
-if $USE_UBUNTU { sudo apt install musl-tools -y }
-cargo-build-nu
+if $os == $USE_UBUNTU { sudo apt install musl-tools -y }
+cargo-build-nu $flags
 }
 }
 }

@@ -118,7 +116,7 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
 # Build for Windows without static-link-openssl feature
 # ----------------------------------------------------------------------------
 if $os in ['windows-latest'] {
-cargo-build-nu
+cargo-build-nu $flags
 }

 # ----------------------------------------------------------------------------

@@ -130,22 +128,18 @@ let executable = $'target/($target)/release/($bin)*($suffix)'
 print $'Current executable file: ($executable)'

 cd $src; mkdir $dist;
-rm -rf ...(glob $'target/($target)/release/*.d') ...(glob $'target/($target)/release/nu_pretty_hex*')
+rm -rf $'target/($target)/release/*.d' $'target/($target)/release/nu_pretty_hex*'
 print $'(char nl)All executable files:'; hr-line
 # We have to use `print` here to make sure the command output is displayed
-print (ls -f ($executable | into glob)); sleep 1sec
+print (ls -f $executable); sleep 1sec

 print $'(char nl)Copying release files...'; hr-line
-"To use the included Nushell plugins, register the binaries with the `plugin add` command to tell Nu where to find the plugin.
-Then you can use `plugin use` to load the plugin into your session.
-For example:
+"To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:

-> plugin add ./nu_plugin_query
-> plugin use query
-
-For more information, refer to https://www.nushell.sh/book/plugins.html
-" | save $'($dist)/README.txt' -f
-[LICENSE ...(glob $executable)] | each {|it| cp -rv $it $dist } | flatten
+> register ./nu_plugin_query" | save $'($dist)/README.txt' -f
+[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten
+# Sleep a few seconds to make sure the cp process finished successfully
+sleep 3sec

 print $'(char nl)Check binary release version detail:'; hr-line
 let ver = if $os == 'windows-latest' {

@@ -154,23 +148,23 @@ let ver = if $os == 'windows-latest' {
 (do -i { ./output/nu -c 'version' }) | str join
 }
 if ($ver | str trim | is-empty) {
-print $'(ansi r)Incompatible Nu binary: The binary cross compiled is not runnable on current arch...(ansi reset)'
+print $'(ansi r)Incompatible nu binary...(ansi reset)'
 } else { print $ver }

 # ----------------------------------------------------------------------------
 # Create a release archive and send it to output for the following steps
 # ----------------------------------------------------------------------------
 cd $dist; print $'(char nl)Creating release archive...'; hr-line
-if $os in ['macos-latest'] or $USE_UBUNTU {
+if $os in [$USE_UBUNTU, 'macos-latest'] {

 let files = (ls | get name)
-let dest = $'($bin)-($version)-($target)'
+let dest = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' }
 let archive = $'($dist)/($dest).tar.gz'

 mkdir $dest
-$files | each {|it| cp -v $it $dest }
+$files | each {|it| mv $it $dest } | ignore

-print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls $dest | print
+print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls $dest

 tar -czf $archive $dest
 print $'archive: ---> ($archive)'; ls $archive

@@ -179,26 +173,21 @@ if $os in ['macos-latest'] or $USE_UBUNTU {

 } else if $os == 'windows-latest' {

-let releaseStem = $'($bin)-($version)-($target)'
+let releaseStem = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' }

 print $'(char nl)Download less related stuffs...'; hr-line
-# todo: less-v661 is out but is released as a zip file. maybe we should switch to that and extract it?
 aria2c https://github.com/jftuga/less-Windows/releases/download/less-v608/less.exe -o less.exe
-# the below was renamed because it was failing to download for darren. it should work but it wasn't
-# todo: maybe we should get rid of this aria2c dependency and just use http get?
-#aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt
-aria2c https://github.com/jftuga/less-Windows/blob/master/LICENSE -o LICENSE-for-less.txt
+aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt

 # Create Windows msi release package
 if (get-env _EXTRA_) == 'msi' {

 let wixRelease = $'($src)/target/wix/($releaseStem).msi'
-print $'(char nl)Start creating Windows msi package with the following contents...'
+print $'(char nl)Start creating Windows msi package...'
 cd $src; hr-line
 # Wix need the binaries be stored in target/release/
-cp -r ($'($dist)/*' | into glob) target/release/
-ls target/release/* | print
-cargo install cargo-wix --version 0.3.8
+cp -r $'($dist)/*' target/release/
+cargo install cargo-wix --version 0.3.4
 cargo wix --no-build --nocapture --package nu --output $wixRelease
 # Workaround for https://github.com/softprops/action-gh-release/issues/280
 let archive = ($wixRelease | str replace --all '\' '/')

@@ -207,9 +196,9 @@ if $os in ['macos-latest'] or $USE_UBUNTU {

 } else {

-print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls | print
+print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
 let archive = $'($dist)/($releaseStem).zip'
-7z a $archive ...(glob *)
+7z a $archive *
 let pkg = (ls -f $archive | get name)
 if not ($pkg | is-empty) {
 # Workaround for https://github.com/softprops/action-gh-release/issues/280

@@ -220,11 +209,19 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
 }
 }

-def 'cargo-build-nu' [] {
-if $os == 'windows-latest' {
-cargo build --release --all --target $target
+def 'cargo-build-nu' [ options: string ] {
+if ($options | str trim | is-empty) {
+if $os == 'windows-latest' {
+cargo build --release --all --target $target
+} else {
+cargo build --release --all --target $target --features=static-link-openssl
+}
 } else {
-cargo build --release --all --target $target --features=static-link-openssl
+if $os == 'windows-latest' {
+cargo build --release --all --target $target $options
+} else {
+cargo build --release --all --target $target --features=static-link-openssl $options
+}
 }
 }

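The reworked cargo-build-nu helper above receives the rustflags string that the workflows pass in through TARGET_RUSTFLAGS. A minimal sketch of the two call shapes, with flag values taken from the release matrices elsewhere in this compare:

    cargo-build-nu $flags                        # standard releases: $flags is ''
    cargo-build-nu '--features=dataframe,extra'  # full releases pass the extra feature flags
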
175
.github/workflows/release.yml
vendored
175
.github/workflows/release.yml
vendored
@ -14,8 +14,106 @@ defaults:
|
|||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
release:
|
standard:
|
||||||
name: Nu
|
name: Std
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target:
|
||||||
|
- aarch64-apple-darwin
|
||||||
|
- x86_64-apple-darwin
|
||||||
|
- x86_64-pc-windows-msvc
|
||||||
|
- aarch64-pc-windows-msvc
|
||||||
|
- x86_64-unknown-linux-gnu
|
||||||
|
- x86_64-unknown-linux-musl
|
||||||
|
- aarch64-unknown-linux-gnu
|
||||||
|
- armv7-unknown-linux-gnueabihf
|
||||||
|
- riscv64gc-unknown-linux-gnu
|
||||||
|
extra: ['bin']
|
||||||
|
include:
|
||||||
|
- target: aarch64-apple-darwin
|
||||||
|
os: macos-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-apple-darwin
|
||||||
|
os: macos-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-pc-windows-msvc
|
||||||
|
extra: 'bin'
|
||||||
|
os: windows-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-pc-windows-msvc
|
||||||
|
extra: msi
|
||||||
|
os: windows-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: aarch64-pc-windows-msvc
|
||||||
|
extra: 'bin'
|
||||||
|
os: windows-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: aarch64-pc-windows-msvc
|
||||||
|
extra: msi
|
||||||
|
os: windows-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-unknown-linux-gnu
|
||||||
|
os: ubuntu-20.04
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-unknown-linux-musl
|
||||||
|
os: ubuntu-20.04
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: aarch64-unknown-linux-gnu
|
||||||
|
os: ubuntu-20.04
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: armv7-unknown-linux-gnueabihf
|
||||||
|
os: ubuntu-20.04
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: riscv64gc-unknown-linux-gnu
|
||||||
|
os: ubuntu-20.04
|
||||||
|
target_rustflags: ''
|
||||||
|
|
||||||
|
runs-on: ${{matrix.os}}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Update Rust Toolchain Target
|
||||||
|
run: |
|
||||||
|
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
||||||
|
|
||||||
|
- name: Setup Rust toolchain and cache
|
||||||
|
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
|
||||||
|
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
||||||
|
with:
|
||||||
|
rustflags: ''
|
||||||
|
|
||||||
|
- name: Setup Nushell
|
||||||
|
uses: hustcer/setup-nu@v3.8
|
||||||
|
with:
|
||||||
|
version: 0.86.0
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Release Nu Binary
|
||||||
|
id: nu
|
||||||
|
run: nu .github/workflows/release-pkg.nu
|
||||||
|
env:
|
||||||
|
RELEASE_TYPE: standard
|
||||||
|
OS: ${{ matrix.os }}
|
||||||
|
REF: ${{ github.ref }}
|
||||||
|
TARGET: ${{ matrix.target }}
|
||||||
|
_EXTRA_: ${{ matrix.extra }}
|
||||||
|
TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}
|
||||||
|
|
||||||
|
# REF: https://github.com/marketplace/actions/gh-release
|
||||||
|
- name: Publish Archive
|
||||||
|
uses: softprops/action-gh-release@v0.1.15
|
||||||
|
if: ${{ startsWith(github.ref, 'refs/tags/') }}
|
||||||
|
with:
|
||||||
|
draft: true
|
||||||
|
files: ${{ steps.nu.outputs.archive }}
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
full:
|
||||||
|
name: Full
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
@@ -28,45 +126,39 @@ jobs:
         - x86_64-unknown-linux-gnu
         - x86_64-unknown-linux-musl
         - aarch64-unknown-linux-gnu
-        - aarch64-unknown-linux-musl
-        - armv7-unknown-linux-gnueabihf
-        - armv7-unknown-linux-musleabihf
-        - riscv64gc-unknown-linux-gnu
-        - loongarch64-unknown-linux-gnu
         extra: ['bin']
         include:
         - target: aarch64-apple-darwin
           os: macos-latest
+          target_rustflags: '--features=dataframe,extra'
         - target: x86_64-apple-darwin
           os: macos-latest
+          target_rustflags: '--features=dataframe,extra'
         - target: x86_64-pc-windows-msvc
           extra: 'bin'
           os: windows-latest
+          target_rustflags: '--features=dataframe,extra'
         - target: x86_64-pc-windows-msvc
           extra: msi
           os: windows-latest
+          target_rustflags: '--features=dataframe,extra'
         - target: aarch64-pc-windows-msvc
           extra: 'bin'
           os: windows-latest
+          target_rustflags: '--features=dataframe,extra'
         - target: aarch64-pc-windows-msvc
           extra: msi
           os: windows-latest
+          target_rustflags: '--features=dataframe,extra'
         - target: x86_64-unknown-linux-gnu
-          os: ubuntu-22.04
+          os: ubuntu-20.04
+          target_rustflags: '--features=dataframe,extra'
         - target: x86_64-unknown-linux-musl
-          os: ubuntu-22.04
+          os: ubuntu-20.04
+          target_rustflags: '--features=dataframe,extra'
         - target: aarch64-unknown-linux-gnu
-          os: ubuntu-22.04
+          os: ubuntu-20.04
+          target_rustflags: '--features=dataframe,extra'
-        - target: aarch64-unknown-linux-musl
-          os: ubuntu-22.04
-        - target: armv7-unknown-linux-gnueabihf
-          os: ubuntu-22.04
-        - target: armv7-unknown-linux-musleabihf
-          os: ubuntu-22.04
-        - target: riscv64gc-unknown-linux-gnu
-          os: ubuntu-latest
-        - target: loongarch64-unknown-linux-gnu
-          os: ubuntu-22.04

     runs-on: ${{matrix.os}}

@@ -77,57 +169,36 @@ jobs:
      run: |
        echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

-    - name: Setup Rust toolchain
-      uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
+    - name: Setup Rust toolchain and cache
+      uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
      # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
      with:
-        cache: false
        rustflags: ''

    - name: Setup Nushell
-      uses: hustcer/setup-nu@v3
+      uses: hustcer/setup-nu@v3.8
      with:
-        version: 0.98.0
+        version: 0.86.0
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

    - name: Release Nu Binary
      id: nu
      run: nu .github/workflows/release-pkg.nu
      env:
+        RELEASE_TYPE: full
        OS: ${{ matrix.os }}
        REF: ${{ github.ref }}
        TARGET: ${{ matrix.target }}
        _EXTRA_: ${{ matrix.extra }}
+        TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}

-    # WARN: Don't upgrade this action due to the release per asset issue.
-    # See: https://github.com/softprops/action-gh-release/issues/445
+    # REF: https://github.com/marketplace/actions/gh-release
    - name: Publish Archive
-      uses: softprops/action-gh-release@v2.0.5
+      uses: softprops/action-gh-release@v0.1.15
      if: ${{ startsWith(github.ref, 'refs/tags/') }}
      with:
        draft: true
        files: ${{ steps.nu.outputs.archive }}
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

-  sha256sum:
-    needs: release
-    name: Create Sha256sum
-    runs-on: ubuntu-latest
-    steps:
-    - name: Download Release Archives
-      env:
-        GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      run: >-
-        gh release download ${{ github.ref_name }}
-        --repo ${{ github.repository }}
-        --pattern '*'
-        --dir release
-    - name: Create Checksums
-      run: cd release && shasum -a 256 * > ../SHA256SUMS
-    - name: Publish Checksums
-      uses: softprops/action-gh-release@v2.0.5
-      with:
-        draft: true
-        files: SHA256SUMS
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

6 .github/workflows/typos.yml vendored
@@ -7,7 +7,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - name: Checkout Actions Repository
-      uses: actions/checkout@v4.1.7
+      uses: actions/checkout@v4

     - name: Check spelling
-      uses: crate-ci/typos@v1.28.4
+      uses: crate-ci/typos@v1.16.23
+      with:
+        config: ./.github/.typos.toml
26 CITATION.cff
@@ -1,26 +0,0 @@
-cff-version: 1.2.0
-title: 'Nushell'
-message: >-
-  If you use this software and wish to cite it,
-  you can use the metadata from this file.
-type: software
-authors:
-  - name: "The Nushell Project Team"
-identifiers:
-  - type: url
-    value: 'https://github.com/nushell/nushell'
-    description: Repository
-repository-code: 'https://github.com/nushell/nushell'
-url: 'https://www.nushell.sh/'
-abstract: >-
-  The goal of the Nushell project is to take the Unix
-  philosophy of shells, where pipes connect simple commands
-  together, and bring it to the modern style of development.
-  Thus, rather than being either a shell, or a programming
-  language, Nushell connects both by bringing a rich
-  programming language and a full-featured shell together
-  into one package.
-keywords:
-  - nushell
-  - shell
-license: MIT
@@ -10,16 +10,11 @@ Welcome to Nushell and thank you for considering contributing!
 - [Useful commands](#useful-commands)
 - [Debugging tips](#debugging-tips)
 - [Git etiquette](#git-etiquette)
+- [Our Rust style](#our-rust-style)
+  - [Generally discouraged](#generally-discouraged)
+  - [Things we want to get better at](#things-we-want-to-get-better-at)
 - [License](#license)

-## Other helpful resources
-
-More resources can be found in the nascent [developer documentation](devdocs/README.md) in this repo.
-
-- [Developer FAQ](devdocs/FAQ.md)
-- [Platform support policy](devdocs/PLATFORM_SUPPORT.md)
-- [Our Rust style](devdocs/rust_style.md)
-
 ## Proposing design changes

 First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it.
@@ -55,6 +50,7 @@ It is good practice to cover your changes with a test. Also, try to think about

 Tests can be found in different places:
 * `/tests`
+* `src/tests`
 * command examples
 * crate-specific tests

@@ -67,64 +63,74 @@ Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/ref

 - Build and run Nushell:

-  ```nushell
+  ```shell
   cargo run
   ```

+- Build and run with dataframe support.
+
+  ```shell
+  cargo run --features=dataframe
+  ```
+
 - Run Clippy on Nushell:

-  ```nushell
+  ```shell
   cargo clippy --workspace -- -D warnings -D clippy::unwrap_used
   ```
   or via the `toolkit.nu` command:
-  ```nushell
+  ```shell
   use toolkit.nu clippy
   clippy
   ```

 - Run all tests:

-  ```nushell
+  ```shell
   cargo test --workspace
   ```
+
+  along with dataframe tests
+
+  ```shell
+  cargo test --workspace --features=dataframe
+  ```
   or via the `toolkit.nu` command:
-  ```nushell
+  ```shell
   use toolkit.nu test
   test
   ```

 - Run all tests for a specific command

-  ```nushell
+  ```shell
   cargo test --package nu-cli --test main -- commands::<command_name_here>
   ```

 - Check to see if there are code formatting issues

-  ```nushell
+  ```shell
   cargo fmt --all -- --check
   ```
   or via the `toolkit.nu` command:
-  ```nushell
+  ```shell
   use toolkit.nu fmt
   fmt --check
   ```

 - Format the code in the project

-  ```nushell
+  ```shell
   cargo fmt --all
   ```
   or via the `toolkit.nu` command:
-  ```nushell
+  ```shell
   use toolkit.nu fmt
   fmt
   ```

 - Set up `git` hooks to check formatting and run `clippy` before committing and pushing:

-  ```nushell
+  ```shell
   use toolkit.nu setup-git-hooks
   setup-git-hooks
   ```
@@ -134,12 +140,12 @@ Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/ref

 - To view verbose logs when developing, enable the `trace` log level.

-  ```nushell
+  ```shell
   cargo run --release -- --log-level trace
   ```

 - To redirect trace logs to a file, enable the `--log-target file` switch.
-  ```nushell
+  ```shell
   cargo run --release -- --log-level trace --log-target file
   open $"($nu.temp-path)/nu-($nu.pid).log"
   ```
@@ -230,6 +236,51 @@ You can help us to make the review process a smooth experience:
 - Choose what simplifies having confidence in the conflict resolution and the review. **Merge commits in your branch are OK** in the squash model.
 - Feel free to notify your reviewers or affected PR authors if your change might cause larger conflicts with another change.
 - During the rollup of multiple PRs, we may choose to resolve merge conflicts and CI failures ourselves. (Allow maintainers to push to your branch to enable us to do this quickly.)

+## Our Rust style
+
+To make collaboration on a project the scale of Nushell easy, we want to work towards a style of Rust code that can easily be understood by all of our contributors. We conservatively rely on most of [`clippy`'s suggestions](https://github.com/rust-lang/rust-clippy) to get to the holy grail of "idiomatic" code. Good code in our eyes is not the most clever use of all available language features or the most unique personal touch, but code that is readable and strikes a balance between being concise and being unsurprising and explicit in the places where it matters.
+
+One example of this philosophy is that we generally avoid fighting the borrow-checker in our data model and rather try to get to a correct and simple solution first, and then figure out where we should reuse data to achieve the necessary performance. As we are still pre-1.0, this has served us well in being able to quickly refactor or change larger parts of the code base.
+
+### Generally discouraged
+
+#### `+nightly` language features or things only available in the most recent `+stable`
+
+To make life easier for the people that maintain the Nushell packages in various distributions with their own release cycle of `rustc`, we typically rely on slightly older Rust versions. We do not make explicit guarantees about how far back in the past we live, but you can find out in our [`rust-toolchain.toml`](https://github.com/nushell/nushell/blob/main/rust-toolchain.toml).
+(As a rule of thumb this has typically been approximately 2 releases behind the newest stable compiler.)
+The use of nightly features is prohibited.
+
+#### Panicking
+
+As Nushell aims to provide a reliable foundational way for folks to interact with their computer, we cannot carelessly crash the execution of their work by panicking Nushell.
+Thus panicking is not an allowed error handling strategy for anything that could be triggered by user input OR by the behavior of the outside system. If Nushell panics, this is a bug or we are, against all odds, already in an unrecoverable state (the system stopped cooperating, we ran out of memory). The use of `.unwrap()` is thus outright banned, and any uses of `.expect()` or related panicking macros like `unreachable!` should include a helpful description of which assumptions have been violated.
+
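As a minimal sketch of the panicking guidance above (an illustrative aside, not taken from the Nushell codebase or from the diff; `parse_port` is a hypothetical helper), fallible paths return a `Result` for the caller to handle, and the single `.expect()` states the assumption it relies on:

```rust
use std::num::ParseIntError;

// Hypothetical helper: propagate the error to the caller instead of panicking.
fn parse_port(input: &str) -> Result<u16, ParseIntError> {
    // `?`-style propagation: the failure becomes a value the caller can report.
    input.trim().parse::<u16>()
}

fn main() {
    match parse_port("8080") {
        Ok(port) => println!("listening on {port}"),
        Err(err) => eprintln!("invalid port: {err}"),
    }

    // If an invariant truly cannot fail, document it instead of a bare `.unwrap()`:
    let first = [1, 2, 3]
        .first()
        .copied()
        .expect("array literal is non-empty, so `first` always succeeds");
    println!("{first}");
}
```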
+#### `unsafe` code
+
+For any use of `unsafe` code we need to require even higher standards and additional review. If you add or alter `unsafe` blocks you have to be familiar with the promises you need to uphold as found in the [Rustonomicon](https://doc.rust-lang.org/nomicon/intro.html). All `unsafe` uses should include `// SAFETY:` comments explaining how the invariants are upheld, thus alerting you to what to watch out for when making a change.
+
+##### FFI with system calls and the outside world
+
+As a shell, Nushell needs to interact with system APIs in several places, for which FFI code with `unsafe` blocks may be necessary. In some cases this can be handled by safe API wrapper crates, but in some cases we may choose to directly make those calls.
+If you do so, you need to document the system behavior on top of the Rust memory model guarantees that you uphold. This means documenting whether using a particular system call is safe in a particular context and that all failure cases are properly recovered.
+
+##### Implementing self-contained data structures
+
+Another motivation for reaching for `unsafe` code might be to try to implement a particular data structure that is not expressible with safe `std` library APIs. Doing so in the Nushell code base would have to clear a high bar for need, based on profiling results. Also, you should first survey the [crate ecosystem](https://crates.io) to confirm there doesn't already exist a usable, well-vetted crate that provides safe APIs to the desired data structure.
+
+##### Make things go faster by removing checks
+
+If you feel tempted to do this, it is probably a bad idea. Don't.
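To make the `// SAFETY:` convention concrete, here is a deliberately contrived sketch (an editorial illustration, not code from the repository); in real code the checked `bytes[0]` indexing would normally be preferred, and the `unsafe` block exists only to show where the comment goes and what it should state:

```rust
/// Returns the first byte of `bytes`, or `None` if the slice is empty.
fn first_byte(bytes: &[u8]) -> Option<u8> {
    if bytes.is_empty() {
        return None;
    }
    // SAFETY: we just checked that `bytes` is non-empty, so index 0 is in bounds.
    Some(unsafe { *bytes.get_unchecked(0) })
}

fn main() {
    assert_eq!(first_byte(b"nu"), Some(b'n'));
    assert_eq!(first_byte(b""), None);
}
```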
+#### Macros
+
+Another advanced feature that people feel tempted to use to work around perceived limitations of Rust's syntax, and that we are not particularly fans of, is custom macros.
+They have clear downsides, not only in terms of readability, if they locally introduce a different syntax. Most tooling apart from the compiler will struggle more with them. This limits, for example, consistent automatic formatting or automated refactors with `rust-analyzer`.
+It is less likely that you can fluently read `macro_rules!` than regular code. This can lead people to introduce funky behavior when using a macro, be it because a macro does not follow proper hygiene rules or because it introduces excessive work at compile time.
+
+So we generally discourage the addition of macros. In a lot of cases your macro may start to do something that can be expressed with functions or generics in a much more reusable fashion.
+The only exceptions we may allow need to demonstrate that the macro can fix something that is otherwise extremely unreadable, error-prone, or consistently worse at compile time.
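As a hedged illustration of the "functions or generics over macros" point (an editorial sketch, not from the repository; `print_twice_macro` and `print_twice` are hypothetical names), the macro below does nothing a small generic function cannot do more readably:

```rust
use std::fmt::Display;

// A macro that only stamps out near-identical bodies...
macro_rules! print_twice_macro {
    ($value:expr) => {{
        println!("{}", $value);
        println!("{}", $value);
    }};
}

// ...is usually better expressed as a generic function: it is type-checked as
// ordinary code, appears in rustdoc, and cooperates with rust-analyzer refactors.
fn print_twice<T: Display>(value: T) {
    println!("{value}");
    println!("{value}");
}

fn main() {
    print_twice_macro!("via macro");
    print_twice("via generic function");
    print_twice(42);
}
```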
+### Things we want to get better at
+
+These are things we did pretty liberally to get Nushell off the ground that make things harder for a high quality stable product. You may run across them, but you shouldn't take them as an endorsed example.
+
+#### Liberal use of third-party dependencies
+
+The amazing variety of crates on [crates.io](https://crates.io) allowed us to quickly get Nushell into a feature rich state, but it left us with a bunch of baggage to clean up.
+Each dependency introduces a compile time cost, and duplicated code can add to the overall binary size. Also, vetting more dependencies for correct and secure implementations takes unreasonably more time, as this is a continuous process of reacting to updates or potential vulnerabilities.
+
+Thus we only want to accept dependencies that are essential, well tested implementations of a particular requirement of Nushell's codebase.
+Also, for the move to 1.0 we will try to unify among a set of dependencies if they implement similar things in an area. We don't need three different crates, each a potentially perfect fit for three problems, but rather one reliable crate with a maximized overlap between what it provides and what we need.
+We will favor crates that are well tested and widely used, and that promise to be more stable while still being frequently maintained.
+
+#### Deeply nested code
+
+As Nushell uses a lot of enums in its internal data representation, there are a lot of `match` expressions. Combined with the need to handle a lot of edge cases and be defensive about any errors, this has led to some absolutely hard to read, deeply nested code (e.g. in the parser but also in the implementation of several commands).
+This can be observed both as a "rightward drift", where the main part of the code is found after many levels of indentation, and as long function bodies with several layers of branching, with seemingly repeated branching inside the higher branch levels.
+This can also be exacerbated by "quick" bugfixes/enhancements that just add a special case to catch a previously unexpected condition. The likelihood of introducing a bug in a sea of code duplication is high.
+To combat this, consider using the early-`return` pattern to reject invalid data early, in one place, instead of building a tree through Rust's expression constructs with a lot of duplicated paths. Unpacking data into a type that expresses that the necessary checks have already happened, and using functions to properly separate common and distinct behavior, can also help.

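A minimal sketch of the early-`return` pattern described above (an editorial illustration with hypothetical `describe_env_*` functions, not code from the repository): both variants implement the same logic, but the second rejects the edge cases up front and avoids the rightward drift.

```rust
// Rightward drift: every case adds another level of nesting.
fn describe_env_nested(value: Option<&str>) -> String {
    match value {
        Some(s) => {
            if !s.is_empty() {
                if let Ok(n) = s.parse::<i64>() {
                    format!("numeric: {n}")
                } else {
                    format!("text: {s}")
                }
            } else {
                "empty".into()
            }
        }
        None => "unset".into(),
    }
}

// Early returns: invalid/edge cases are handled first, the main path reads linearly.
fn describe_env_early_return(value: Option<&str>) -> String {
    let Some(s) = value else {
        return "unset".into();
    };
    if s.is_empty() {
        return "empty".into();
    }
    match s.parse::<i64>() {
        Ok(n) => format!("numeric: {n}"),
        Err(_) => format!("text: {s}"),
    }
}

fn main() {
    for input in [None, Some(""), Some("42"), Some("blue")] {
        assert_eq!(describe_env_nested(input), describe_env_early_return(input));
    }
    println!("both variants agree");
}
```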
 ## License

5577 Cargo.lock generated
File diff suppressed because it is too large
317 Cargo.toml
@@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
 license = "MIT"
 name = "nu"
 repository = "https://github.com/nushell/nushell"
-rust-version = "1.81.0"
-version = "0.101.0"
+rust-version = "1.60"
+version = "0.87.1"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -24,254 +24,99 @@ pkg-fmt = "zip"

 [workspace]
 members = [
     "crates/nu-cli",
     "crates/nu-engine",
     "crates/nu-parser",
     "crates/nu-system",
     "crates/nu-cmd-base",
     "crates/nu-cmd-extra",
     "crates/nu-cmd-lang",
-    "crates/nu-cmd-plugin",
+    "crates/nu-cmd-dataframe",
     "crates/nu-command",
-    "crates/nu-color-config",
-    "crates/nu-explore",
-    "crates/nu-json",
-    "crates/nu-lsp",
-    "crates/nu-pretty-hex",
-    "crates/nu-protocol",
-    "crates/nu-derive-value",
-    "crates/nu-plugin",
-    "crates/nu-plugin-core",
-    "crates/nu-plugin-engine",
-    "crates/nu-plugin-protocol",
-    "crates/nu-plugin-test-support",
-    "crates/nu_plugin_inc",
-    "crates/nu_plugin_gstat",
-    "crates/nu_plugin_example",
-    "crates/nu_plugin_query",
-    "crates/nu_plugin_custom_values",
-    "crates/nu_plugin_formats",
-    "crates/nu_plugin_polars",
-    "crates/nu_plugin_stress_internals",
-    "crates/nu-std",
-    "crates/nu-table",
-    "crates/nu-term-grid",
-    "crates/nu-test-support",
-    "crates/nu-utils",
-    "crates/nuon",
+    "crates/nu-lsp",
+    "crates/nu-protocol",
+    "crates/nu-plugin",
+    "crates/nu_plugin_inc",
+    "crates/nu_plugin_gstat",
+    "crates/nu_plugin_example",
+    "crates/nu_plugin_query",
+    "crates/nu_plugin_custom_values",
+    "crates/nu_plugin_formats",
+    "crates/nu-std",
+    "crates/nu-utils",
 ]

[workspace.dependencies]
|
|
||||||
alphanumeric-sort = "1.5"
|
|
||||||
ansi-str = "0.8"
|
|
||||||
anyhow = "1.0.82"
|
|
||||||
base64 = "0.22.1"
|
|
||||||
bracoxide = "0.1.4"
|
|
||||||
brotli = "6.0"
|
|
||||||
byteorder = "1.5"
|
|
||||||
bytes = "1"
|
|
||||||
bytesize = "1.3"
|
|
||||||
calamine = "0.26.1"
|
|
||||||
chardetng = "0.1.17"
|
|
||||||
chrono = { default-features = false, version = "0.4.34" }
|
|
||||||
chrono-humanize = "0.2.3"
|
|
||||||
chrono-tz = "0.10"
|
|
||||||
crossbeam-channel = "0.5.8"
|
|
||||||
crossterm = "0.28.1"
|
|
||||||
csv = "1.3"
|
|
||||||
ctrlc = "3.4"
|
|
||||||
dialoguer = { default-features = false, version = "0.11" }
|
|
||||||
digest = { default-features = false, version = "0.10" }
|
|
||||||
dirs = "5.0"
|
|
||||||
dirs-sys = "0.4"
|
|
||||||
dtparse = "2.0"
|
|
||||||
encoding_rs = "0.8"
|
|
||||||
fancy-regex = "0.14"
|
|
||||||
filesize = "0.2"
|
|
||||||
filetime = "0.2"
|
|
||||||
fuzzy-matcher = "0.3"
|
|
||||||
heck = "0.5.0"
|
|
||||||
human-date-parser = "0.2.0"
|
|
||||||
indexmap = "2.7"
|
|
||||||
indicatif = "0.17"
|
|
||||||
interprocess = "2.2.0"
|
|
||||||
is_executable = "1.0"
|
|
||||||
itertools = "0.13"
|
|
||||||
libc = "0.2"
|
|
||||||
libproc = "0.14"
|
|
||||||
log = "0.4"
|
|
||||||
lru = "0.12"
|
|
||||||
lscolors = { version = "0.17", default-features = false }
|
|
||||||
lsp-server = "0.7.5"
|
|
||||||
lsp-types = { version = "0.95.0", features = ["proposed"] }
|
|
||||||
mach2 = "0.4"
|
|
||||||
md5 = { version = "0.10", package = "md-5" }
|
|
||||||
miette = "7.3"
|
|
||||||
mime = "0.3.17"
|
|
||||||
mime_guess = "2.0"
|
|
||||||
mockito = { version = "1.6", default-features = false }
|
|
||||||
multipart-rs = "0.1.13"
|
|
||||||
native-tls = "0.2"
|
|
||||||
nix = { version = "0.29", default-features = false }
|
|
||||||
notify-debouncer-full = { version = "0.3", default-features = false }
|
|
||||||
nu-ansi-term = "0.50.1"
|
|
||||||
num-format = "0.4"
|
|
||||||
num-traits = "0.2"
|
|
||||||
oem_cp = "2.0.0"
|
|
||||||
omnipath = "0.1"
|
|
||||||
open = "5.3"
|
|
||||||
os_pipe = { version = "1.2", features = ["io_safety"] }
|
|
||||||
pathdiff = "0.2"
|
|
||||||
percent-encoding = "2"
|
|
||||||
pretty_assertions = "1.4"
|
|
||||||
print-positions = "0.6"
|
|
||||||
proc-macro-error = { version = "1.0", default-features = false }
|
|
||||||
proc-macro2 = "1.0"
|
|
||||||
procfs = "0.17.0"
|
|
||||||
pwd = "1.3"
|
|
||||||
quick-xml = "0.37.0"
|
|
||||||
quickcheck = "1.0"
|
|
||||||
quickcheck_macros = "1.0"
|
|
||||||
quote = "1.0"
|
|
||||||
rand = "0.8"
|
|
||||||
getrandom = "0.2" # pick same version that rand requires
|
|
||||||
rand_chacha = "0.3.1"
|
|
||||||
ratatui = "0.26"
|
|
||||||
rayon = "1.10"
|
|
||||||
reedline = "0.38.0"
|
|
||||||
regex = "1.9.5"
|
|
||||||
rmp = "0.8"
|
|
||||||
rmp-serde = "1.3"
|
|
||||||
ropey = "1.6.1"
|
|
||||||
roxmltree = "0.20"
|
|
||||||
rstest = { version = "0.23", default-features = false }
|
|
||||||
rusqlite = "0.31"
|
|
||||||
rust-embed = "8.5.0"
|
|
||||||
scopeguard = { version = "1.2.0" }
|
|
||||||
serde = { version = "1.0" }
|
|
||||||
serde_json = "1.0"
|
|
||||||
serde_urlencoded = "0.7.1"
|
|
||||||
serde_yaml = "0.9"
|
|
||||||
sha2 = "0.10"
|
|
||||||
strip-ansi-escapes = "0.2.0"
|
|
||||||
syn = "2.0"
|
|
||||||
sysinfo = "0.32"
|
|
||||||
tabled = { version = "0.16.0", default-features = false }
|
|
||||||
tempfile = "3.14"
|
|
||||||
terminal_size = "0.4"
|
|
||||||
titlecase = "3.0"
|
|
||||||
toml = "0.8"
|
|
||||||
trash = "5.2"
|
|
||||||
umask = "2.1"
|
|
||||||
unicode-segmentation = "1.12"
|
|
||||||
unicode-width = "0.2"
|
|
||||||
ureq = { version = "2.12", default-features = false }
|
|
||||||
url = "2.2"
|
|
||||||
uu_cp = "0.0.28"
|
|
||||||
uu_mkdir = "0.0.28"
|
|
||||||
uu_mktemp = "0.0.28"
|
|
||||||
uu_mv = "0.0.28"
|
|
||||||
uu_touch = "0.0.28"
|
|
||||||
uu_whoami = "0.0.28"
|
|
||||||
uu_uname = "0.0.28"
|
|
||||||
uucore = "0.0.28"
|
|
||||||
uuid = "1.11.0"
|
|
||||||
v_htmlescape = "0.15.0"
|
|
||||||
wax = "0.6"
|
|
||||||
which = "7.0.0"
|
|
||||||
windows = "0.56"
|
|
||||||
windows-sys = "0.48"
|
|
||||||
winreg = "0.52"
|
|
||||||
|
|
||||||
[workspace.lints.clippy]
|
|
||||||
# Warning: workspace lints affect library code as well as tests, so don't enable lints that would be too noisy in tests like that.
|
|
||||||
# todo = "warn"
|
|
||||||
unchecked_duration_subtraction = "warn"
|
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
||||||
 [dependencies]
-nu-cli = { path = "./crates/nu-cli", version = "0.101.0" }
-nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.101.0" }
-nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.101.0" }
-nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.101.0", optional = true }
-nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.101.0" }
-nu-command = { path = "./crates/nu-command", version = "0.101.0" }
-nu-engine = { path = "./crates/nu-engine", version = "0.101.0" }
-nu-explore = { path = "./crates/nu-explore", version = "0.101.0" }
-nu-lsp = { path = "./crates/nu-lsp/", version = "0.101.0" }
-nu-parser = { path = "./crates/nu-parser", version = "0.101.0" }
-nu-path = { path = "./crates/nu-path", version = "0.101.0" }
-nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.101.0" }
-nu-protocol = { path = "./crates/nu-protocol", version = "0.101.0" }
-nu-std = { path = "./crates/nu-std", version = "0.101.0" }
-nu-system = { path = "./crates/nu-system", version = "0.101.0" }
-nu-utils = { path = "./crates/nu-utils", version = "0.101.0" }
-reedline = { workspace = true, features = ["bashisms", "sqlite"] }
+nu-cli = { path = "./crates/nu-cli", version = "0.87.1" }
+nu-color-config = { path = "./crates/nu-color-config", version = "0.87.1" }
+nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.87.1" }
+nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.87.1" }
+nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.87.1", features = ["dataframe"], optional = true }
+nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.87.1", optional = true }
+nu-command = { path = "./crates/nu-command", version = "0.87.1" }
+nu-engine = { path = "./crates/nu-engine", version = "0.87.1" }
+nu-explore = { path = "./crates/nu-explore", version = "0.87.1" }
+nu-json = { path = "./crates/nu-json", version = "0.87.1" }
+nu-lsp = { path = "./crates/nu-lsp/", version = "0.87.1" }
+nu-parser = { path = "./crates/nu-parser", version = "0.87.1" }
+nu-path = { path = "./crates/nu-path", version = "0.87.1" }
+nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.87.1" }
+nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.87.1" }
+nu-protocol = { path = "./crates/nu-protocol", version = "0.87.1" }
+nu-system = { path = "./crates/nu-system", version = "0.87.1" }
+nu-table = { path = "./crates/nu-table", version = "0.87.1" }
+nu-term-grid = { path = "./crates/nu-term-grid", version = "0.87.1" }
+nu-std = { path = "./crates/nu-std", version = "0.87.1" }
+nu-utils = { path = "./crates/nu-utils", version = "0.87.1" }
+nu-ansi-term = "0.49.0"
+reedline = { version = "0.26.0", features = ["bashisms", "sqlite"] }

-crossterm = { workspace = true }
-ctrlc = { workspace = true }
-dirs = { workspace = true }
-log = { workspace = true }
-miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
-mimalloc = { version = "0.1.42", default-features = false, optional = true }
-multipart-rs = { workspace = true }
-serde_json = { workspace = true }
+crossterm = "0.27"
+ctrlc = "3.4"
+log = "0.4"
+miette = { version = "5.10", features = ["fancy-no-backtrace"] }
+mimalloc = { version = "0.1.37", default-features = false, optional = true }
+serde_json = "1.0"
 simplelog = "0.12"
 time = "0.3"

 [target.'cfg(not(target_os = "windows"))'.dependencies]
 # Our dependencies don't use OpenSSL on Windows
 openssl = { version = "0.10", features = ["vendored"], optional = true }
+signal-hook = { version = "0.3", default-features = false }

 [target.'cfg(windows)'.build-dependencies]
 winresource = "0.1"

 [target.'cfg(target_family = "unix")'.dependencies]
-nix = { workspace = true, default-features = false, features = [
+nix = { version = "0.27", default-features = false, features = [
   "signal",
   "process",
   "fs",
   "term",
 ] }

 [dev-dependencies]
-nu-test-support = { path = "./crates/nu-test-support", version = "0.101.0" }
-nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.101.0" }
-nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.101.0" }
+nu-test-support = { path = "./crates/nu-test-support", version = "0.87.1" }
 assert_cmd = "2.0"
-dirs = { workspace = true }
-tango-bench = "0.6"
-pretty_assertions = { workspace = true }
-regex = { workspace = true }
-rstest = { workspace = true, default-features = false }
-serial_test = "3.2"
-tempfile = { workspace = true }
+criterion = "0.5"
+pretty_assertions = "1.4"
+rstest = { version = "0.18", default-features = false }
+serial_test = "2.0"
+tempfile = "3.8"

 [features]
 plugin = [
-  # crates
-  "nu-cmd-plugin",
-  "nu-plugin-engine",
-  # features
-  "nu-cli/plugin",
-  "nu-cmd-lang/plugin",
-  "nu-command/plugin",
-  "nu-engine/plugin",
-  "nu-parser/plugin",
-  "nu-protocol/plugin",
+  "nu-plugin",
+  "nu-cli/plugin",
+  "nu-parser/plugin",
+  "nu-command/plugin",
+  "nu-protocol/plugin",
+  "nu-engine/plugin",
 ]
-
-default = [
-  "plugin",
-  "trash-support",
-  "sqlite",
-  "mimalloc",
-]
+default = ["plugin", "which-support", "trash-support", "sqlite", "mimalloc"]
 stable = ["default"]
+wasi = ["nu-cmd-lang/wasi"]
 # NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command

 # Enable to statically link OpenSSL (perl is required, to build OpenSSL https://docs.rs/openssl/latest/openssl/);
@@ -279,17 +124,17 @@ stable = ["default"]
 static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]

 mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
-# Optional system clipboard support in `reedline`, this behavior has problematic compatibility with some systems.
-# Missing X server/ Wayland can cause issues
-system-clipboard = [
-  "reedline/system_clipboard",
-  "nu-cli/system-clipboard",
-  "nu-cmd-lang/system-clipboard",
-]

 # Stable (Default)
+which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"]
 trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]

+# Extra feature for nushell
+extra = ["dep:nu-cmd-extra", "nu-cmd-lang/extra"]
+
+# Dataframe feature for nushell
+dataframe = ["dep:nu-cmd-dataframe", "nu-cmd-lang/dataframe"]
+
 # SQLite commands for nushell
 sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]

@@ -321,11 +166,13 @@ bench = false
 # To use a development version of a dependency please use a global override here
 # changing versions in each sub-crate of the workspace is tedious
 [patch.crates-io]
-# reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
+# reedline = { git = "https://github.com/nushell/reedline.git", branch = "main" }
 # nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}
+# uu_cp = { git = "https://github.com/uutils/coreutils.git", branch = "main" }

+# Criterion benchmarking setup
 # Run all benchmarks with `cargo bench`
 # Run individual benchmarks like `cargo bench -- <regex>` e.g. `cargo bench -- parse`
 [[bench]]
 name = "benchmarks"
 harness = false

@@ -33,8 +33,13 @@ We will try to provide builds for all of them but a standard configuration for x

 We have features of Nushell behind flags that can be passed at compilation time.

-The design focus of Nushell is primarily expressed by everything accessible without passing additional feature flags.
-This provides a standard command set and receives the most attention.
+The design focus of Nushell is primarily expressed by everything accessible without passing additional feature flags. This provides a standard command set and receives the most attention.
+Two other feature flags are actively tested but are not guaranteed to express the stable design direction of Nushell:
+- `extra`
+  - This includes commands where we are not convinced that they are ready to be stabilized for 1.0 or popular enough.
+- `dataframe`
+  - This includes dataframe support via `polars` and `arrow2`. It introduces significant additional compilation time and binary size.
+  - Due to the use of SIMD extensions it may not be compatible with every minimal architecture.

 ## Passively supported platforms

11 README.md
@@ -52,13 +52,13 @@ To use `Nu` in GitHub Action, check [setup-nu](https://github.com/marketplace/ac

 Detailed installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). Nu is available via many package managers:

 [Packaging status](https://repology.org/project/nushell/versions)

-For details about which platforms the Nushell team actively supports, see [our platform support policy](devdocs/PLATFORM_SUPPORT.md).
+For details about which platforms the Nushell team actively supports, see [our platform support policy](PLATFORM_SUPPORT.md).

 ## Configuration

-The default configurations can be found at [sample_config](crates/nu-utils/src/default_files)
+The default configurations can be found at [sample_config](crates/nu-utils/src/sample_config)
 which are the configuration files one gets when they startup Nushell for the first time.

 It sets all of the default configuration to run Nushell. From here one can
@@ -199,7 +199,7 @@ topics that have been presented.

 Nu adheres closely to a set of goals that make up its design philosophy. As features are added, they are checked against these goals.

-- First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has [first-class support for Windows, macOS, and Linux](devdocs/PLATFORM_SUPPORT.md).
+- First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has [first-class support for Windows, macOS, and Linux](PLATFORM_SUPPORT.md).

 - Nu ensures compatibility with existing platform-specific executables.

@@ -222,14 +222,13 @@ Please submit an issue or PR to be added to this list.
 - [clap](https://github.com/clap-rs/clap/tree/master/clap_complete_nushell)
 - [Dorothy](http://github.com/bevry/dorothy)
 - [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
-- [x-cmd](https://x-cmd.com/mod/nu)

 ## Contributing

 See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!

 <a href="https://github.com/nushell/nushell/graphs/contributors">
-  <img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750&columns=20" />
+  <img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=600" />
 </a>

 ## License

29 SECURITY.md
@@ -1,29 +0,0 @@
-# Security Policy
-
-As a shell and programming language Nushell provides you with great powers and the potential to do dangerous things to your computer and data. Whenever there is a risk that a malicious actor can abuse a bug or a violation of documented behavior/assumptions in Nushell to harm you this is a *security* risk.
-We want to fix those issues without exposing our users to unnecessary risk. Thus we want to explain our security policy.
-Additional issues may be part of *safety* where the behavior of Nushell as designed and implemented can cause unintended harm or a bug causes damage without the involvement of a third party.
-
-## Supported Versions
-
-As Nushell is still under very active pre-stable development, the only version the core team prioritizes for security and safety fixes is the [most recent version as published on GitHub](https://github.com/nushell/nushell/releases/latest).
-Only if you provide a strong reasoning and the necessary resources, will we consider blessing a backported fix with an official patch release for a previous version.
-
-## Reporting a Vulnerability
-
-If you suspect that a bug or behavior of Nushell can affect security or may be potentially exploitable, please report the issue to us in private.
-Either reach out to the core team on [our Discord server](https://discord.gg/NtAbbGn) to arrange a private channel or use the [GitHub vulnerability reporting form](https://github.com/nushell/nushell/security/advisories/new).
-Please try to answer the following questions:
-- How can we reach you for further questions?
-- What is the bug? Which system of Nushell may be affected?
-- Do you have proof-of-concept for a potential exploit or have you observed an exploit in the wild?
-- What is your assessment of the severity based on what could be impacted should the bug be exploited?
-- Are additional people aware of the issue or deserve credit for identifying the issue?
-
-We will try to get back to you within a week with:
-- acknowledging the receipt of the report
-- an initial plan of how we want to address this including the primary points of contact for further communication
-- our preliminary assessment of how severe we judge the issue
-- a proposal for how we can coordinate responsible disclosure (e.g. how we ship the bugfix, if we need to coordinate with distribution maintainers, when you can release a blog post if you want to etc.)
-
-For purely *safety* related issues where the impact is severe by direct user action instead of malicious input or third parties, feel free to open a regular issue. If we deem that there may be an additional *security* risk on a *safety* issue we may continue discussions in a restricted forum.
@@ -1,6 +1,6 @@
-# Divan benchmarks
+# Criterion benchmarks

-These are benchmarks using [Divan](https://github.com/nvzqz/divan), a microbenchmarking tool for Rust.
+These are benchmarks using [Criterion](https://github.com/bheisler/criterion.rs), a microbenchmarking tool for Rust.

 Run all benchmarks with `cargo bench`

@ -1,61 +1,115 @@
|
|||||||
use nu_cli::{eval_source, evaluate_commands};
|
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
|
||||||
use nu_plugin_core::{Encoder, EncodingType};
|
use nu_cli::eval_source;
|
||||||
use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
|
use nu_parser::parse;
|
||||||
|
use nu_plugin::{EncodingType, PluginResponse};
|
||||||
use nu_protocol::{
|
use nu_protocol::{engine::EngineState, PipelineData, Span, Value};
|
||||||
engine::{EngineState, Stack},
|
|
||||||
PipelineData, Signals, Span, Spanned, Value,
|
|
||||||
};
|
|
||||||
use nu_std::load_standard_library;
|
|
||||||
use nu_utils::{get_default_config, get_default_env};
|
use nu_utils::{get_default_config, get_default_env};
|
||||||
use std::{
|
|
||||||
rc::Rc,
|
|
||||||
sync::{atomic::AtomicBool, Arc},
|
|
||||||
};
|
|
||||||
|
|
||||||
use std::hint::black_box;
|
|
||||||
|
|
||||||
use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, IntoBenchmarks};
|
|
||||||
|
|
||||||
fn load_bench_commands() -> EngineState {
|
fn load_bench_commands() -> EngineState {
|
||||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||||
}
|
}
|
||||||
|
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
|
||||||
|
// When the *_benchmarks functions were in different files, `cargo bench` would build
|
||||||
|
// an executable for every single one - incredibly slowly. Would be nice to figure out
|
||||||
|
// a way to split things up again.
|
||||||
|
|
||||||
fn setup_engine() -> EngineState {
|
fn parser_benchmarks(c: &mut Criterion) {
|
||||||
let mut engine_state = load_bench_commands();
|
let mut engine_state = load_bench_commands();
|
||||||
let cwd = std::env::current_dir()
|
// parsing config.nu breaks without PWD set
|
||||||
.unwrap()
|
engine_state.add_env_var(
|
||||||
.into_os_string()
|
"PWD".into(),
|
||||||
.into_string()
|
Value::string("/some/dir".to_string(), Span::test_data()),
|
||||||
.unwrap();
|
);
|
||||||
|
|
||||||
// parsing config.nu breaks without PWD set, so set a valid path
|
let default_env = get_default_env().as_bytes();
|
||||||
engine_state.add_env_var("PWD".into(), Value::string(cwd, Span::test_data()));
|
c.bench_function("parse_default_env_file", |b| {
|
||||||
|
b.iter_batched(
|
||||||
|
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|
||||||
|
|mut working_set| parse(&mut working_set, None, default_env, false),
|
||||||
|
BatchSize::SmallInput,
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
engine_state.generate_nu_constant();
|
let default_config = get_default_config().as_bytes();
|
||||||
|
c.bench_function("parse_default_config_file", |b| {
|
||||||
|
b.iter_batched(
|
||||||
|
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|
||||||
|
|mut working_set| parse(&mut working_set, None, default_config, false),
|
||||||
|
BatchSize::SmallInput,
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
engine_state
|
c.bench_function("eval default_env.nu", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
let mut engine_state = load_bench_commands();
|
||||||
|
let mut stack = nu_protocol::engine::Stack::new();
|
||||||
|
eval_source(
|
||||||
|
&mut engine_state,
|
||||||
|
&mut stack,
|
||||||
|
get_default_env().as_bytes(),
|
||||||
|
"default_env.nu",
|
||||||
|
PipelineData::empty(),
|
||||||
|
false,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
c.bench_function("eval default_config.nu", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
let mut engine_state = load_bench_commands();
|
||||||
|
// parsing config.nu breaks without PWD set
|
||||||
|
engine_state.add_env_var(
|
||||||
|
"PWD".into(),
|
||||||
|
Value::string("/some/dir".to_string(), Span::test_data()),
|
||||||
|
);
|
||||||
|
let mut stack = nu_protocol::engine::Stack::new();
|
||||||
|
eval_source(
|
||||||
|
&mut engine_state,
|
||||||
|
&mut stack,
|
||||||
|
get_default_config().as_bytes(),
|
||||||
|
"default_config.nu",
|
||||||
|
PipelineData::empty(),
|
||||||
|
false,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
|
fn eval_benchmarks(c: &mut Criterion) {
|
||||||
let mut engine = setup_engine();
|
c.bench_function("eval default_env.nu", |b| {
|
||||||
let commands = Spanned {
|
b.iter(|| {
|
||||||
span: Span::unknown(),
|
let mut engine_state = load_bench_commands();
|
||||||
item: command.to_string(),
|
let mut stack = nu_protocol::engine::Stack::new();
|
||||||
};
|
eval_source(
|
||||||
|
&mut engine_state,
|
||||||
|
&mut stack,
|
||||||
|
get_default_env().as_bytes(),
|
||||||
|
"default_env.nu",
|
||||||
|
PipelineData::empty(),
|
||||||
|
false,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
let mut stack = Stack::new();
|
c.bench_function("eval default_config.nu", |b| {
|
||||||
|
b.iter(|| {
|
||||||
evaluate_commands(
|
let mut engine_state = load_bench_commands();
|
||||||
&commands,
|
// parsing config.nu breaks without PWD set
|
||||||
&mut engine,
|
engine_state.add_env_var(
|
||||||
&mut stack,
|
"PWD".into(),
|
||||||
PipelineData::empty(),
|
Value::string("/some/dir".to_string(), Span::test_data()),
|
||||||
Default::default(),
|
);
|
||||||
)
|
let mut stack = nu_protocol::engine::Stack::new();
|
||||||
.unwrap();
|
eval_source(
|
||||||
|
&mut engine_state,
|
||||||
(stack, engine)
|
&mut stack,
|
||||||
|
get_default_config().as_bytes(),
|
||||||
|
"default_config.nu",
|
||||||
|
PipelineData::empty(),
|
||||||
|
false,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
|
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
|
||||||
@ -69,421 +123,50 @@ fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
|
|||||||
Value::list(vec![record; row_cnt], Span::test_data())
|
Value::list(vec![record; row_cnt], Span::test_data())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bench_command(
|
fn encoding_benchmarks(c: &mut Criterion) {
|
||||||
name: &str,
|
let mut group = c.benchmark_group("Encoding");
|
||||||
command: &str,
|
let test_cnt_pairs = [(100, 5), (100, 15), (10000, 5), (10000, 15)];
|
||||||
stack: Stack,
|
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
|
||||||
engine: EngineState,
|
for fmt in ["json", "msgpack"] {
|
||||||
) -> impl IntoBenchmarks {
|
group.bench_function(&format!("{fmt} encode {row_cnt} * {col_cnt}"), |b| {
|
||||||
let commands = Spanned {
|
let mut res = vec![];
|
||||||
span: Span::unknown(),
|
let test_data =
|
||||||
item: command.to_string(),
|
PluginResponse::Value(Box::new(encoding_test_data(row_cnt, col_cnt)));
|
||||||
};
|
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
|
||||||
[benchmark_fn(name, move |b| {
|
b.iter(|| encoder.encode_response(&test_data, &mut res))
|
||||||
let commands = commands.clone();
|
});
|
||||||
let stack = stack.clone();
|
|
||||||
let engine = engine.clone();
|
|
||||||
b.iter(move || {
|
|
||||||
let mut stack = stack.clone();
|
|
||||||
let mut engine = engine.clone();
|
|
||||||
#[allow(clippy::unit_arg)]
|
|
||||||
black_box(
|
|
||||||
evaluate_commands(
|
|
||||||
&commands,
|
|
||||||
&mut engine,
|
|
||||||
&mut stack,
|
|
||||||
PipelineData::empty(),
|
|
||||||
Default::default(),
|
|
||||||
)
|
|
||||||
.unwrap(),
|
|
||||||
);
|
|
||||||
})
|
|
||||||
})]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_eval_source(
|
|
||||||
name: &str,
|
|
||||||
fname: String,
|
|
||||||
source: Vec<u8>,
|
|
||||||
stack: Stack,
|
|
||||||
engine: EngineState,
|
|
||||||
) -> impl IntoBenchmarks {
|
|
||||||
[benchmark_fn(name, move |b| {
|
|
||||||
let stack = stack.clone();
|
|
||||||
let engine = engine.clone();
|
|
||||||
let fname = fname.clone();
|
|
||||||
let source = source.clone();
|
|
||||||
b.iter(move || {
|
|
||||||
let mut stack = stack.clone();
|
|
||||||
let mut engine = engine.clone();
|
|
||||||
let fname: &str = &fname.clone();
|
|
||||||
let source: &[u8] = &source.clone();
|
|
||||||
black_box(eval_source(
|
|
||||||
&mut engine,
|
|
||||||
&mut stack,
|
|
||||||
source,
|
|
||||||
fname,
|
|
||||||
PipelineData::empty(),
|
|
||||||
false,
|
|
||||||
));
|
|
||||||
})
|
|
||||||
})]
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Load the standard library into the engine.
|
|
||||||
fn bench_load_standard_lib() -> impl IntoBenchmarks {
|
|
||||||
[benchmark_fn("load_standard_lib", move |b| {
|
|
||||||
let engine = setup_engine();
|
|
||||||
b.iter(move || {
|
|
||||||
let mut engine = engine.clone();
|
|
||||||
load_standard_library(&mut engine)
|
|
||||||
})
|
|
||||||
})]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create_flat_record_string(n: i32) -> String {
|
|
||||||
let mut s = String::from("let record = {");
|
|
||||||
for i in 0..n {
|
|
||||||
s.push_str(&format!("col_{}: {}", i, i));
|
|
||||||
if i < n - 1 {
|
|
||||||
s.push_str(", ");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
s.push('}');
|
group.finish();
|
||||||
s
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_nested_record_string(depth: i32) -> String {
|
fn decoding_benchmarks(c: &mut Criterion) {
|
||||||
let mut s = String::from("let record = {");
|
let mut group = c.benchmark_group("Decoding");
|
||||||
for _ in 0..depth {
|
let test_cnt_pairs = [(100, 5), (100, 15), (10000, 5), (10000, 15)];
|
||||||
s.push_str("col: {");
|
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
|
||||||
}
|
for fmt in ["json", "msgpack"] {
|
||||||
s.push_str("col_final: 0");
|
group.bench_function(&format!("{fmt} decode for {row_cnt} * {col_cnt}"), |b| {
|
||||||
for _ in 0..depth {
|
let mut res = vec![];
|
||||||
s.push('}');
|
let test_data =
|
||||||
}
|
PluginResponse::Value(Box::new(encoding_test_data(row_cnt, col_cnt)));
|
||||||
s.push('}');
|
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
|
||||||
s
|
encoder.encode_response(&test_data, &mut res).unwrap();
|
||||||
}
|
let mut binary_data = std::io::Cursor::new(res);
|
||||||
|
b.iter(|| {
|
||||||
fn create_example_table_nrows(n: i32) -> String {
|
binary_data.set_position(0);
|
||||||
let mut s = String::from("let table = [[foo bar baz]; ");
|
encoder.decode_response(&mut binary_data)
|
||||||
for i in 0..n {
|
})
|
||||||
s.push_str(&format!("[0, 1, {i}]"));
|
});
|
||||||
if i < n - 1 {
|
|
||||||
s.push_str(", ");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
s.push(']');
|
group.finish();
|
||||||
s
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bench_record_create(n: i32) -> impl IntoBenchmarks {
|
criterion_group!(
|
||||||
bench_command(
|
benches,
|
||||||
&format!("record_create_{n}"),
|
parser_benchmarks,
|
||||||
&create_flat_record_string(n),
|
eval_benchmarks,
|
||||||
Stack::new(),
|
encoding_benchmarks,
|
||||||
setup_engine(),
|
decoding_benchmarks
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_record_flat_access(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let setup_command = create_flat_record_string(n);
|
|
||||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
|
||||||
bench_command(
|
|
||||||
&format!("record_flat_access_{n}"),
|
|
||||||
"$record.col_0 | ignore",
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_record_nested_access(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let setup_command = create_nested_record_string(n);
|
|
||||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
|
||||||
let nested_access = ".col".repeat(n as usize);
|
|
||||||
bench_command(
|
|
||||||
&format!("record_nested_access_{n}"),
|
|
||||||
&format!("$record{} | ignore", nested_access),
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_table_create(n: i32) -> impl IntoBenchmarks {
|
|
||||||
bench_command(
|
|
||||||
&format!("table_create_{n}"),
|
|
||||||
&create_example_table_nrows(n),
|
|
||||||
Stack::new(),
|
|
||||||
setup_engine(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_table_get(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let setup_command = create_example_table_nrows(n);
|
|
||||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
|
||||||
bench_command(
|
|
||||||
&format!("table_get_{n}"),
|
|
||||||
"$table | get bar | math sum | ignore",
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_table_select(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let setup_command = create_example_table_nrows(n);
|
|
||||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
|
||||||
bench_command(
|
|
||||||
&format!("table_select_{n}"),
|
|
||||||
"$table | select foo baz | ignore",
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let engine = setup_engine();
|
|
||||||
let stack = Stack::new();
|
|
||||||
bench_command(
|
|
||||||
&format!("eval_interleave_{n}"),
|
|
||||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let mut engine = setup_engine();
|
|
||||||
engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
|
|
||||||
let stack = Stack::new();
|
|
||||||
bench_command(
|
|
||||||
&format!("eval_interleave_with_interrupt_{n}"),
|
|
||||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_eval_for(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let engine = setup_engine();
|
|
||||||
let stack = Stack::new();
|
|
||||||
bench_command(
|
|
||||||
&format!("eval_for_{n}"),
|
|
||||||
&format!("(for $x in (1..{n}) {{ 1 }}) | ignore"),
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_eval_each(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let engine = setup_engine();
|
|
||||||
let stack = Stack::new();
|
|
||||||
bench_command(
|
|
||||||
&format!("eval_each_{n}"),
|
|
||||||
&format!("(1..{n}) | each {{|_| 1 }} | ignore"),
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_eval_par_each(n: i32) -> impl IntoBenchmarks {
|
|
||||||
let engine = setup_engine();
|
|
||||||
let stack = Stack::new();
|
|
||||||
bench_command(
|
|
||||||
&format!("eval_par_each_{n}"),
|
|
||||||
&format!("(1..{}) | par-each -t 2 {{|_| 1 }} | ignore", n),
|
|
||||||
stack,
|
|
||||||
engine,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_eval_default_config() -> impl IntoBenchmarks {
|
|
||||||
let default_env = get_default_config().as_bytes().to_vec();
|
|
||||||
let fname = "default_config.nu".to_string();
|
|
||||||
bench_eval_source(
|
|
||||||
"eval_default_config",
|
|
||||||
fname,
|
|
||||||
default_env,
|
|
||||||
Stack::new(),
|
|
||||||
setup_engine(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_eval_default_env() -> impl IntoBenchmarks {
|
|
||||||
let default_env = get_default_env().as_bytes().to_vec();
|
|
||||||
let fname = "default_env.nu".to_string();
|
|
||||||
bench_eval_source(
|
|
||||||
"eval_default_env",
|
|
||||||
fname,
|
|
||||||
default_env,
|
|
||||||
Stack::new(),
|
|
||||||
setup_engine(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn encode_json(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
|
||||||
let test_data = Rc::new(PluginOutput::CallResponse(
|
|
||||||
0,
|
|
||||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
|
||||||
));
|
|
||||||
let encoder = Rc::new(EncodingType::try_from_bytes(b"json").unwrap());
|
|
||||||
|
|
||||||
[benchmark_fn(
|
|
||||||
format!("encode_json_{}_{}", row_cnt, col_cnt),
|
|
||||||
move |b| {
|
|
||||||
let encoder = encoder.clone();
|
|
||||||
let test_data = test_data.clone();
|
|
||||||
b.iter(move || {
|
|
||||||
let mut res = Vec::new();
|
|
||||||
encoder.encode(&*test_data, &mut res).unwrap();
|
|
||||||
})
|
|
||||||
},
|
|
||||||
)]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn encode_msgpack(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
|
||||||
let test_data = Rc::new(PluginOutput::CallResponse(
|
|
||||||
0,
|
|
||||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
|
||||||
));
|
|
||||||
let encoder = Rc::new(EncodingType::try_from_bytes(b"msgpack").unwrap());
|
|
||||||
|
|
||||||
[benchmark_fn(
|
|
||||||
format!("encode_msgpack_{}_{}", row_cnt, col_cnt),
|
|
||||||
move |b| {
|
|
||||||
let encoder = encoder.clone();
|
|
||||||
let test_data = test_data.clone();
|
|
||||||
b.iter(move || {
|
|
||||||
let mut res = Vec::new();
|
|
||||||
encoder.encode(&*test_data, &mut res).unwrap();
|
|
||||||
})
|
|
||||||
},
|
|
||||||
)]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn decode_json(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
|
||||||
let test_data = PluginOutput::CallResponse(
|
|
||||||
0,
|
|
||||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
|
||||||
);
|
|
||||||
let encoder = EncodingType::try_from_bytes(b"json").unwrap();
|
|
||||||
let mut res = vec![];
|
|
||||||
encoder.encode(&test_data, &mut res).unwrap();
|
|
||||||
|
|
||||||
[benchmark_fn(
|
|
||||||
format!("decode_json_{}_{}", row_cnt, col_cnt),
|
|
||||||
move |b| {
|
|
||||||
let res = res.clone();
|
|
||||||
b.iter(move || {
|
|
||||||
let mut binary_data = std::io::Cursor::new(res.clone());
|
|
||||||
binary_data.set_position(0);
|
|
||||||
let _: Result<Option<PluginOutput>, _> =
|
|
||||||
black_box(encoder.decode(&mut binary_data));
|
|
||||||
})
|
|
||||||
},
|
|
||||||
)]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn decode_msgpack(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
|
||||||
let test_data = PluginOutput::CallResponse(
|
|
||||||
0,
|
|
||||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
|
||||||
);
|
|
||||||
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
|
|
||||||
let mut res = vec![];
|
|
||||||
encoder.encode(&test_data, &mut res).unwrap();
|
|
||||||
|
|
||||||
[benchmark_fn(
|
|
||||||
format!("decode_msgpack_{}_{}", row_cnt, col_cnt),
|
|
||||||
move |b| {
|
|
||||||
let res = res.clone();
|
|
||||||
b.iter(move || {
|
|
||||||
let mut binary_data = std::io::Cursor::new(res.clone());
|
|
||||||
binary_data.set_position(0);
|
|
||||||
let _: Result<Option<PluginOutput>, _> =
|
|
||||||
black_box(encoder.decode(&mut binary_data));
|
|
||||||
})
|
|
||||||
},
|
|
||||||
)]
|
|
||||||
}
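The encode and decode benchmarks above build the payload and the encoder once, and the decode side re-reads the same buffer by rewinding a `Cursor` so that only deserialization is measured. A minimal sketch of that rewind pattern; `serde_json` stands in for the internal `EncodingType` purely to keep the example runnable (it is not the plugin protocol's codec, and it must be added as a dependency):

use std::io::Cursor;

fn main() {
    // Encode once, outside the timed region.
    let test_data: Vec<(String, i64)> = (0..5i64).map(|i| (format!("col_{i}"), i)).collect();
    let encoded = serde_json::to_vec(&test_data).expect("encode");

    // Decode repeatedly from the same buffer by rewinding the cursor,
    // mirroring `binary_data.set_position(0)` in the decode benchmarks.
    let mut binary_data = Cursor::new(encoded);
    for _ in 0..3 {
        binary_data.set_position(0);
        let decoded: Vec<(String, i64)> =
            serde_json::from_reader(&mut binary_data).expect("decode");
        assert_eq!(decoded.len(), 5);
    }
}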
tango_benchmarks!(
    bench_load_standard_lib(),
    // Data types
    // Record
    bench_record_create(1),
    bench_record_create(10),
    bench_record_create(100),
    bench_record_create(1_000),
    bench_record_flat_access(1),
    bench_record_flat_access(10),
    bench_record_flat_access(100),
    bench_record_flat_access(1_000),
    bench_record_nested_access(1),
    bench_record_nested_access(2),
    bench_record_nested_access(4),
    bench_record_nested_access(8),
    bench_record_nested_access(16),
    bench_record_nested_access(32),
    bench_record_nested_access(64),
    bench_record_nested_access(128),
    // Table
    bench_table_create(1),
    bench_table_create(10),
    bench_table_create(100),
    bench_table_create(1_000),
    bench_table_get(1),
    bench_table_get(10),
    bench_table_get(100),
    bench_table_get(1_000),
    bench_table_select(1),
    bench_table_select(10),
    bench_table_select(100),
    bench_table_select(1_000),
    // Eval
    // Interleave
    bench_eval_interleave(100),
    bench_eval_interleave(1_000),
    bench_eval_interleave(10_000),
    bench_eval_interleave_with_interrupt(100),
    bench_eval_interleave_with_interrupt(1_000),
    bench_eval_interleave_with_interrupt(10_000),
    // For
    bench_eval_for(1),
    bench_eval_for(10),
    bench_eval_for(100),
    bench_eval_for(1_000),
    bench_eval_for(10_000),
    // Each
    bench_eval_each(1),
    bench_eval_each(10),
    bench_eval_each(100),
    bench_eval_each(1_000),
    bench_eval_each(10_000),
    // Par-Each
    bench_eval_par_each(1),
    bench_eval_par_each(10),
    bench_eval_par_each(100),
    bench_eval_par_each(1_000),
    bench_eval_par_each(10_000),
    // Config
    bench_eval_default_config(),
    // Env
    bench_eval_default_env(),
    // Encode
    // Json
    encode_json(100, 5),
    encode_json(10000, 15),
    // MsgPack
    encode_msgpack(100, 5),
    encode_msgpack(10000, 15),
    // Decode
    // Json
    decode_json(100, 5),
    decode_json(10000, 15),
    // MsgPack
    decode_msgpack(100, 5),
    decode_msgpack(10000, 15)
);
tango_main!();

);
criterion_main!(benches);
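`tango_benchmarks!` and `tango_main!` register every generated benchmark with the tango harness and emit the binary's entry point. As a rough mental model only (this is not the tango-bench API), a parameterized benchmark family reduces to a list of named closures that a driver can iterate over and time:

use std::time::Instant;

type Benchmark = (String, Box<dyn Fn()>);

// One generator produces a whole family of benchmarks, one per parameter value.
fn bench_sum(n: u64) -> Benchmark {
    (
        format!("sum_{n}"),
        Box::new(move || {
            std::hint::black_box((0..n).sum::<u64>());
        }),
    )
}

fn main() {
    let benchmarks: Vec<Benchmark> =
        vec![bench_sum(1_000), bench_sum(10_000), bench_sum(100_000)];
    for (name, f) in &benchmarks {
        let start = Instant::now();
        for _ in 0..100 {
            f();
        }
        println!("{name}: {:?} per call", start.elapsed() / 100);
    }
}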
@ -5,47 +5,41 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
 edition = "2021"
 license = "MIT"
 name = "nu-cli"
-version = "0.101.0"
+version = "0.87.1"

 [lib]
 bench = false

 [dev-dependencies]
-nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.101.0" }
-nu-command = { path = "../nu-command", version = "0.101.0" }
-nu-test-support = { path = "../nu-test-support", version = "0.101.0" }
-rstest = { workspace = true, default-features = false }
-tempfile = { workspace = true }
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.87.1" }
+nu-command = { path = "../nu-command", version = "0.87.1" }
+nu-test-support = { path = "../nu-test-support", version = "0.87.1" }
+rstest = { version = "0.18.1", default-features = false }

 [dependencies]
-nu-cmd-base = { path = "../nu-cmd-base", version = "0.101.0" }
-nu-engine = { path = "../nu-engine", version = "0.101.0", features = ["os"] }
-nu-path = { path = "../nu-path", version = "0.101.0" }
-nu-parser = { path = "../nu-parser", version = "0.101.0" }
-nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.101.0", optional = true }
-nu-protocol = { path = "../nu-protocol", version = "0.101.0", features = ["os"] }
-nu-utils = { path = "../nu-utils", version = "0.101.0" }
-nu-color-config = { path = "../nu-color-config", version = "0.101.0" }
-nu-ansi-term = { workspace = true }
-reedline = { workspace = true, features = ["bashisms", "sqlite"] }
+nu-cmd-base = { path = "../nu-cmd-base", version = "0.87.1" }
+nu-engine = { path = "../nu-engine", version = "0.87.1" }
+nu-path = { path = "../nu-path", version = "0.87.1" }
+nu-parser = { path = "../nu-parser", version = "0.87.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.87.1" }
+nu-utils = { path = "../nu-utils", version = "0.87.1" }
+nu-color-config = { path = "../nu-color-config", version = "0.87.1" }
+nu-ansi-term = "0.49.0"
+reedline = { version = "0.26.0", features = ["bashisms", "sqlite"] }

-chrono = { default-features = false, features = ["std"], workspace = true }
-crossterm = { workspace = true }
-fancy-regex = { workspace = true }
-fuzzy-matcher = { workspace = true }
-is_executable = { workspace = true }
-log = { workspace = true }
-miette = { workspace = true, features = ["fancy-no-backtrace"] }
-lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
-percent-encoding = { workspace = true }
-sysinfo = { workspace = true }
-unicode-segmentation = { workspace = true }
-uuid = { workspace = true, features = ["v4"] }
-which = { workspace = true }
+chrono = { default-features = false, features = ["std"], version = "0.4" }
+crossterm = "0.27"
+fancy-regex = "0.11"
+fuzzy-matcher = "0.3"
+is_executable = "1.0"
+log = "0.4"
+miette = { version = "5.10", features = ["fancy-no-backtrace"] }
+once_cell = "1.18"
+percent-encoding = "2"
+pathdiff = "0.2"
+sysinfo = "0.29"
+unicode-segmentation = "1.10"
+uuid = { version = "1.5.0", features = ["v4"] }

 [features]
-plugin = ["nu-plugin-engine"]
-system-clipboard = ["reedline/system_clipboard"]
+plugin = []

-[lints]
-workspace = true
@ -1,7 +0,0 @@
This crate implements the core functionality of the interactive Nushell REPL and interfaces with `reedline`.
Currently implements the syntax highlighting and completions logic.
Furthermore includes a few commands that are specific to `reedline`.

## Internal Nushell crate

This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
129
crates/nu-cli/src/commands/commandline.rs
Normal file
@ -0,0 +1,129 @@
|
|||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::{
|
||||||
|
ast::Call,
|
||||||
|
engine::{Command, EngineState, Stack},
|
||||||
|
Category, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
|
||||||
|
};
|
||||||
|
use unicode_segmentation::UnicodeSegmentation;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Commandline;
|
||||||
|
|
||||||
|
impl Command for Commandline {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"commandline"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> Signature {
|
||||||
|
Signature::build("commandline")
|
||||||
|
.input_output_types(vec![
|
||||||
|
(Type::Nothing, Type::Nothing),
|
||||||
|
(Type::String, Type::String),
|
||||||
|
])
|
||||||
|
.switch(
|
||||||
|
"cursor",
|
||||||
|
"Set or get the current cursor position",
|
||||||
|
Some('c'),
|
||||||
|
)
|
||||||
|
.switch(
|
||||||
|
"cursor-end",
|
||||||
|
"Set the current cursor position to the end of the buffer",
|
||||||
|
Some('e'),
|
||||||
|
)
|
||||||
|
.switch(
|
||||||
|
"append",
|
||||||
|
"appends the string to the end of the buffer",
|
||||||
|
Some('a'),
|
||||||
|
)
|
||||||
|
.switch(
|
||||||
|
"insert",
|
||||||
|
"inserts the string into the buffer at the cursor position",
|
||||||
|
Some('i'),
|
||||||
|
)
|
||||||
|
.switch(
|
||||||
|
"replace",
|
||||||
|
"replaces the current contents of the buffer (default)",
|
||||||
|
Some('r'),
|
||||||
|
)
|
||||||
|
.optional(
|
||||||
|
"cmd",
|
||||||
|
SyntaxShape::String,
|
||||||
|
"the string to perform the operation with",
|
||||||
|
)
|
||||||
|
.category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"View or modify the current command line input buffer."
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["repl", "interactive"]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
if let Some(cmd) = call.opt::<Value>(engine_state, stack, 0)? {
|
||||||
|
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||||
|
|
||||||
|
if call.has_flag("cursor") {
|
||||||
|
let cmd_str = cmd.as_string()?;
|
||||||
|
match cmd_str.parse::<i64>() {
|
||||||
|
Ok(n) => {
|
||||||
|
repl.cursor_pos = if n <= 0 {
|
||||||
|
0usize
|
||||||
|
} else {
|
||||||
|
repl.buffer
|
||||||
|
.grapheme_indices(true)
|
||||||
|
.map(|(i, _c)| i)
|
||||||
|
.nth(n as usize)
|
||||||
|
.unwrap_or(repl.buffer.len())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
return Err(ShellError::CantConvert {
|
||||||
|
to_type: "int".to_string(),
|
||||||
|
from_type: "string".to_string(),
|
||||||
|
span: cmd.span(),
|
||||||
|
help: Some(format!(
|
||||||
|
r#"string "{cmd_str}" does not represent a valid int"#
|
||||||
|
)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if call.has_flag("append") {
|
||||||
|
repl.buffer.push_str(&cmd.as_string()?);
|
||||||
|
} else if call.has_flag("insert") {
|
||||||
|
let cmd_str = cmd.as_string()?;
|
||||||
|
let cursor_pos = repl.cursor_pos;
|
||||||
|
repl.buffer.insert_str(cursor_pos, &cmd_str);
|
||||||
|
repl.cursor_pos += cmd_str.len();
|
||||||
|
} else {
|
||||||
|
repl.buffer = cmd.as_string()?;
|
||||||
|
repl.cursor_pos = repl.buffer.len();
|
||||||
|
}
|
||||||
|
Ok(Value::nothing(call.head).into_pipeline_data())
|
||||||
|
} else {
|
||||||
|
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||||
|
if call.has_flag("cursor-end") {
|
||||||
|
repl.cursor_pos = repl.buffer.graphemes(true).count();
|
||||||
|
Ok(Value::nothing(call.head).into_pipeline_data())
|
||||||
|
} else if call.has_flag("cursor") {
|
||||||
|
let char_pos = repl
|
||||||
|
.buffer
|
||||||
|
.grapheme_indices(true)
|
||||||
|
.chain(std::iter::once((repl.buffer.len(), "")))
|
||||||
|
.position(|(i, _c)| i == repl.cursor_pos)
|
||||||
|
.expect("Cursor position isn't on a grapheme boundary");
|
||||||
|
Ok(Value::string(char_pos.to_string(), call.head).into_pipeline_data())
|
||||||
|
} else {
|
||||||
|
Ok(Value::string(repl.buffer.to_string(), call.head).into_pipeline_data())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
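The cursor handling above converts between a grapheme index (what `commandline --cursor` accepts and reports) and a byte offset into the buffer, clamping negative or out-of-range values. A self-contained sketch of both directions of that conversion, using the same `unicode-segmentation` crate the command imports:

use unicode_segmentation::UnicodeSegmentation;

// `--cursor <n>` path: grapheme index -> byte offset, clamped to the buffer.
fn grapheme_index_to_byte_offset(buffer: &str, n: i64) -> usize {
    if n <= 0 {
        0
    } else {
        buffer
            .grapheme_indices(true)
            .map(|(i, _)| i)
            .nth(n as usize)
            .unwrap_or(buffer.len())
    }
}

// Read path: byte offset -> grapheme index (None if not on a boundary).
fn byte_offset_to_grapheme_index(buffer: &str, byte_pos: usize) -> Option<usize> {
    buffer
        .grapheme_indices(true)
        .chain(std::iter::once((buffer.len(), "")))
        .position(|(i, _)| i == byte_pos)
}

fn main() {
    let buffer = "a日b"; // '日' is 3 bytes in UTF-8, so grapheme and byte positions diverge
    assert_eq!(grapheme_index_to_byte_offset(buffer, 2), 4);
    assert_eq!(grapheme_index_to_byte_offset(buffer, -5), 0);
    assert_eq!(grapheme_index_to_byte_offset(buffer, 99), buffer.len());
    assert_eq!(byte_offset_to_grapheme_index(buffer, 4), Some(2));
}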
@ -1,35 +0,0 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct Commandline;

impl Command for Commandline {
    fn name(&self) -> &str {
        "commandline"
    }

    fn signature(&self) -> Signature {
        Signature::build("commandline")
            .input_output_types(vec![(Type::Nothing, Type::String)])
            .category(Category::Core)
    }

    fn description(&self) -> &str {
        "View the current command line input buffer."
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["repl", "interactive"]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        _stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let repl = engine_state.repl_state.lock().expect("repl state mutex");
        Ok(Value::string(repl.buffer.clone(), call.head).into_pipeline_data())
    }
}
@ -1,66 +0,0 @@
|
|||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct SubCommand;
|
|
||||||
|
|
||||||
impl Command for SubCommand {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"commandline edit"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.input_output_types(vec![(Type::Nothing, Type::Nothing)])
|
|
||||||
.switch(
|
|
||||||
"append",
|
|
||||||
"appends the string to the end of the buffer",
|
|
||||||
Some('a'),
|
|
||||||
)
|
|
||||||
.switch(
|
|
||||||
"insert",
|
|
||||||
"inserts the string into the buffer at the cursor position",
|
|
||||||
Some('i'),
|
|
||||||
)
|
|
||||||
.switch(
|
|
||||||
"replace",
|
|
||||||
"replaces the current contents of the buffer (default)",
|
|
||||||
Some('r'),
|
|
||||||
)
|
|
||||||
.required(
|
|
||||||
"str",
|
|
||||||
SyntaxShape::String,
|
|
||||||
"the string to perform the operation with",
|
|
||||||
)
|
|
||||||
.category(Category::Core)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn description(&self) -> &str {
|
|
||||||
"Modify the current command line input buffer."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn search_terms(&self) -> Vec<&str> {
|
|
||||||
vec!["repl", "interactive"]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
_input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let str: String = call.req(engine_state, stack, 0)?;
|
|
||||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
|
||||||
if call.has_flag(engine_state, stack, "append")? {
|
|
||||||
repl.buffer.push_str(&str);
|
|
||||||
} else if call.has_flag(engine_state, stack, "insert")? {
|
|
||||||
let cursor_pos = repl.cursor_pos;
|
|
||||||
repl.buffer.insert_str(cursor_pos, &str);
|
|
||||||
repl.cursor_pos += str.len();
|
|
||||||
} else {
|
|
||||||
repl.buffer = str;
|
|
||||||
repl.cursor_pos = repl.buffer.len();
|
|
||||||
}
|
|
||||||
Ok(Value::nothing(call.head).into_pipeline_data())
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,52 +0,0 @@
|
|||||||
use nu_engine::command_prelude::*;
|
|
||||||
use unicode_segmentation::UnicodeSegmentation;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct SubCommand;
|
|
||||||
|
|
||||||
impl Command for SubCommand {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"commandline get-cursor"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.input_output_types(vec![(Type::Nothing, Type::Int)])
|
|
||||||
.allow_variants_without_examples(true)
|
|
||||||
.category(Category::Core)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn description(&self) -> &str {
|
|
||||||
"Get the current cursor position."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn search_terms(&self) -> Vec<&str> {
|
|
||||||
vec!["repl", "interactive"]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
_stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
_input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let repl = engine_state.repl_state.lock().expect("repl state mutex");
|
|
||||||
let char_pos = repl
|
|
||||||
.buffer
|
|
||||||
.grapheme_indices(true)
|
|
||||||
.chain(std::iter::once((repl.buffer.len(), "")))
|
|
||||||
.position(|(i, _c)| i == repl.cursor_pos)
|
|
||||||
.expect("Cursor position isn't on a grapheme boundary");
|
|
||||||
match i64::try_from(char_pos) {
|
|
||||||
Ok(pos) => Ok(Value::int(pos, call.head).into_pipeline_data()),
|
|
||||||
Err(e) => Err(ShellError::GenericError {
|
|
||||||
error: "Failed to convert cursor position to int".to_string(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: None,
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,9 +0,0 @@
mod commandline_;
mod edit;
mod get_cursor;
mod set_cursor;

pub use commandline_::Commandline;
pub use edit::SubCommand as CommandlineEdit;
pub use get_cursor::SubCommand as CommandlineGetCursor;
pub use set_cursor::SubCommand as CommandlineSetCursor;
@ -1,65 +0,0 @@
|
|||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
use unicode_segmentation::UnicodeSegmentation;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct SubCommand;
|
|
||||||
|
|
||||||
impl Command for SubCommand {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"commandline set-cursor"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.input_output_types(vec![(Type::Nothing, Type::Nothing)])
|
|
||||||
.switch(
|
|
||||||
"end",
|
|
||||||
"set the current cursor position to the end of the buffer",
|
|
||||||
Some('e'),
|
|
||||||
)
|
|
||||||
.optional("pos", SyntaxShape::Int, "Cursor position to be set")
|
|
||||||
.category(Category::Core)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn description(&self) -> &str {
|
|
||||||
"Set the current cursor position."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn search_terms(&self) -> Vec<&str> {
|
|
||||||
vec!["repl", "interactive"]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
_input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
|
||||||
if let Some(pos) = call.opt::<i64>(engine_state, stack, 0)? {
|
|
||||||
repl.cursor_pos = if pos <= 0 {
|
|
||||||
0usize
|
|
||||||
} else {
|
|
||||||
repl.buffer
|
|
||||||
.grapheme_indices(true)
|
|
||||||
.map(|(i, _c)| i)
|
|
||||||
.nth(pos as usize)
|
|
||||||
.unwrap_or(repl.buffer.len())
|
|
||||||
};
|
|
||||||
Ok(Value::nothing(call.head).into_pipeline_data())
|
|
||||||
} else if call.has_flag(engine_state, stack, "end")? {
|
|
||||||
repl.cursor_pos = repl.buffer.len();
|
|
||||||
Ok(Value::nothing(call.head).into_pipeline_data())
|
|
||||||
} else {
|
|
||||||
Err(ShellError::GenericError {
|
|
||||||
error: "Required a positional argument or a flag".to_string(),
|
|
||||||
msg: "".to_string(),
|
|
||||||
span: None,
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,6 +1,7 @@
-use crate::commands::*;
 use nu_protocol::engine::{EngineState, StateWorkingSet};

+use crate::commands::*;
+
 pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
     let delta = {
         let mut working_set = StateWorkingSet::new(&engine_state);
@ -13,11 +14,7 @@ pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {

         bind_command! {
             Commandline,
-            CommandlineEdit,
-            CommandlineGetCursor,
-            CommandlineSetCursor,
             History,
-            HistoryImport,
             HistorySession,
             Keybindings,
             KeybindingsDefault,
240
crates/nu-cli/src/commands/history.rs
Normal file
@ -0,0 +1,240 @@
|
|||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{
|
||||||
|
record, Category, Example, HistoryFileFormat, IntoInterruptiblePipelineData, PipelineData,
|
||||||
|
ShellError, Signature, Span, Type, Value,
|
||||||
|
};
|
||||||
|
use reedline::{
|
||||||
|
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
|
||||||
|
SqliteBackedHistory,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct History;
|
||||||
|
|
||||||
|
impl Command for History {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"history"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Get the command history."
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("history")
|
||||||
|
.input_output_types(vec![
|
||||||
|
(Type::Nothing, Type::Table(vec![])),
|
||||||
|
(Type::Nothing, Type::Nothing),
|
||||||
|
])
|
||||||
|
.allow_variants_without_examples(true)
|
||||||
|
.switch("clear", "Clears out the history entries", Some('c'))
|
||||||
|
.switch(
|
||||||
|
"long",
|
||||||
|
"Show long listing of entries for sqlite history",
|
||||||
|
Some('l'),
|
||||||
|
)
|
||||||
|
.category(Category::Misc)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
_stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let head = call.head;
|
||||||
|
|
||||||
|
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
|
||||||
|
if let Some(config_path) = nu_path::config_dir() {
|
||||||
|
let clear = call.has_flag("clear");
|
||||||
|
let long = call.has_flag("long");
|
||||||
|
let ctrlc = engine_state.ctrlc.clone();
|
||||||
|
|
||||||
|
let mut history_path = config_path;
|
||||||
|
history_path.push("nushell");
|
||||||
|
match engine_state.config.history_file_format {
|
||||||
|
HistoryFileFormat::Sqlite => {
|
||||||
|
history_path.push("history.sqlite3");
|
||||||
|
}
|
||||||
|
HistoryFileFormat::PlainText => {
|
||||||
|
history_path.push("history.txt");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if clear {
|
||||||
|
let _ = std::fs::remove_file(history_path);
|
||||||
|
// TODO: FIXME also clear the auxiliary files when using sqlite
|
||||||
|
Ok(PipelineData::empty())
|
||||||
|
} else {
|
||||||
|
let history_reader: Option<Box<dyn ReedlineHistory>> =
|
||||||
|
match engine_state.config.history_file_format {
|
||||||
|
HistoryFileFormat::Sqlite => {
|
||||||
|
SqliteBackedHistory::with_file(history_path, None, None)
|
||||||
|
.map(|inner| {
|
||||||
|
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||||
|
boxed
|
||||||
|
})
|
||||||
|
.ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
HistoryFileFormat::PlainText => FileBackedHistory::with_file(
|
||||||
|
engine_state.config.max_history_size as usize,
|
||||||
|
history_path,
|
||||||
|
)
|
||||||
|
.map(|inner| {
|
||||||
|
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||||
|
boxed
|
||||||
|
})
|
||||||
|
.ok(),
|
||||||
|
};
|
||||||
|
|
||||||
|
match engine_state.config.history_file_format {
|
||||||
|
HistoryFileFormat::PlainText => Ok(history_reader
|
||||||
|
.and_then(|h| {
|
||||||
|
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||||
|
.ok()
|
||||||
|
})
|
||||||
|
.map(move |entries| {
|
||||||
|
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||||
|
Value::record(
|
||||||
|
record! {
|
||||||
|
"command" => Value::string(entry.command_line, head),
|
||||||
|
"index" => Value::int(idx as i64, head),
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.ok_or(ShellError::FileNotFound(head))?
|
||||||
|
.into_pipeline_data(ctrlc)),
|
||||||
|
HistoryFileFormat::Sqlite => Ok(history_reader
|
||||||
|
.and_then(|h| {
|
||||||
|
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||||
|
.ok()
|
||||||
|
})
|
||||||
|
.map(move |entries| {
|
||||||
|
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||||
|
create_history_record(idx, entry, long, head)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.ok_or(ShellError::FileNotFound(head))?
|
||||||
|
.into_pipeline_data(ctrlc)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Err(ShellError::FileNotFound(head))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![
|
||||||
|
Example {
|
||||||
|
example: "history | length",
|
||||||
|
description: "Get current history length",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
example: "history | last 5",
|
||||||
|
description: "Show last 5 commands you have ran",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
example: "history | where command =~ cargo | get command",
|
||||||
|
description: "Search all the commands from history that contains 'cargo'",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
|
||||||
|
//1. Format all the values
|
||||||
|
//2. Create a record of either short or long columns and values
|
||||||
|
|
||||||
|
let item_id_value = Value::int(
|
||||||
|
match entry.id {
|
||||||
|
Some(id) => {
|
||||||
|
let ids = id.to_string();
|
||||||
|
match ids.parse::<i64>() {
|
||||||
|
Ok(i) => i,
|
||||||
|
_ => 0i64,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => 0i64,
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
);
|
||||||
|
let start_timestamp_value = Value::string(
|
||||||
|
match entry.start_timestamp {
|
||||||
|
Some(time) => time.to_string(),
|
||||||
|
None => "".into(),
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
);
|
||||||
|
let command_value = Value::string(entry.command_line, head);
|
||||||
|
let session_id_value = Value::int(
|
||||||
|
match entry.session_id {
|
||||||
|
Some(sid) => {
|
||||||
|
let sids = sid.to_string();
|
||||||
|
match sids.parse::<i64>() {
|
||||||
|
Ok(i) => i,
|
||||||
|
_ => 0i64,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => 0i64,
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
);
|
||||||
|
let hostname_value = Value::string(
|
||||||
|
match entry.hostname {
|
||||||
|
Some(host) => host,
|
||||||
|
None => "".into(),
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
);
|
||||||
|
let cwd_value = Value::string(
|
||||||
|
match entry.cwd {
|
||||||
|
Some(cwd) => cwd,
|
||||||
|
None => "".into(),
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
);
|
||||||
|
let duration_value = Value::duration(
|
||||||
|
match entry.duration {
|
||||||
|
Some(d) => d.as_nanos().try_into().unwrap_or(0),
|
||||||
|
None => 0,
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
);
|
||||||
|
let exit_status_value = Value::int(entry.exit_status.unwrap_or(0), head);
|
||||||
|
let index_value = Value::int(idx as i64, head);
|
||||||
|
if long {
|
||||||
|
Value::record(
|
||||||
|
record! {
|
||||||
|
"item_id" => item_id_value,
|
||||||
|
"start_timestamp" => start_timestamp_value,
|
||||||
|
"command" => command_value,
|
||||||
|
"session_id" => session_id_value,
|
||||||
|
"hostname" => hostname_value,
|
||||||
|
"cwd" => cwd_value,
|
||||||
|
"duration" => duration_value,
|
||||||
|
"exit_status" => exit_status_value,
|
||||||
|
"idx" => index_value,
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
Value::record(
|
||||||
|
record! {
|
||||||
|
"start_timestamp" => start_timestamp_value,
|
||||||
|
"command" => command_value,
|
||||||
|
"cwd" => cwd_value,
|
||||||
|
"duration" => duration_value,
|
||||||
|
"exit_status" => exit_status_value,
|
||||||
|
},
|
||||||
|
head,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
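The `run` above resolves the history file by appending `nushell` and a format-specific file name to the user's config directory. A small sketch of that path selection; the config directory is hard-coded for illustration (the command itself asks `nu_path::config_dir()`), and `HistoryFileFormat` here is a local stand-in for the nu-protocol enum:

use std::path::PathBuf;

enum HistoryFileFormat {
    Sqlite,
    PlainText,
}

fn history_path(config_dir: PathBuf, format: HistoryFileFormat) -> PathBuf {
    let mut path = config_dir;
    path.push("nushell");
    path.push(match format {
        HistoryFileFormat::Sqlite => "history.sqlite3",
        HistoryFileFormat::PlainText => "history.txt",
    });
    path
}

fn main() {
    let cfg = PathBuf::from("/home/user/.config");
    // Prints /home/user/.config/nushell/history.sqlite3 (Unix separators shown).
    println!("{}", history_path(cfg.clone(), HistoryFileFormat::Sqlite).display());
    // Prints /home/user/.config/nushell/history.txt
    println!("{}", history_path(cfg, HistoryFileFormat::PlainText).display());
}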
@ -1,9 +0,0 @@
// Each const is named after a HistoryItem field, and the value is the field name to be displayed to
// the user (or accepted during import).
pub const COMMAND_LINE: &str = "command";
pub const START_TIMESTAMP: &str = "start_timestamp";
pub const HOSTNAME: &str = "hostname";
pub const CWD: &str = "cwd";
pub const EXIT_STATUS: &str = "exit_status";
pub const DURATION: &str = "duration";
pub const SESSION_ID: &str = "session_id";
@ -1,205 +0,0 @@
|
|||||||
use nu_engine::command_prelude::*;
|
|
||||||
use nu_protocol::HistoryFileFormat;
|
|
||||||
use reedline::{
|
|
||||||
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
|
|
||||||
SqliteBackedHistory,
|
|
||||||
};
|
|
||||||
|
|
||||||
use super::fields;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct History;
|
|
||||||
|
|
||||||
impl Command for History {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"history"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn description(&self) -> &str {
|
|
||||||
"Get the command history."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> nu_protocol::Signature {
|
|
||||||
Signature::build("history")
|
|
||||||
.input_output_types(vec![(Type::Nothing, Type::Any)])
|
|
||||||
.allow_variants_without_examples(true)
|
|
||||||
.switch("clear", "Clears out the history entries", Some('c'))
|
|
||||||
.switch(
|
|
||||||
"long",
|
|
||||||
"Show long listing of entries for sqlite history",
|
|
||||||
Some('l'),
|
|
||||||
)
|
|
||||||
.category(Category::History)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
_input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let head = call.head;
|
|
||||||
|
|
||||||
let Some(history) = engine_state.history_config() else {
|
|
||||||
return Ok(PipelineData::empty());
|
|
||||||
};
|
|
||||||
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
|
|
||||||
let Some(history_path) = history.file_path() else {
|
|
||||||
return Err(ShellError::ConfigDirNotFound { span: Some(head) });
|
|
||||||
};
|
|
||||||
|
|
||||||
if call.has_flag(engine_state, stack, "clear")? {
|
|
||||||
let _ = std::fs::remove_file(history_path);
|
|
||||||
// TODO: FIXME also clear the auxiliary files when using sqlite
|
|
||||||
return Ok(PipelineData::empty());
|
|
||||||
}
|
|
||||||
|
|
||||||
let long = call.has_flag(engine_state, stack, "long")?;
|
|
||||||
let signals = engine_state.signals().clone();
|
|
||||||
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
|
|
||||||
HistoryFileFormat::Sqlite => {
|
|
||||||
SqliteBackedHistory::with_file(history_path.clone(), None, None)
|
|
||||||
.map(|inner| {
|
|
||||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
|
||||||
boxed
|
|
||||||
})
|
|
||||||
.ok()
|
|
||||||
}
|
|
||||||
HistoryFileFormat::Plaintext => {
|
|
||||||
FileBackedHistory::with_file(history.max_size as usize, history_path.clone())
|
|
||||||
.map(|inner| {
|
|
||||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
|
||||||
boxed
|
|
||||||
})
|
|
||||||
.ok()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
match history.file_format {
|
|
||||||
HistoryFileFormat::Plaintext => Ok(history_reader
|
|
||||||
.and_then(|h| {
|
|
||||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
|
||||||
.ok()
|
|
||||||
})
|
|
||||||
.map(move |entries| {
|
|
||||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
|
||||||
Value::record(
|
|
||||||
record! {
|
|
||||||
fields::COMMAND_LINE => Value::string(entry.command_line, head),
|
|
||||||
// TODO: This name is inconsistent with create_history_record.
|
|
||||||
"index" => Value::int(idx as i64, head),
|
|
||||||
},
|
|
||||||
head,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.ok_or(ShellError::FileNotFound {
|
|
||||||
file: history_path.display().to_string(),
|
|
||||||
span: head,
|
|
||||||
})?
|
|
||||||
.into_pipeline_data(head, signals)),
|
|
||||||
HistoryFileFormat::Sqlite => Ok(history_reader
|
|
||||||
.and_then(|h| {
|
|
||||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
|
||||||
.ok()
|
|
||||||
})
|
|
||||||
.map(move |entries| {
|
|
||||||
entries
|
|
||||||
.into_iter()
|
|
||||||
.enumerate()
|
|
||||||
.map(move |(idx, entry)| create_history_record(idx, entry, long, head))
|
|
||||||
})
|
|
||||||
.ok_or(ShellError::FileNotFound {
|
|
||||||
file: history_path.display().to_string(),
|
|
||||||
span: head,
|
|
||||||
})?
|
|
||||||
.into_pipeline_data(head, signals)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
example: "history | length",
|
|
||||||
description: "Get current history length",
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
example: "history | last 5",
|
|
||||||
description: "Show last 5 commands you have ran",
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
example: "history | where command =~ cargo | get command",
|
|
||||||
description: "Search all the commands from history that contains 'cargo'",
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
|
|
||||||
//1. Format all the values
|
|
||||||
//2. Create a record of either short or long columns and values
|
|
||||||
|
|
||||||
let item_id_value = Value::int(
|
|
||||||
entry
|
|
||||||
.id
|
|
||||||
.and_then(|id| id.to_string().parse::<i64>().ok())
|
|
||||||
.unwrap_or_default(),
|
|
||||||
head,
|
|
||||||
);
|
|
||||||
let start_timestamp_value = Value::string(
|
|
||||||
entry
|
|
||||||
.start_timestamp
|
|
||||||
.map(|time| time.to_string())
|
|
||||||
.unwrap_or_default(),
|
|
||||||
head,
|
|
||||||
);
|
|
||||||
let command_value = Value::string(entry.command_line, head);
|
|
||||||
let session_id_value = Value::int(
|
|
||||||
entry
|
|
||||||
.session_id
|
|
||||||
.and_then(|id| id.to_string().parse::<i64>().ok())
|
|
||||||
.unwrap_or_default(),
|
|
||||||
head,
|
|
||||||
);
|
|
||||||
let hostname_value = Value::string(entry.hostname.unwrap_or_default(), head);
|
|
||||||
let cwd_value = Value::string(entry.cwd.unwrap_or_default(), head);
|
|
||||||
let duration_value = Value::duration(
|
|
||||||
entry
|
|
||||||
.duration
|
|
||||||
.and_then(|d| d.as_nanos().try_into().ok())
|
|
||||||
.unwrap_or(0),
|
|
||||||
head,
|
|
||||||
);
|
|
||||||
let exit_status_value = Value::int(entry.exit_status.unwrap_or(0), head);
|
|
||||||
let index_value = Value::int(idx as i64, head);
|
|
||||||
if long {
|
|
||||||
Value::record(
|
|
||||||
record! {
|
|
||||||
"item_id" => item_id_value,
|
|
||||||
fields::START_TIMESTAMP => start_timestamp_value,
|
|
||||||
fields::COMMAND_LINE => command_value,
|
|
||||||
fields::SESSION_ID => session_id_value,
|
|
||||||
fields::HOSTNAME => hostname_value,
|
|
||||||
fields::CWD => cwd_value,
|
|
||||||
fields::DURATION => duration_value,
|
|
||||||
fields::EXIT_STATUS => exit_status_value,
|
|
||||||
"idx" => index_value,
|
|
||||||
},
|
|
||||||
head,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
Value::record(
|
|
||||||
record! {
|
|
||||||
fields::START_TIMESTAMP => start_timestamp_value,
|
|
||||||
fields::COMMAND_LINE => command_value,
|
|
||||||
fields::CWD => cwd_value,
|
|
||||||
fields::DURATION => duration_value,
|
|
||||||
fields::EXIT_STATUS => exit_status_value,
|
|
||||||
},
|
|
||||||
head,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
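This version of `create_history_record` replaces the nested `match` blocks of the older command above with `Option` combinators. A tiny standalone check that the two spellings agree, with `Id` as a hypothetical stand-in for reedline's id types:

struct Id(u64);

// Older style: nested match with explicit 0 fallbacks.
fn id_via_match(id: Option<Id>) -> i64 {
    match id {
        Some(id) => {
            let ids = id.0.to_string();
            match ids.parse::<i64>() {
                Ok(i) => i,
                _ => 0i64,
            }
        }
        None => 0i64,
    }
}

// Newer style: and_then + unwrap_or_default.
fn id_via_combinators(id: Option<Id>) -> i64 {
    id.and_then(|id| id.0.to_string().parse::<i64>().ok())
        .unwrap_or_default()
}

fn main() {
    assert_eq!(id_via_match(Some(Id(42))), 42);
    assert_eq!(id_via_combinators(Some(Id(42))), 42);
    assert_eq!(id_via_match(None), 0);
    assert_eq!(id_via_combinators(None), 0);
}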
@ -1,415 +0,0 @@
|
|||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
use nu_protocol::HistoryFileFormat;
|
|
||||||
|
|
||||||
use reedline::{
|
|
||||||
FileBackedHistory, History, HistoryItem, ReedlineError, SearchQuery, SqliteBackedHistory,
|
|
||||||
};
|
|
||||||
|
|
||||||
use super::fields;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct HistoryImport;
|
|
||||||
|
|
||||||
impl Command for HistoryImport {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"history import"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn description(&self) -> &str {
|
|
||||||
"Import command line history"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extra_description(&self) -> &str {
|
|
||||||
r#"Can import history from input, either successive command lines or more detailed records. If providing records, available fields are:
|
|
||||||
command_line, id, start_timestamp, hostname, cwd, duration, exit_status.
|
|
||||||
|
|
||||||
If no input is provided, will import all history items from existing history in the other format: if current history is stored in sqlite, it will store it in plain text and vice versa.
|
|
||||||
|
|
||||||
Note that history item IDs are ignored when importing from file."#
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> nu_protocol::Signature {
|
|
||||||
Signature::build("history import")
|
|
||||||
.category(Category::History)
|
|
||||||
.input_output_types(vec![
|
|
||||||
(Type::Nothing, Type::Nothing),
|
|
||||||
(Type::List(Box::new(Type::String)), Type::Nothing),
|
|
||||||
(Type::table(), Type::Nothing),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
example: "history import",
|
|
||||||
description:
|
|
||||||
"Append all items from history in the other format to the current history",
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
example: "echo foo | history import",
|
|
||||||
description: "Append `foo` to the current history",
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
example: "[[ command_line cwd ]; [ foo /home ]] | history import",
|
|
||||||
description: "Append `foo` ran from `/home` to the current history",
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
_stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let ok = Ok(Value::nothing(call.head).into_pipeline_data());
|
|
||||||
|
|
||||||
let Some(history) = engine_state.history_config() else {
|
|
||||||
return ok;
|
|
||||||
};
|
|
||||||
let Some(current_history_path) = history.file_path() else {
|
|
||||||
return Err(ShellError::ConfigDirNotFound {
|
|
||||||
span: Some(call.head),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
if let Some(bak_path) = backup(¤t_history_path)? {
|
|
||||||
println!("Backed history to {}", bak_path.display());
|
|
||||||
}
|
|
||||||
match input {
|
|
||||||
PipelineData::Empty => {
|
|
||||||
let other_format = match history.file_format {
|
|
||||||
HistoryFileFormat::Sqlite => HistoryFileFormat::Plaintext,
|
|
||||||
HistoryFileFormat::Plaintext => HistoryFileFormat::Sqlite,
|
|
||||||
};
|
|
||||||
let src = new_backend(other_format, None)?;
|
|
||||||
let mut dst = new_backend(history.file_format, Some(current_history_path))?;
|
|
||||||
let items = src
|
|
||||||
.search(SearchQuery::everything(
|
|
||||||
reedline::SearchDirection::Forward,
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
.map_err(error_from_reedline)?
|
|
||||||
.into_iter()
|
|
||||||
.map(Ok);
|
|
||||||
import(dst.as_mut(), items)
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
let input = input.into_iter().map(item_from_value);
|
|
||||||
import(
|
|
||||||
new_backend(history.file_format, Some(current_history_path))?.as_mut(),
|
|
||||||
input,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}?;
|
|
||||||
|
|
||||||
ok
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_backend(
|
|
||||||
format: HistoryFileFormat,
|
|
||||||
path: Option<PathBuf>,
|
|
||||||
) -> Result<Box<dyn History>, ShellError> {
|
|
||||||
let path = match path {
|
|
||||||
Some(path) => path,
|
|
||||||
None => {
|
|
||||||
let Some(mut path) = nu_path::nu_config_dir() else {
|
|
||||||
return Err(ShellError::ConfigDirNotFound { span: None });
|
|
||||||
};
|
|
||||||
path.push(format.default_file_name());
|
|
||||||
path.into_std_path_buf()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
fn map(
|
|
||||||
result: Result<impl History + 'static, ReedlineError>,
|
|
||||||
) -> Result<Box<dyn History>, ShellError> {
|
|
||||||
result
|
|
||||||
.map(|x| Box::new(x) as Box<dyn History>)
|
|
||||||
.map_err(error_from_reedline)
|
|
||||||
}
|
|
||||||
match format {
|
|
||||||
// Use a reasonably large value for maximum capacity.
|
|
||||||
HistoryFileFormat::Plaintext => map(FileBackedHistory::with_file(0xfffffff, path)),
|
|
||||||
HistoryFileFormat::Sqlite => map(SqliteBackedHistory::with_file(path, None, None)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn import(
|
|
||||||
dst: &mut dyn History,
|
|
||||||
src: impl Iterator<Item = Result<HistoryItem, ShellError>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
for item in src {
|
|
||||||
let mut item = item?;
|
|
||||||
item.id = None;
|
|
||||||
dst.save(item).map_err(error_from_reedline)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn error_from_reedline(e: ReedlineError) -> ShellError {
|
|
||||||
// TODO: Should we add a new ShellError variant?
|
|
||||||
ShellError::GenericError {
|
|
||||||
error: "Reedline error".to_owned(),
|
|
||||||
msg: format!("{e}"),
|
|
||||||
span: None,
|
|
||||||
help: None,
|
|
||||||
inner: Vec::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn item_from_value(v: Value) -> Result<HistoryItem, ShellError> {
|
|
||||||
let span = v.span();
|
|
||||||
match v {
|
|
||||||
Value::Record { val, .. } => item_from_record(val.into_owned(), span),
|
|
||||||
Value::String { val, .. } => Ok(HistoryItem {
|
|
||||||
command_line: val,
|
|
||||||
id: None,
|
|
||||||
start_timestamp: None,
|
|
||||||
session_id: None,
|
|
||||||
hostname: None,
|
|
||||||
cwd: None,
|
|
||||||
duration: None,
|
|
||||||
exit_status: None,
|
|
||||||
more_info: None,
|
|
||||||
}),
|
|
||||||
_ => Err(ShellError::UnsupportedInput {
|
|
||||||
msg: "Only list and record inputs are supported".to_owned(),
|
|
||||||
input: v.get_type().to_string(),
|
|
||||||
msg_span: span,
|
|
||||||
input_span: span,
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn item_from_record(mut rec: Record, span: Span) -> Result<HistoryItem, ShellError> {
|
|
||||||
let cmd = match rec.remove(fields::COMMAND_LINE) {
|
|
||||||
Some(v) => v.as_str()?.to_owned(),
|
|
||||||
None => {
|
|
||||||
return Err(ShellError::TypeMismatch {
|
|
||||||
err_message: format!("missing column: {}", fields::COMMAND_LINE),
|
|
||||||
span,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
fn get<T>(
|
|
||||||
rec: &mut Record,
|
|
||||||
field: &'static str,
|
|
||||||
f: impl FnOnce(Value) -> Result<T, ShellError>,
|
|
||||||
) -> Result<Option<T>, ShellError> {
|
|
||||||
rec.remove(field).map(f).transpose()
|
|
||||||
}
|
|
||||||
|
|
||||||
let rec = &mut rec;
|
|
||||||
let item = HistoryItem {
|
|
||||||
command_line: cmd,
|
|
||||||
id: None,
|
|
||||||
start_timestamp: get(rec, fields::START_TIMESTAMP, |v| Ok(v.as_date()?.to_utc()))?,
|
|
||||||
hostname: get(rec, fields::HOSTNAME, |v| Ok(v.as_str()?.to_owned()))?,
|
|
||||||
cwd: get(rec, fields::CWD, |v| Ok(v.as_str()?.to_owned()))?,
|
|
||||||
exit_status: get(rec, fields::EXIT_STATUS, |v| v.as_int())?,
|
|
||||||
duration: get(rec, fields::DURATION, duration_from_value)?,
|
|
||||||
more_info: None,
|
|
||||||
// TODO: Currently reedline doesn't let you create session IDs.
|
|
||||||
session_id: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
if !rec.is_empty() {
|
|
||||||
let cols = rec.columns().map(|s| s.as_str()).collect::<Vec<_>>();
|
|
||||||
return Err(ShellError::TypeMismatch {
|
|
||||||
err_message: format!("unsupported column names: {}", cols.join(", ")),
|
|
||||||
span,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
Ok(item)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn duration_from_value(v: Value) -> Result<std::time::Duration, ShellError> {
|
|
||||||
chrono::Duration::nanoseconds(v.as_duration()?)
|
|
||||||
.to_std()
|
|
||||||
.map_err(|_| ShellError::IOError {
|
|
||||||
msg: "negative duration not supported".to_string(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn find_backup_path(path: &Path) -> Result<PathBuf, ShellError> {
|
|
||||||
let Ok(mut bak_path) = path.to_path_buf().into_os_string().into_string() else {
|
|
||||||
// This isn't fundamentally a problem, but trying to work with OsString is a nightmare.
|
|
||||||
return Err(ShellError::IOError {
|
|
||||||
msg: "History path mush be representable as UTF-8".to_string(),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
bak_path.push_str(".bak");
|
|
||||||
if !Path::new(&bak_path).exists() {
|
|
||||||
return Ok(bak_path.into());
|
|
||||||
}
|
|
||||||
let base_len = bak_path.len();
|
|
||||||
for i in 1..100 {
|
|
||||||
use std::fmt::Write;
|
|
||||||
bak_path.truncate(base_len);
|
|
||||||
write!(&mut bak_path, ".{i}").unwrap();
|
|
||||||
if !Path::new(&bak_path).exists() {
|
|
||||||
return Ok(PathBuf::from(bak_path));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(ShellError::IOError {
|
|
||||||
msg: "Too many existing backup files".to_string(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn backup(path: &Path) -> Result<Option<PathBuf>, ShellError> {
|
|
||||||
match path.metadata() {
|
|
||||||
Ok(md) if md.is_file() => (),
|
|
||||||
Ok(_) => {
|
|
||||||
return Err(ShellError::IOError {
|
|
||||||
msg: "history path exists but is not a file".to_string(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
|
|
||||||
Err(e) => return Err(e.into()),
|
|
||||||
}
|
|
||||||
let bak_path = find_backup_path(path)?;
|
|
||||||
std::fs::copy(path, &bak_path)?;
|
|
||||||
Ok(Some(bak_path))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use chrono::DateTime;
|
|
||||||
use rstest::rstest;
|
|
-    use super::*;
-
-    #[test]
-    fn test_item_from_value_string() -> Result<(), ShellError> {
-        let item = item_from_value(Value::string("foo", Span::unknown()))?;
-        assert_eq!(
-            item,
-            HistoryItem {
-                command_line: "foo".to_string(),
-                id: None,
-                start_timestamp: None,
-                session_id: None,
-                hostname: None,
-                cwd: None,
-                duration: None,
-                exit_status: None,
-                more_info: None
-            }
-        );
-        Ok(())
-    }
-
-    #[test]
-    fn test_item_from_value_record() {
-        let span = Span::unknown();
-        let rec = new_record(&[
-            ("command", Value::string("foo", span)),
-            (
-                "start_timestamp",
-                Value::date(
-                    DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00").unwrap(),
-                    span,
-                ),
-            ),
-            ("hostname", Value::string("localhost", span)),
-            ("cwd", Value::string("/home/test", span)),
-            ("duration", Value::duration(100_000_000, span)),
-            ("exit_status", Value::int(42, span)),
-        ]);
-        let item = item_from_value(rec).unwrap();
-        assert_eq!(
-            item,
-            HistoryItem {
-                command_line: "foo".to_string(),
-                id: None,
-                start_timestamp: Some(
-                    DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00")
-                        .unwrap()
-                        .to_utc()
-                ),
-                hostname: Some("localhost".to_string()),
-                cwd: Some("/home/test".to_string()),
-                duration: Some(std::time::Duration::from_nanos(100_000_000)),
-                exit_status: Some(42),
-                session_id: None,
-                more_info: None
-            }
-        );
-    }
-
-    #[test]
-    fn test_item_from_value_record_extra_field() {
-        let span = Span::unknown();
-        let rec = new_record(&[
-            ("command_line", Value::string("foo", span)),
-            ("id_nonexistent", Value::int(1, span)),
-        ]);
-        assert!(item_from_value(rec).is_err());
-    }
-
-    #[test]
-    fn test_item_from_value_record_bad_type() {
-        let span = Span::unknown();
-        let rec = new_record(&[
-            ("command_line", Value::string("foo", span)),
-            ("id", Value::string("one".to_string(), span)),
-        ]);
-        assert!(item_from_value(rec).is_err());
-    }
-
-    fn new_record(rec: &[(&'static str, Value)]) -> Value {
-        let span = Span::unknown();
-        let rec = Record::from_raw_cols_vals(
-            rec.iter().map(|(col, _)| col.to_string()).collect(),
-            rec.iter().map(|(_, val)| val.clone()).collect(),
-            span,
-            span,
-        )
-        .unwrap();
-        Value::record(rec, span)
-    }
-
-    #[rstest]
-    #[case::no_backup(&["history.dat"], "history.dat.bak")]
-    #[case::backup_exists(&["history.dat", "history.dat.bak"], "history.dat.bak.1")]
-    #[case::multiple_backups_exists(&["history.dat", "history.dat.bak", "history.dat.bak.1"], "history.dat.bak.2")]
-    fn test_find_backup_path(#[case] existing: &[&str], #[case] want: &str) {
-        let dir = tempfile::tempdir().unwrap();
-        for name in existing {
-            std::fs::File::create_new(dir.path().join(name)).unwrap();
-        }
-        let got = find_backup_path(&dir.path().join("history.dat")).unwrap();
-        assert_eq!(got, dir.path().join(want))
-    }
-
-    #[test]
-    fn test_backup() {
-        let dir = tempfile::tempdir().unwrap();
-        let mut history = std::fs::File::create_new(dir.path().join("history.dat")).unwrap();
-        use std::io::Write;
-        write!(&mut history, "123").unwrap();
-        let want_bak_path = dir.path().join("history.dat.bak");
-        assert_eq!(
-            backup(&dir.path().join("history.dat")),
-            Ok(Some(want_bak_path.clone()))
-        );
-        let got_data = String::from_utf8(std::fs::read(want_bak_path).unwrap()).unwrap();
-        assert_eq!(got_data, "123");
-    }
-
-    #[test]
-    fn test_backup_no_file() {
-        let dir = tempfile::tempdir().unwrap();
-        let bak_path = backup(&dir.path().join("history.dat")).unwrap();
-        assert!(bak_path.is_none());
-    }
-}
@@ -1,8 +0,0 @@
-mod fields;
-mod history_;
-mod history_import;
-mod history_session;
-
-pub use history_::History;
-pub use history_import::HistoryImport;
-pub use history_session::HistorySession;
@@ -1,4 +1,8 @@
-use nu_engine::command_prelude::*;
+use nu_protocol::ast::Call;
+use nu_protocol::engine::{Command, EngineState, Stack};
+use nu_protocol::{
+    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
+};
 
 #[derive(Clone)]
 pub struct HistorySession;
@@ -8,13 +12,13 @@ impl Command for HistorySession {
         "history session"
     }
 
-    fn description(&self) -> &str {
+    fn usage(&self) -> &str {
         "Get the command history session."
     }
 
     fn signature(&self) -> nu_protocol::Signature {
         Signature::build("history session")
-            .category(Category::History)
+            .category(Category::Misc)
             .input_output_types(vec![(Type::Nothing, Type::Int)])
     }
 
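The rename running through this hunk and the ones that follow is mechanical: one side of the compare exposes help text through `usage()`/`extra_usage()`, the other spells the same hooks `description()`/`extra_description()`. A minimal, self-contained sketch of the shape (a local trait standing in for the real nu_protocol::Command, so nothing here claims the real trait's full signature):

// Sketch only: a local trait illustrating the naming difference, not nushell's API.
trait CommandHelp {
    // Older naming, as on the right-hand side of these hunks.
    fn usage(&self) -> &str;
    // The newer side calls this pair `description`/`extra_description`.
    fn extra_usage(&self) -> &str {
        ""
    }
}

struct HistorySessionSketch;

impl CommandHelp for HistorySessionSketch {
    fn usage(&self) -> &str {
        "Get the command history session."
    }
}

fn main() {
    println!("{}", HistorySessionSketch.usage());
}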
@@ -1,4 +1,9 @@
-use nu_engine::{command_prelude::*, get_full_help};
+use nu_engine::get_full_help;
+use nu_protocol::{
+    ast::Call,
+    engine::{Command, EngineState, Stack},
+    Category, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
+};
 
 #[derive(Clone)]
 pub struct Keybindings;
@@ -14,11 +19,11 @@ impl Command for Keybindings {
         .input_output_types(vec![(Type::Nothing, Type::String)])
     }
 
-    fn description(&self) -> &str {
+    fn usage(&self) -> &str {
         "Keybindings related commands."
     }
 
-    fn extra_description(&self) -> &str {
+    fn extra_usage(&self) -> &str {
         r#"You must use one of the following subcommands. Using this command as-is will only produce this help message.
 
 For more information on input and keybindings, check:
@@ -36,6 +41,16 @@ For more information on input and keybindings, check:
         call: &Call,
         _input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
-        Ok(Value::string(get_full_help(self, engine_state, stack), call.head).into_pipeline_data())
+        Ok(Value::string(
+            get_full_help(
+                &Keybindings.signature(),
+                &Keybindings.examples(),
+                engine_state,
+                stack,
+                self.is_parser_keyword(),
+            ),
+            call.head,
+        )
+        .into_pipeline_data())
     }
 }
@@ -1,4 +1,8 @@
-use nu_engine::command_prelude::*;
+use nu_protocol::{
+    ast::Call,
+    engine::{Command, EngineState, Stack},
+    record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
+};
 use reedline::get_reedline_default_keybindings;
 
 #[derive(Clone)]
@@ -12,10 +16,10 @@ impl Command for KeybindingsDefault {
     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .category(Category::Platform)
-            .input_output_types(vec![(Type::Nothing, Type::table())])
+            .input_output_types(vec![(Type::Nothing, Type::Table(vec![]))])
     }
 
-    fn description(&self) -> &str {
+    fn usage(&self) -> &str {
         "List default keybindings."
     }
 
@@ -1,4 +1,9 @@
-use nu_engine::command_prelude::*;
+use nu_protocol::{
+    ast::Call,
+    engine::{Command, EngineState, Stack},
+    record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type,
+    Value,
+};
 use reedline::{
     get_reedline_edit_commands, get_reedline_keybinding_modifiers, get_reedline_keycodes,
     get_reedline_prompt_edit_modes, get_reedline_reedline_events,
@@ -14,7 +19,7 @@ impl Command for KeybindingsList {
 
     fn signature(&self) -> Signature {
         Signature::build(self.name())
-            .input_output_types(vec![(Type::Nothing, Type::table())])
+            .input_output_types(vec![(Type::Nothing, Type::Table(vec![]))])
             .switch("modifiers", "list of modifiers", Some('m'))
             .switch("keycodes", "list of keycodes", Some('k'))
             .switch("modes", "list of edit modes", Some('o'))
@@ -23,7 +28,7 @@ impl Command for KeybindingsList {
             .category(Category::Platform)
     }
 
-    fn description(&self) -> &str {
+    fn usage(&self) -> &str {
         "List available options that can be used to create keybindings."
     }
 
@@ -49,26 +54,22 @@ impl Command for KeybindingsList {
 
     fn run(
         &self,
-        engine_state: &EngineState,
-        stack: &mut Stack,
+        _engine_state: &EngineState,
+        _stack: &mut Stack,
         call: &Call,
         _input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
-        let all_options = ["modifiers", "keycodes", "edits", "modes", "events"];
-
-        let presence = all_options
-            .iter()
-            .map(|option| call.has_flag(engine_state, stack, option))
-            .collect::<Result<Vec<_>, ShellError>>()?;
-
-        let no_option_specified = presence.iter().all(|present| !*present);
-
-        let records = all_options
-            .iter()
-            .zip(presence)
-            .filter(|(_, present)| no_option_specified || *present)
-            .flat_map(|(option, _)| get_records(option, call.head))
-            .collect();
+        let records = if call.named_len() == 0 {
+            let all_options = ["modifiers", "keycodes", "edits", "modes", "events"];
+            all_options
+                .iter()
+                .flat_map(|argument| get_records(argument, call.head))
+                .collect()
+        } else {
+            call.named_iter()
+                .flat_map(|(argument, _, _)| get_records(argument.item.as_str(), call.head))
+                .collect()
+        };
 
         Ok(Value::list(records, call.head).into_pipeline_data())
     }
@@ -1,7 +1,12 @@
-use crossterm::{
-    event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand,
+use crossterm::execute;
+use crossterm::QueueableCommand;
+use crossterm::{event::Event, event::KeyCode, event::KeyEvent, terminal};
+use nu_protocol::ast::Call;
+use nu_protocol::engine::{Command, EngineState, Stack};
+use nu_protocol::{
+    record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type,
+    Value,
 };
-use nu_engine::command_prelude::*;
 use std::io::{stdout, Write};
 
 #[derive(Clone)]
@@ -12,11 +17,11 @@ impl Command for KeybindingsListen {
         "keybindings listen"
     }
 
-    fn description(&self) -> &str {
+    fn usage(&self) -> &str {
         "Get input from the user."
     }
 
-    fn extra_description(&self) -> &str {
+    fn extra_usage(&self) -> &str {
         "This is an internal debugging tool. For better output, try `input listen --types [key]`"
     }
 
@@ -40,13 +45,13 @@ impl Command for KeybindingsListen {
             Ok(v) => Ok(v.into_pipeline_data()),
             Err(e) => {
                 terminal::disable_raw_mode()?;
-                Err(ShellError::GenericError {
-                    error: "Error with input".into(),
-                    msg: "".into(),
-                    span: None,
-                    help: Some(e.to_string()),
-                    inner: vec![],
-                })
+                Err(ShellError::GenericError(
+                    "Error with input".to_string(),
+                    "".to_string(),
+                    None,
+                    Some(e.to_string()),
+                    Vec::new(),
+                ))
             }
         }
     }
@@ -107,7 +112,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
         let o = match v {
             Value::Record { val, .. } => val
                 .iter()
-                .map(|(x, y)| format!("{}: {}", x, y.to_expanded_string("", config)))
+                .map(|(x, y)| format!("{}: {}", x, y.into_string("", config)))
                 .collect::<Vec<String>>()
                 .join(", "),
 
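A side note on the error construction in the hunk above: one side builds the generic error as a positional tuple variant, the other as a struct variant with named fields (error, msg, span, help, inner). A self-contained sketch of that shape change, using a local enum rather than the real nu_protocol::ShellError so no field types are being claimed:

// Sketch only: local enum illustrating tuple-variant vs struct-variant construction.
enum SketchError {
    // Tuple style: positional slots are easy to mix up.
    GenericTuple(String, String, Option<u32>, Option<String>, Vec<String>),
    // Struct style: every slot is self-describing at the call site.
    Generic {
        error: String,
        msg: String,
        span: Option<u32>,
        help: Option<String>,
        inner: Vec<String>,
    },
}

fn main() {
    let _old = SketchError::GenericTuple("Error with input".into(), "".into(), None, None, vec![]);
    let _new = SketchError::Generic {
        error: "Error with input".into(),
        msg: "".into(),
        span: None,
        help: Some("details".into()),
        inner: vec![],
    };
}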
@@ -1,13 +1,15 @@
 mod commandline;
 mod default_context;
 mod history;
+mod history_session;
 mod keybindings;
 mod keybindings_default;
 mod keybindings_list;
 mod keybindings_listen;
 
-pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor};
-pub use history::{History, HistoryImport, HistorySession};
+pub use commandline::Commandline;
+pub use history::History;
+pub use history_session::HistorySession;
 pub use keybindings::Keybindings;
 pub use keybindings_default::KeybindingsDefault;
 pub use keybindings_list::KeybindingsList;
@@ -1,43 +1,43 @@
-use crate::completions::CompletionOptions;
-use nu_protocol::{
-    engine::{Stack, StateWorkingSet},
-    Span,
-};
+use crate::completions::{CompletionOptions, SortBy};
+use nu_protocol::{engine::StateWorkingSet, levenshtein_distance, Span};
 use reedline::Suggestion;
 
+// Completer trait represents the three stages of the completion
+// fetch, filter and sort
 pub trait Completer {
-    /// Fetch, filter, and sort completions
-    #[allow(clippy::too_many_arguments)]
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
-        stack: &Stack,
-        prefix: &[u8],
+        prefix: Vec<u8>,
         span: Span,
         offset: usize,
         pos: usize,
         options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion>;
-}
-
-#[derive(Debug, Default, PartialEq)]
-pub struct SemanticSuggestion {
-    pub suggestion: Suggestion,
-    pub kind: Option<SuggestionKind>,
-}
-
-// TODO: think about name: maybe suggestion context?
-#[derive(Clone, Debug, PartialEq)]
-pub enum SuggestionKind {
-    Command(nu_protocol::engine::CommandType),
-    Type(nu_protocol::Type),
-}
-
-impl From<Suggestion> for SemanticSuggestion {
-    fn from(suggestion: Suggestion) -> Self {
-        Self {
-            suggestion,
-            ..Default::default()
-        }
+    ) -> Vec<Suggestion>;
+
+    fn get_sort_by(&self) -> SortBy {
+        SortBy::Ascending
+    }
+
+    fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
+        let prefix_str = String::from_utf8_lossy(&prefix).to_string();
+        let mut filtered_items = items;
+
+        // Sort items
+        match self.get_sort_by() {
+            SortBy::LevenshteinDistance => {
+                filtered_items.sort_by(|a, b| {
+                    let a_distance = levenshtein_distance(&prefix_str, &a.value);
+                    let b_distance = levenshtein_distance(&prefix_str, &b.value);
+                    a_distance.cmp(&b_distance)
+                });
+            }
+            SortBy::Ascending => {
+                filtered_items.sort_by(|a, b| a.value.cmp(&b.value));
+            }
+            SortBy::None => {}
+        };
+
+        filtered_items
     }
 }
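The right-hand side of this hunk folds sorting into the Completer itself: a completer reports a SortBy preference and inherits a default sort(). A minimal sketch of how an implementor overrides that default ordering, with local stand-ins for the real Suggestion and SortBy types so nothing beyond the pattern above is assumed:

// Sketch only: local types, not nushell's; the point is the default-method pattern.
#[derive(Clone)]
struct Sugg {
    value: String,
}

enum SortBy {
    Ascending,
    None,
}

trait CompleterSketch {
    fn get_sort_by(&self) -> SortBy {
        SortBy::Ascending
    }

    fn sort(&self, mut items: Vec<Sugg>) -> Vec<Sugg> {
        match self.get_sort_by() {
            SortBy::Ascending => items.sort_by(|a, b| a.value.cmp(&b.value)),
            SortBy::None => {}
        }
        items
    }
}

struct FileSketch;
impl CompleterSketch for FileSketch {}

fn main() {
    let sorted = FileSketch.sort(vec![
        Sugg { value: "b.txt".into() },
        Sugg { value: "a.txt".into() },
    ]);
    assert_eq!(sorted[0].value, "a.txt");
}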
@@ -1,19 +1,14 @@
-use std::collections::HashMap;
-
-use crate::{
-    completions::{Completer, CompletionOptions},
-    SuggestionKind,
-};
+use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
 use nu_parser::FlatShape;
 use nu_protocol::{
-    engine::{CachedFile, Stack, StateWorkingSet},
+    engine::{EngineState, StateWorkingSet},
     Span,
 };
 use reedline::Suggestion;
-
-use super::{completion_options::NuMatcher, SemanticSuggestion};
+use std::sync::Arc;
 
 pub struct CommandCompletion {
+    engine_state: Arc<EngineState>,
     flattened: Vec<(Span, FlatShape)>,
     flat_shape: FlatShape,
     force_completion_after_space: bool,
@@ -21,11 +16,14 @@ pub struct CommandCompletion {
 
 impl CommandCompletion {
     pub fn new(
+        engine_state: Arc<EngineState>,
+        _: &StateWorkingSet,
         flattened: Vec<(Span, FlatShape)>,
         flat_shape: FlatShape,
         force_completion_after_space: bool,
     ) -> Self {
         Self {
+            engine_state,
             flattened,
             flat_shape,
             force_completion_after_space,
@@ -34,60 +32,40 @@ impl CommandCompletion {
 
     fn external_command_completion(
         &self,
-        working_set: &StateWorkingSet,
-        sugg_span: reedline::Span,
-        matched_internal: impl Fn(&str) -> bool,
-        matcher: &mut NuMatcher<String>,
-    ) -> HashMap<String, SemanticSuggestion> {
-        let mut suggs = HashMap::new();
-
-        let paths = working_set.permanent_state.get_env_var_insensitive("path");
+        prefix: &str,
+        match_algorithm: MatchAlgorithm,
+    ) -> Vec<String> {
+        let mut executables = vec![];
+
+        // os agnostic way to get the PATH env var
+        let paths = self.engine_state.get_path_env_var();
 
         if let Some(paths) = paths {
             if let Ok(paths) = paths.as_list() {
                 for path in paths {
-                    let path = path.coerce_str().unwrap_or_default();
+                    let path = path.as_string().unwrap_or_default();
 
-                    if let Ok(mut contents) = std::fs::read_dir(path.as_ref()) {
+                    if let Ok(mut contents) = std::fs::read_dir(path) {
                         while let Some(Ok(item)) = contents.next() {
-                            if working_set
-                                .permanent_state
-                                .config
-                                .completions
-                                .external
-                                .max_results
-                                <= suggs.len() as i64
+                            if self.engine_state.config.max_external_completion_results
+                                > executables.len() as i64
+                                && !executables.contains(
+                                    &item
+                                        .path()
+                                        .file_name()
+                                        .map(|x| x.to_string_lossy().to_string())
+                                        .unwrap_or_default(),
+                                )
+                                && matches!(
+                                    item.path().file_name().map(|x| match_algorithm
+                                        .matches_str(&x.to_string_lossy(), prefix)),
+                                    Some(true)
+                                )
+                                && is_executable::is_executable(item.path())
                             {
-                                break;
-                            }
-                            let Ok(name) = item.file_name().into_string() else {
-                                continue;
-                            };
-                            let value = if matched_internal(&name) {
-                                format!("^{}", name)
-                            } else {
-                                name.clone()
-                            };
-                            if suggs.contains_key(&value) {
-                                continue;
-                            }
-                            if matcher.matches(&name) && is_executable::is_executable(item.path()) {
-                                // If there's an internal command with the same name, adds ^cmd to the
-                                // matcher so that both the internal and external command are included
-                                matcher.add(&name, value.clone());
-                                suggs.insert(
-                                    value.clone(),
-                                    SemanticSuggestion {
-                                        suggestion: Suggestion {
-                                            value,
-                                            span: sugg_span,
-                                            append_whitespace: true,
-                                            ..Default::default()
-                                        },
-                                        // TODO: is there a way to create a test?
-                                        kind: None,
-                                    },
-                                );
+                                if let Ok(name) = item.file_name().into_string() {
+                                    executables.push(name);
+                                }
                             }
                         }
                     }
@@ -95,7 +73,7 @@ impl CommandCompletion {
             }
         }
 
-        suggs
+        executables
     }
 
     fn complete_commands(
@@ -104,59 +82,60 @@ impl CommandCompletion {
         span: Span,
         offset: usize,
         find_externals: bool,
-        options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion> {
+        match_algorithm: MatchAlgorithm,
+    ) -> Vec<Suggestion> {
         let partial = working_set.get_span_contents(span);
-        let mut matcher = NuMatcher::new(String::from_utf8_lossy(partial), options.clone());
-
-        let sugg_span = reedline::Span::new(span.start - offset, span.end - offset);
-
-        let mut internal_suggs = HashMap::new();
-        let filtered_commands = working_set.find_commands_by_predicate(
-            |name| {
-                let name = String::from_utf8_lossy(name);
-                matcher.add(&name, name.to_string())
-            },
-            true,
-        );
-        for (name, description, typ) in filtered_commands {
-            let name = String::from_utf8_lossy(&name);
-            internal_suggs.insert(
-                name.to_string(),
-                SemanticSuggestion {
-                    suggestion: Suggestion {
-                        value: name.to_string(),
-                        description,
-                        span: sugg_span,
-                        append_whitespace: true,
-                        ..Suggestion::default()
-                    },
-                    kind: Some(SuggestionKind::Command(typ)),
-                },
-            );
-        }
-
-        let mut external_suggs = if find_externals {
-            self.external_command_completion(
-                working_set,
-                sugg_span,
-                |name| internal_suggs.contains_key(name),
-                &mut matcher,
-            )
-        } else {
-            HashMap::new()
-        };
-
-        let mut res = Vec::new();
-        for cmd_name in matcher.results() {
-            if let Some(sugg) = internal_suggs
-                .remove(&cmd_name)
-                .or_else(|| external_suggs.remove(&cmd_name))
-            {
-                res.push(sugg);
+
+        let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
+
+        let mut results = working_set
+            .find_commands_by_predicate(filter_predicate, true)
+            .into_iter()
+            .map(move |x| Suggestion {
+                value: String::from_utf8_lossy(&x.0).to_string(),
+                description: x.1,
+                extra: None,
+                span: reedline::Span::new(span.start - offset, span.end - offset),
+                append_whitespace: true,
+            })
+            .collect::<Vec<_>>();
+
+        let partial = working_set.get_span_contents(span);
+        let partial = String::from_utf8_lossy(partial).to_string();
+
+        if find_externals {
+            let results_external = self
+                .external_command_completion(&partial, match_algorithm)
+                .into_iter()
+                .map(move |x| Suggestion {
+                    value: x,
+                    description: None,
+                    extra: None,
+                    span: reedline::Span::new(span.start - offset, span.end - offset),
+                    append_whitespace: true,
+                });
+
+            let results_strings: Vec<String> =
+                results.clone().into_iter().map(|x| x.value).collect();
+
+            for external in results_external {
+                if results_strings.contains(&external.value) {
+                    results.push(Suggestion {
+                        value: format!("^{}", external.value),
+                        description: None,
+                        extra: None,
+                        span: external.span,
+                        append_whitespace: true,
+                    })
+                } else {
+                    results.push(external)
+                }
             }
+
+            results
+        } else {
+            results
         }
-        res
     }
 
@@ -164,13 +143,12 @@ impl Completer for CommandCompletion {
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
-        _stack: &Stack,
-        _prefix: &[u8],
+        _prefix: Vec<u8>,
         span: Span,
         offset: usize,
         pos: usize,
         options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion> {
+    ) -> Vec<Suggestion> {
         let last = self
             .flattened
             .iter()
@@ -195,7 +173,7 @@ impl Completer for CommandCompletion {
                 Span::new(last.0.start, pos),
                 offset,
                 false,
-                options,
+                options.match_algorithm,
             )
         } else {
             vec![]
@@ -206,7 +184,7 @@ impl Completer for CommandCompletion {
         }
 
         let config = working_set.get_config();
-        if matches!(self.flat_shape, nu_parser::FlatShape::External)
+        let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
             || matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
             || ((span.end - span.start) == 0)
             || is_passthrough_command(working_set.delta.get_file_contents())
@@ -220,12 +198,18 @@ impl Completer for CommandCompletion {
                 working_set,
                 span,
                 offset,
-                config.completions.external.enable,
-                options,
+                config.enable_external_completion,
+                options.match_algorithm,
             )
         } else {
             vec![]
-        }
+        };
+
+        subcommands.into_iter().chain(commands).collect::<Vec<_>>()
+    }
+
+    fn get_sort_by(&self) -> SortBy {
+        SortBy::LevenshteinDistance
     }
 }
 
@@ -242,9 +226,8 @@ pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
     }
 }
 
-pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool {
-    for cached_file in working_set_file_contents {
-        let contents = &cached_file.content;
+pub fn is_passthrough_command(working_set_file_contents: &[(Vec<u8>, usize, usize)]) -> bool {
+    for (contents, _, _) in working_set_file_contents {
         let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
         let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
 
@@ -264,12 +247,10 @@ pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool
 #[cfg(test)]
 mod command_completions_tests {
     use super::*;
-    use nu_protocol::engine::EngineState;
-    use std::sync::Arc;
 
     #[test]
     fn test_find_non_whitespace_index() {
-        let commands = [
+        let commands = vec![
             (" hello", 4),
             ("sudo ", 0),
             (" sudo ", 2),
@@ -289,7 +270,7 @@ mod command_completions_tests {
 
     #[test]
     fn test_is_last_command_passthrough() {
-        let commands = [
+        let commands = vec![
             (" hello", false),
             (" sudo ", true),
             ("sudo ", true),
@@ -311,7 +292,7 @@ mod command_completions_tests {
             let input = ele.0.as_bytes();
 
             let mut engine_state = EngineState::new();
-            engine_state.add_file("test.nu".into(), Arc::new([]));
+            engine_state.add_file("test.nu".into(), vec![]);
 
             let delta = {
                 let mut working_set = StateWorkingSet::new(&engine_state);
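Both sides of the hunk above keep the same rule for name clashes: when an external binary on PATH shares its name with an internal command, the external suggestion is surfaced as `^name` so both remain selectable. A small, self-contained sketch of that dedup rule (plain strings instead of the real Suggestion type):

use std::collections::HashSet;

// Sketch only: merge internal and external command names, escaping clashes with `^`.
fn merge_suggestions(internals: &[&str], externals: &[&str]) -> Vec<String> {
    let internal_set: HashSet<&str> = internals.iter().copied().collect();
    let mut out: Vec<String> = internals.iter().map(|s| s.to_string()).collect();
    for ext in externals {
        if internal_set.contains(ext) {
            // Same name exists internally: expose the external as ^name.
            out.push(format!("^{ext}"));
        } else {
            out.push(ext.to_string());
        }
    }
    out
}

fn main() {
    let merged = merge_suggestions(&["ls", "sort"], &["ls", "grep"]);
    assert_eq!(merged, vec!["ls", "sort", "^ls", "grep"]);
}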
@@ -1,19 +1,17 @@
 use crate::completions::{
     CommandCompletion, Completer, CompletionOptions, CustomCompletion, DirectoryCompletion,
-    DotNuCompletion, FileCompletion, FlagCompletion, OperatorCompletion, VariableCompletion,
+    DotNuCompletion, FileCompletion, FlagCompletion, VariableCompletion,
 };
-use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
 use nu_engine::eval_block;
-use nu_parser::{flatten_pipeline_element, parse, FlatShape};
+use nu_parser::{flatten_expression, parse, FlatShape};
 use nu_protocol::{
-    debugger::WithoutDebug,
-    engine::{Closure, EngineState, Stack, StateWorkingSet},
-    PipelineData, Span, Value,
+    ast::PipelineElement,
+    engine::{EngineState, Stack, StateWorkingSet},
+    BlockId, PipelineData, Span, Value,
 };
 use reedline::{Completer as ReedlineCompleter, Suggestion};
-use std::{str, sync::Arc};
-
-use super::base::{SemanticSuggestion, SuggestionKind};
+use std::str;
+use std::sync::Arc;
 
 #[derive(Clone)]
 pub struct NuCompleter {
@@ -22,61 +20,54 @@ pub struct NuCompleter {
 }
 
 impl NuCompleter {
-    pub fn new(engine_state: Arc<EngineState>, stack: Arc<Stack>) -> Self {
+    pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
         Self {
             engine_state,
-            stack: Stack::with_parent(stack).reset_out_dest().collect_value(),
+            stack,
         }
     }
 
-    pub fn fetch_completions_at(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
-        self.completion_helper(line, pos)
-    }
-
     // Process the completion for a given completer
     fn process_completion<T: Completer>(
         &self,
         completer: &mut T,
         working_set: &StateWorkingSet,
-        prefix: &[u8],
+        prefix: Vec<u8>,
         new_span: Span,
         offset: usize,
         pos: usize,
-    ) -> Vec<SemanticSuggestion> {
+    ) -> Vec<Suggestion> {
         let config = self.engine_state.get_config();
 
         let options = CompletionOptions {
-            case_sensitive: config.completions.case_sensitive,
-            match_algorithm: config.completions.algorithm.into(),
-            sort: config.completions.sort,
+            case_sensitive: config.case_sensitive_completions,
+            match_algorithm: config.completion_algorithm.into(),
            ..Default::default()
        };
 
-        completer.fetch(
-            working_set,
-            &self.stack,
-            prefix,
-            new_span,
-            offset,
-            pos,
-            &options,
-        )
+        // Fetch
+        let mut suggestions =
+            completer.fetch(working_set, prefix.clone(), new_span, offset, pos, &options);
+
+        // Sort
+        suggestions = completer.sort(suggestions, prefix);
+
+        suggestions
     }
 
     fn external_completion(
         &self,
-        closure: &Closure,
+        block_id: BlockId,
         spans: &[String],
         offset: usize,
         span: Span,
-    ) -> Option<Vec<SemanticSuggestion>> {
-        let block = self.engine_state.get_block(closure.block_id);
-        let mut callee_stack = self
-            .stack
-            .captures_to_stack_preserve_out_dest(closure.captures.clone());
+    ) -> Option<Vec<Suggestion>> {
+        let stack = self.stack.clone();
+        let block = self.engine_state.get_block(block_id);
+        let mut callee_stack = stack.gather_captures(&self.engine_state, &block.captures);
 
         // Line
-        if let Some(pos_arg) = block.signature.required_positional.first() {
+        if let Some(pos_arg) = block.signature.required_positional.get(0) {
             if let Some(var_id) = pos_arg.var_id {
                 callee_stack.add_var(
                     var_id,
@@ -91,15 +82,18 @@ impl NuCompleter {
             }
         }
 
-        let result = eval_block::<WithoutDebug>(
+        let result = eval_block(
             &self.engine_state,
             &mut callee_stack,
             block,
             PipelineData::empty(),
+            true,
+            true,
         );
 
-        match result.and_then(|data| data.into_value(span)) {
-            Ok(value) => {
+        match result {
+            Ok(pd) => {
+                let value = pd.into_value(span);
                 if let Value::List { vals, .. } = value {
                     let result =
                         map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);
@@ -113,298 +107,268 @@ impl NuCompleter {
         None
     }
 
-    fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
+    fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
         let mut working_set = StateWorkingSet::new(&self.engine_state);
         let offset = working_set.next_span_start();
-        // TODO: Callers should be trimming the line themselves
-        let line = if line.len() > pos { &line[..pos] } else { line };
-        // Adjust offset so that the spans of the suggestions will start at the right
-        // place even with `only_buffer_difference: true`
-        let fake_offset = offset + line.len() - pos;
-        let pos = offset + line.len();
         let initial_line = line.to_string();
         let mut line = line.to_string();
-        line.push('a');
+        line.insert(pos, 'a');
+        let pos = offset + pos;
         let config = self.engine_state.get_config();
 
         let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
 
-        for pipeline in &output.pipelines {
-            for pipeline_element in &pipeline.elements {
-                let flattened = flatten_pipeline_element(&working_set, pipeline_element);
-                let mut spans: Vec<String> = vec![];
-
-                for (flat_idx, flat) in flattened.iter().enumerate() {
-                    let is_passthrough_command = spans
-                        .first()
-                        .filter(|content| content.as_str() == "sudo" || content.as_str() == "doas")
-                        .is_some();
-                    // Read the current spam to string
-                    let current_span = working_set.get_span_contents(flat.0).to_vec();
-                    let current_span_str = String::from_utf8_lossy(&current_span);
-
-                    let is_last_span = pos >= flat.0.start && pos < flat.0.end;
-
-                    // Skip the last 'a' as span item
-                    if is_last_span {
-                        let offset = pos - flat.0.start;
-                        if offset == 0 {
-                            spans.push(String::new())
-                        } else {
-                            let mut current_span_str = current_span_str.to_string();
-                            current_span_str.remove(offset);
-                            spans.push(current_span_str);
-                        }
-                    } else {
-                        spans.push(current_span_str.to_string());
-                    }
-
-                    // Complete based on the last span
-                    if is_last_span {
-                        // Context variables
-                        let most_left_var =
-                            most_left_variable(flat_idx, &working_set, flattened.clone());
-
-                        // Create a new span
-                        let new_span = Span::new(flat.0.start, flat.0.end - 1);
-
-                        // Parses the prefix. Completion should look up to the cursor position, not after.
-                        let mut prefix = working_set.get_span_contents(flat.0);
-                        let index = pos - flat.0.start;
-                        prefix = &prefix[..index];
-
-                        // Variables completion
-                        if prefix.starts_with(b"$") || most_left_var.is_some() {
-                            let mut variable_names_completer =
-                                VariableCompletion::new(most_left_var.unwrap_or((vec![], vec![])));
-
-                            let mut variable_completions = self.process_completion(
-                                &mut variable_names_completer,
-                                &working_set,
-                                prefix,
-                                new_span,
-                                fake_offset,
-                                pos,
-                            );
-
-                            let mut variable_operations_completer =
-                                OperatorCompletion::new(pipeline_element.expr.clone());
-
-                            let mut variable_operations_completions = self.process_completion(
-                                &mut variable_operations_completer,
-                                &working_set,
-                                prefix,
-                                new_span,
-                                fake_offset,
-                                pos,
-                            );
-
-                            variable_completions.append(&mut variable_operations_completions);
-                            return variable_completions;
-                        }
-
-                        // Flags completion
-                        if prefix.starts_with(b"-") {
-                            // Try to complete flag internally
-                            let mut completer = FlagCompletion::new(pipeline_element.expr.clone());
-                            let result = self.process_completion(
-                                &mut completer, &working_set, prefix, new_span, fake_offset, pos,
-                            );
-
-                            if !result.is_empty() {
-                                return result;
-                            }
-
-                            // We got no results for internal completion
-                            // now we can check if external completer is set and use it
-                            if let Some(closure) = config.completions.external.completer.as_ref() {
-                                if let Some(external_result) =
-                                    self.external_completion(closure, &spans, fake_offset, new_span)
-                                {
-                                    return external_result;
-                                }
-                            }
-                        }
-
-                        // specially check if it is currently empty - always complete commands
-                        if (is_passthrough_command && flat_idx == 1)
-                            || (flat_idx == 0 && working_set.get_span_contents(new_span).is_empty())
-                        {
-                            let mut completer = CommandCompletion::new(
-                                flattened.clone(),
-                                // flat_idx,
-                                FlatShape::String,
-                                true,
-                            );
-                            return self.process_completion(
-                                &mut completer, &working_set, prefix, new_span, fake_offset, pos,
-                            );
-                        }
-
-                        // Completions that depends on the previous expression (e.g: use, source-env)
-                        if (is_passthrough_command && flat_idx > 1) || flat_idx > 0 {
-                            if let Some(previous_expr) = flattened.get(flat_idx - 1) {
-                                // Read the content for the previous expression
-                                let prev_expr_str =
-                                    working_set.get_span_contents(previous_expr.0).to_vec();
-
-                                // Completion for .nu files
-                                if prev_expr_str == b"use"
-                                    || prev_expr_str == b"overlay use"
-                                    || prev_expr_str == b"source-env"
-                                {
-                                    let mut completer = DotNuCompletion::new();
-                                    return self.process_completion(
-                                        &mut completer, &working_set, prefix, new_span,
-                                        fake_offset, pos,
-                                    );
-                                } else if prev_expr_str == b"ls" {
-                                    let mut completer = FileCompletion::new();
-                                    return self.process_completion(
-                                        &mut completer, &working_set, prefix, new_span,
-                                        fake_offset, pos,
-                                    );
-                                } else if matches!(
-                                    previous_expr.1,
-                                    FlatShape::Float
-                                        | FlatShape::Int
-                                        | FlatShape::String
-                                        | FlatShape::List
-                                        | FlatShape::Bool
-                                        | FlatShape::Variable(_)
-                                ) {
-                                    let mut completer =
-                                        OperatorCompletion::new(pipeline_element.expr.clone());
-                                    let operator_suggestion = self.process_completion(
-                                        &mut completer, &working_set, prefix, new_span,
-                                        fake_offset, pos,
-                                    );
-                                    if !operator_suggestion.is_empty() {
-                                        return operator_suggestion;
-                                    }
-                                }
-                            }
-                        }
-
-                        // Match other types
-                        match &flat.1 {
-                            FlatShape::Custom(decl_id) => {
-                                let mut completer = CustomCompletion::new(
-                                    self.stack.clone(),
-                                    *decl_id,
-                                    initial_line,
-                                );
-                                return self.process_completion(
-                                    &mut completer, &working_set, prefix, new_span, fake_offset, pos,
-                                );
-                            }
-                            FlatShape::Directory => {
-                                let mut completer = DirectoryCompletion::new();
-                                return self.process_completion(
-                                    &mut completer, &working_set, prefix, new_span, fake_offset, pos,
-                                );
-                            }
-                            FlatShape::Filepath | FlatShape::GlobPattern => {
-                                let mut completer = FileCompletion::new();
-                                return self.process_completion(
-                                    &mut completer, &working_set, prefix, new_span, fake_offset, pos,
-                                );
-                            }
-                            flat_shape => {
-                                let mut completer = CommandCompletion::new(
-                                    flattened.clone(),
-                                    // flat_idx,
-                                    flat_shape.clone(),
-                                    false,
-                                );
-
-                                let mut out: Vec<_> = self.process_completion(
-                                    &mut completer, &working_set, prefix, new_span, fake_offset, pos,
-                                );
-
-                                if !out.is_empty() {
-                                    return out;
-                                }
-
-                                // Try to complete using an external completer (if set)
-                                if let Some(closure) =
-                                    config.completions.external.completer.as_ref()
-                                {
-                                    if let Some(external_result) = self.external_completion(
-                                        closure, &spans, fake_offset, new_span,
-                                    ) {
-                                        return external_result;
-                                    }
-                                }
-
-                                // Check for file completion
-                                let mut completer = FileCompletion::new();
-                                out = self.process_completion(
-                                    &mut completer, &working_set, prefix, new_span, fake_offset, pos,
-                                );
-
-                                if !out.is_empty() {
-                                    return out;
-                                }
-                            }
-                        };
-                    }
-                }
-            }
-        }
+        for pipeline in output.pipelines.into_iter() {
+            for pipeline_element in pipeline.elements {
+                match pipeline_element {
+                    PipelineElement::Expression(_, expr)
+                    | PipelineElement::Redirection(_, _, expr)
+                    | PipelineElement::And(_, expr)
+                    | PipelineElement::Or(_, expr)
+                    | PipelineElement::SameTargetRedirection { cmd: (_, expr), .. }
+                    | PipelineElement::SeparateRedirection { out: (_, expr), .. } => {
+                        let flattened: Vec<_> = flatten_expression(&working_set, &expr);
+                        let mut spans: Vec<String> = vec![];
+
+                        for (flat_idx, flat) in flattened.iter().enumerate() {
+                            let is_passthrough_command = spans
+                                .first()
+                                .filter(|content| {
+                                    content.as_str() == "sudo" || content.as_str() == "doas"
+                                })
+                                .is_some();
+                            // Read the current spam to string
+                            let current_span = working_set.get_span_contents(flat.0).to_vec();
+                            let current_span_str = String::from_utf8_lossy(&current_span);
+
+                            // Skip the last 'a' as span item
+                            if flat_idx == flattened.len() - 1 {
+                                let mut chars = current_span_str.chars();
+                                chars.next_back();
+                                let current_span_str = chars.as_str().to_owned();
+                                spans.push(current_span_str.to_string());
+                            } else {
+                                spans.push(current_span_str.to_string());
+                            }
+
+                            // Complete based on the last span
+                            if pos >= flat.0.start && pos < flat.0.end {
+                                // Context variables
+                                let most_left_var =
+                                    most_left_variable(flat_idx, &working_set, flattened.clone());
+
+                                // Create a new span
+                                let new_span = Span::new(flat.0.start, flat.0.end - 1);
+
+                                // Parses the prefix. Completion should look up to the cursor position, not after.
+                                let mut prefix = working_set.get_span_contents(flat.0).to_vec();
+                                let index = pos - flat.0.start;
+                                prefix.drain(index..);
+
+                                // Variables completion
+                                if prefix.starts_with(b"$") || most_left_var.is_some() {
+                                    let mut completer = VariableCompletion::new(
+                                        self.engine_state.clone(),
+                                        self.stack.clone(),
+                                        most_left_var.unwrap_or((vec![], vec![])),
+                                    );
+                                    return self.process_completion(
+                                        &mut completer, &working_set, prefix, new_span, offset, pos,
+                                    );
+                                }
+
+                                // Flags completion
+                                if prefix.starts_with(b"-") {
+                                    // Try to complete flag internally
+                                    let mut completer = FlagCompletion::new(expr.clone());
+                                    let result = self.process_completion(
+                                        &mut completer, &working_set, prefix.clone(), new_span,
+                                        offset, pos,
+                                    );
+
+                                    if !result.is_empty() {
+                                        return result;
+                                    }
+
+                                    // We got no results for internal completion
+                                    // now we can check if external completer is set and use it
+                                    if let Some(block_id) = config.external_completer {
+                                        if let Some(external_result) = self
+                                            .external_completion(block_id, &spans, offset, new_span)
+                                        {
+                                            return external_result;
+                                        }
+                                    }
+                                }
+
+                                // specially check if it is currently empty - always complete commands
+                                if (is_passthrough_command && flat_idx == 1)
+                                    || (flat_idx == 0
+                                        && working_set.get_span_contents(new_span).is_empty())
+                                {
+                                    let mut completer = CommandCompletion::new(
+                                        self.engine_state.clone(),
+                                        &working_set,
+                                        flattened.clone(),
+                                        // flat_idx,
+                                        FlatShape::String,
+                                        true,
+                                    );
+                                    return self.process_completion(
+                                        &mut completer, &working_set, prefix, new_span, offset, pos,
+                                    );
+                                }
+
+                                // Completions that depends on the previous expression (e.g: use, source-env)
+                                if (is_passthrough_command && flat_idx > 1) || flat_idx > 0 {
+                                    if let Some(previous_expr) = flattened.get(flat_idx - 1) {
+                                        // Read the content for the previous expression
+                                        let prev_expr_str =
+                                            working_set.get_span_contents(previous_expr.0).to_vec();
+
+                                        // Completion for .nu files
+                                        if prev_expr_str == b"use" || prev_expr_str == b"source-env"
+                                        {
+                                            let mut completer =
+                                                DotNuCompletion::new(self.engine_state.clone());
+                                            return self.process_completion(
+                                                &mut completer, &working_set, prefix, new_span,
+                                                offset, pos,
+                                            );
+                                        } else if prev_expr_str == b"ls" {
+                                            let mut completer =
+                                                FileCompletion::new(self.engine_state.clone());
+                                            return self.process_completion(
+                                                &mut completer, &working_set, prefix, new_span,
+                                                offset, pos,
+                                            );
+                                        }
+                                    }
+                                }
+
+                                // Match other types
+                                match &flat.1 {
+                                    FlatShape::Custom(decl_id) => {
+                                        let mut completer = CustomCompletion::new(
+                                            self.engine_state.clone(),
+                                            self.stack.clone(),
+                                            *decl_id,
+                                            initial_line,
+                                        );
+                                        return self.process_completion(
+                                            &mut completer, &working_set, prefix, new_span,
+                                            offset, pos,
+                                        );
+                                    }
+                                    FlatShape::Directory => {
+                                        let mut completer =
+                                            DirectoryCompletion::new(self.engine_state.clone());
+                                        return self.process_completion(
+                                            &mut completer, &working_set, prefix, new_span,
+                                            offset, pos,
+                                        );
+                                    }
+                                    FlatShape::Filepath | FlatShape::GlobPattern => {
+                                        let mut completer =
+                                            FileCompletion::new(self.engine_state.clone());
+                                        return self.process_completion(
+                                            &mut completer, &working_set, prefix, new_span,
+                                            offset, pos,
+                                        );
+                                    }
+                                    flat_shape => {
+                                        let mut completer = CommandCompletion::new(
+                                            self.engine_state.clone(),
+                                            &working_set,
+                                            flattened.clone(),
+                                            // flat_idx,
+                                            flat_shape.clone(),
+                                            false,
+                                        );
+
+                                        let mut out: Vec<_> = self.process_completion(
+                                            &mut completer, &working_set, prefix.clone(),
+                                            new_span, offset, pos,
+                                        );
+
+                                        if !out.is_empty() {
+                                            return out;
+                                        }
+
+                                        // Try to complete using an external completer (if set)
+                                        if let Some(block_id) = config.external_completer {
+                                            if let Some(external_result) = self.external_completion(
+                                                block_id, &spans, offset, new_span,
+                                            ) {
+                                                if !external_result.is_empty() {
+                                                    return external_result;
+                                                }
+                                            }
+                                        }
+
+                                        // Check for file completion
+                                        let mut completer =
+                                            FileCompletion::new(self.engine_state.clone());
+                                        out = self.process_completion(
+                                            &mut completer, &working_set, prefix, new_span,
+                                            offset, pos,
+                                        );
+
+                                        if !out.is_empty() {
+                                            return out;
+                                        }
+                                    }
+                                };
+                            }
+                        }
+                    }
+                }
+            }
+        }
@@ -417,9 +381,6 @@ impl NuCompleter {
 impl ReedlineCompleter for NuCompleter {
     fn complete(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
         self.completion_helper(line, pos)
-            .into_iter()
-            .map(|s| s.suggestion)
-            .collect()
     }
 }
 
@@ -477,20 +438,19 @@ pub fn map_value_completions<'a>(
     list: impl Iterator<Item = &'a Value>,
     span: Span,
     offset: usize,
-) -> Vec<SemanticSuggestion> {
+) -> Vec<Suggestion> {
     list.filter_map(move |x| {
         // Match for string values
-        if let Ok(s) = x.coerce_string() {
-            return Some(SemanticSuggestion {
-                suggestion: Suggestion {
-                    value: s,
-                    span: reedline::Span {
-                        start: span.start - offset,
-                        end: span.end - offset,
-                    },
-                    ..Suggestion::default()
-                },
-                kind: Some(SuggestionKind::Type(x.get_type())),
+        if let Ok(s) = x.as_string() {
+            return Some(Suggestion {
+                value: s,
+                description: None,
+                extra: None,
+                span: reedline::Span {
+                    start: span.start - offset,
+                    end: span.end - offset,
+                },
+                append_whitespace: false,
             });
         }
 
@@ -498,11 +458,13 @@ pub fn map_value_completions<'a>(
         if let Ok(record) = x.as_record() {
             let mut suggestion = Suggestion {
                 value: String::from(""), // Initialize with empty string
+                description: None,
+                extra: None,
                 span: reedline::Span {
                     start: span.start - offset,
                     end: span.end - offset,
                 },
-                ..Suggestion::default()
+                append_whitespace: false,
             };
 
             // Iterate the cols looking for `value` and `description`
@@ -510,7 +472,7 @@ pub fn map_value_completions<'a>(
                 // Match `value` column
                 if it.0 == "value" {
                     // Convert the value to string
-                    if let Ok(val_str) = it.1.coerce_string() {
+                    if let Ok(val_str) = it.1.as_string() {
                         // Update the suggestion value
                         suggestion.value = val_str;
                     }
@@ -519,27 +481,14 @@ pub fn map_value_completions<'a>(
                 // Match `description` column
                 if it.0 == "description" {
                     // Convert the value to string
-                    if let Ok(desc_str) = it.1.coerce_string() {
+                    if let Ok(desc_str) = it.1.as_string() {
                         // Update the suggestion value
                         suggestion.description = Some(desc_str);
                     }
                 }
-
-                // Match `style` column
-                if it.0 == "style" {
-                    // Convert the value to string
-                    suggestion.style = match it.1 {
-                        Value::String { val, .. } => Some(lookup_ansi_color_style(val)),
-                        Value::Record { .. } => Some(color_record_to_nustyle(it.1)),
-                        _ => None,
-                    };
-                }
             });
 
-            return Some(SemanticSuggestion {
-                suggestion,
-                kind: Some(SuggestionKind::Type(x.get_type())),
-            });
+            return Some(suggestion);
         }
 
         None
@@ -569,13 +518,8 @@ mod completer_tests {
             result.err().unwrap()
         );
 
-        let mut completer = NuCompleter::new(engine_state.into(), Arc::new(Stack::new()));
-        let dataset = [
-            ("1 bit-sh", true, "b", vec!["bit-shl", "bit-shr"]),
-            ("1.0 bit-sh", false, "b", vec![]),
-            ("1 m", true, "m", vec!["mod"]),
-            ("1.0 m", true, "m", vec!["mod"]),
-            ("\"a\" s", true, "s", vec!["starts-with"]),
+        let mut completer = NuCompleter::new(engine_state.into(), Stack::new());
+        let dataset = vec![
             ("sudo", false, "", Vec::new()),
             ("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]),
             (" sudo", false, "", Vec::new()),
@@ -596,13 +540,13 @@ mod completer_tests {
         // Test whether the result begins with the expected value
         result
             .iter()
-            .for_each(|x| assert!(x.suggestion.value.starts_with(begins_with)));
+            .for_each(|x| assert!(x.value.starts_with(begins_with)));
 
         // Test whether the result contains all the expected values
         assert_eq!(
             result
                 .iter()
-                .map(|x| expected_values.contains(&x.suggestion.value.as_str()))
+                .map(|x| expected_values.contains(&x.value.as_str()))
                 .filter(|x| *x)
                 .count(),
             expected_values.len(),
@@ -1,139 +1,69 @@
-use super::{completion_options::NuMatcher, MatchAlgorithm};
-use crate::completions::CompletionOptions;
-use nu_ansi_term::Style;
-use nu_engine::env_to_string;
-use nu_path::dots::expand_ndots;
-use nu_path::{expand_to_real_path, home_dir};
-use nu_protocol::{
-    engine::{EngineState, Stack, StateWorkingSet},
-    Span,
-};
-use nu_utils::get_ls_colors;
-use nu_utils::IgnoreCaseExt;
+use crate::completions::{matches, CompletionOptions};
+use nu_path::home_dir;
+use nu_protocol::{engine::StateWorkingSet, Span};
 use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};
 
-#[derive(Clone, Default)]
-pub struct PathBuiltFromString {
-    cwd: PathBuf,
-    parts: Vec<String>,
-    isdir: bool,
-}
-
-/// Recursively goes through paths that match a given `partial`.
-/// built: State struct for a valid matching path built so far.
-///
-/// `isdir`: whether the current partial path has a trailing slash.
-/// Parsing a path string into a pathbuf loses that bit of information.
-///
-/// want_directory: Whether we want only directories as completion matches.
-/// Some commands like `cd` can only be run on directories whereas others
-/// like `ls` can be run on regular files as well.
 fn complete_rec(
-    partial: &[&str],
-    built_paths: &[PathBuiltFromString],
+    partial: &[String],
+    cwd: &Path,
     options: &CompletionOptions,
-    want_directory: bool,
+    dir: bool,
     isdir: bool,
-) -> Vec<PathBuiltFromString> {
-    if let Some((&base, rest)) = partial.split_first() {
-        if base.chars().all(|c| c == '.') && (isdir || !rest.is_empty()) {
-            let built_paths: Vec<_> = built_paths
-                .iter()
-                .map(|built| {
-                    let mut built = built.clone();
-                    built.parts.push(base.to_string());
-                    built.isdir = true;
-                    built
-                })
-                .collect();
-            return complete_rec(rest, &built_paths, options, want_directory, isdir);
-        }
-    }
-
-    let prefix = partial.first().unwrap_or(&"");
-    let mut matcher = NuMatcher::new(prefix, options.clone());
-
-    for built in built_paths {
-        let mut path = built.cwd.clone();
-        for part in &built.parts {
-            path.push(part);
-        }
-
-        let Ok(result) = path.read_dir() else {
-            continue;
-        };
-
+) -> Vec<PathBuf> {
+    let mut completions = vec![];
+
+    if let Ok(result) = cwd.read_dir() {
         for entry in result.filter_map(|e| e.ok()) {
             let entry_name = entry.file_name().to_string_lossy().into_owned();
-            let entry_isdir = entry.path().is_dir();
-            let mut built = built.clone();
-            built.parts.push(entry_name.clone());
-            built.isdir = entry_isdir;
-
-            if !want_directory || entry_isdir {
-                matcher.add(entry_name.clone(), (entry_name, built));
-            }
-        }
-    }
-
-    let mut completions = vec![];
-    for (entry_name, built) in matcher.results() {
-        match partial.split_first() {
+            let path = entry.path();
+
+            if !dir || path.is_dir() {
+                match partial.first() {
+                    Some(base) if matches(base, &entry_name, options) => {
+                        let partial = &partial[1..];
+                        if !partial.is_empty() || isdir {
+                            completions.extend(complete_rec(partial, &path, options, dir, isdir))
+                        } else {
+                            completions.push(path)
|
}
|
||||||
Some((base, rest)) => {
|
|
||||||
// We use `isdir` to confirm that the current component has
|
|
||||||
// at least one next component or a slash.
|
|
||||||
// Serves as confirmation to ignore longer completions for
|
|
||||||
// components in between.
|
|
||||||
if !rest.is_empty() || isdir {
|
|
||||||
completions.extend(complete_rec(
|
|
||||||
rest,
|
|
||||||
&[built],
|
|
||||||
options,
|
|
||||||
want_directory,
|
|
||||||
isdir,
|
|
||||||
));
|
|
||||||
} else {
|
|
||||||
completions.push(built);
|
|
||||||
}
|
|
||||||
|
|
||||||
// For https://github.com/nushell/nushell/issues/13204
|
|
||||||
if isdir && options.match_algorithm == MatchAlgorithm::Prefix {
|
|
||||||
let exact_match = if options.case_sensitive {
|
|
||||||
entry_name.eq(base)
|
|
||||||
} else {
|
|
||||||
entry_name.to_folded_case().eq(&base.to_folded_case())
|
|
||||||
};
|
|
||||||
if exact_match {
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
|
None => completions.push(path),
|
||||||
|
_ => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => {
|
|
||||||
completions.push(built);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
completions
|
completions
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum OriginalCwd {
|
enum OriginalCwd {
|
||||||
None,
|
None,
|
||||||
Home,
|
Home(PathBuf),
|
||||||
Prefix(String),
|
Some(PathBuf),
|
||||||
|
// referencing a single local file
|
||||||
|
Local(PathBuf),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl OriginalCwd {
|
impl OriginalCwd {
|
||||||
fn apply(&self, mut p: PathBuiltFromString, path_separator: char) -> String {
|
fn apply(&self, p: &Path) -> String {
|
||||||
match self {
|
let mut ret = match self {
|
||||||
Self::None => {}
|
Self::None => p.to_string_lossy().into_owned(),
|
||||||
Self::Home => p.parts.insert(0, "~".to_string()),
|
Self::Some(base) => pathdiff::diff_paths(p, base)
|
||||||
Self::Prefix(s) => p.parts.insert(0, s.clone()),
|
.unwrap_or(p.to_path_buf())
|
||||||
|
.to_string_lossy()
|
||||||
|
.into_owned(),
|
||||||
|
Self::Home(home) => match p.strip_prefix(home) {
|
||||||
|
Ok(suffix) => format!("~{}{}", SEP, suffix.to_string_lossy()),
|
||||||
|
_ => p.to_string_lossy().into_owned(),
|
||||||
|
},
|
||||||
|
Self::Local(base) => Path::new(".")
|
||||||
|
.join(pathdiff::diff_paths(p, base).unwrap_or(p.to_path_buf()))
|
||||||
|
.to_string_lossy()
|
||||||
|
.into_owned(),
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut ret = p.parts.join(&path_separator.to_string());
|
if p.is_dir() {
|
||||||
if p.isdir {
|
ret.push(SEP);
|
||||||
ret.push(path_separator);
|
|
||||||
}
|
}
|
||||||
ret
|
ret
|
||||||
}
|
}
|
||||||
@ -153,157 +83,79 @@ fn surround_remove(partial: &str) -> String {
|
|||||||
partial.to_string()
|
partial.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct FileSuggestion {
|
|
||||||
pub span: nu_protocol::Span,
|
|
||||||
pub path: String,
|
|
||||||
pub style: Option<Style>,
|
|
||||||
pub cwd: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// # Parameters
|
|
||||||
/// * `cwds` - A list of directories in which to search. The only reason this isn't a single string
|
|
||||||
/// is because dotnu_completions searches in multiple directories at once
|
|
||||||
pub fn complete_item(
|
pub fn complete_item(
|
||||||
want_directory: bool,
|
want_directory: bool,
|
||||||
span: nu_protocol::Span,
|
span: nu_protocol::Span,
|
||||||
partial: &str,
|
partial: &str,
|
||||||
cwds: &[impl AsRef<str>],
|
cwd: &str,
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
engine_state: &EngineState,
|
) -> Vec<(nu_protocol::Span, String)> {
|
||||||
stack: &Stack,
|
let partial = surround_remove(partial);
|
||||||
) -> Vec<FileSuggestion> {
|
let isdir = partial.ends_with(is_separator);
|
||||||
let cleaned_partial = surround_remove(partial);
|
let cwd_pathbuf = Path::new(cwd).to_path_buf();
|
||||||
let isdir = cleaned_partial.ends_with(is_separator);
|
|
||||||
let expanded_partial = expand_ndots(Path::new(&cleaned_partial));
|
|
||||||
let should_collapse_dots = expanded_partial != Path::new(&cleaned_partial);
|
|
||||||
let mut partial = expanded_partial.to_string_lossy().to_string();
|
|
||||||
|
|
||||||
#[cfg(unix)]
|
|
||||||
let path_separator = SEP;
|
|
||||||
#[cfg(windows)]
|
|
||||||
let path_separator = cleaned_partial
|
|
||||||
.chars()
|
|
||||||
.rfind(|c: &char| is_separator(*c))
|
|
||||||
.unwrap_or(SEP);
|
|
||||||
|
|
||||||
// Handle the trailing dot case
|
|
||||||
if cleaned_partial.ends_with(&format!("{path_separator}.")) {
|
|
||||||
partial.push_str(&format!("{path_separator}."));
|
|
||||||
}
|
|
||||||
|
|
||||||
let cwd_pathbufs: Vec<_> = cwds
|
|
||||||
.iter()
|
|
||||||
.map(|cwd| Path::new(cwd.as_ref()).to_path_buf())
|
|
||||||
.collect();
|
|
||||||
let ls_colors = (engine_state.config.completions.use_ls_colors
|
|
||||||
&& engine_state.config.use_ansi_coloring)
|
|
||||||
.then(|| {
|
|
||||||
let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
|
|
||||||
Some(v) => env_to_string("LS_COLORS", v, engine_state, stack).ok(),
|
|
||||||
None => None,
|
|
||||||
};
|
|
||||||
get_ls_colors(ls_colors_env_str)
|
|
||||||
});
|
|
||||||
|
|
||||||
let mut cwds = cwd_pathbufs.clone();
|
|
||||||
let mut prefix_len = 0;
|
|
||||||
let mut original_cwd = OriginalCwd::None;
|
let mut original_cwd = OriginalCwd::None;
|
||||||
|
|
||||||
let mut components = Path::new(&partial).components().peekable();
|
let mut components = Path::new(&partial).components().peekable();
|
||||||
match components.peek().cloned() {
|
let mut cwd = match components.peek().cloned() {
|
||||||
Some(c @ Component::Prefix(..)) => {
|
Some(c @ Component::Prefix(..)) => {
|
||||||
// windows only by definition
|
// windows only by definition
|
||||||
cwds = vec![[c, Component::RootDir].iter().collect()];
|
components.next();
|
||||||
prefix_len = c.as_os_str().len();
|
if let Some(Component::RootDir) = components.peek().cloned() {
|
||||||
original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned());
|
components.next();
|
||||||
|
};
|
||||||
|
[c, Component::RootDir].iter().collect()
|
||||||
}
|
}
|
||||||
Some(c @ Component::RootDir) => {
|
Some(c @ Component::RootDir) => {
|
||||||
// This is kind of a hack. When joining an empty string with the rest,
|
components.next();
|
||||||
// we add the slash automagically
|
PathBuf::from(c.as_os_str())
|
||||||
cwds = vec![PathBuf::from(c.as_os_str())];
|
|
||||||
prefix_len = 1;
|
|
||||||
original_cwd = OriginalCwd::Prefix(String::new());
|
|
||||||
}
|
}
|
||||||
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
|
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
|
||||||
cwds = home_dir()
|
components.next();
|
||||||
.map(|dir| vec![dir.into()])
|
original_cwd = OriginalCwd::Home(home_dir().unwrap_or(cwd_pathbuf.clone()));
|
||||||
.unwrap_or(cwd_pathbufs);
|
home_dir().unwrap_or(cwd_pathbuf)
|
||||||
prefix_len = 1;
|
}
|
||||||
original_cwd = OriginalCwd::Home;
|
Some(Component::CurDir) => {
|
||||||
|
components.next();
|
||||||
|
original_cwd = match components.peek().cloned() {
|
||||||
|
Some(Component::Normal(_)) | None => OriginalCwd::Local(cwd_pathbuf.clone()),
|
||||||
|
_ => OriginalCwd::Some(cwd_pathbuf.clone()),
|
||||||
|
};
|
||||||
|
cwd_pathbuf
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
original_cwd = OriginalCwd::Some(cwd_pathbuf.clone());
|
||||||
|
cwd_pathbuf
|
||||||
}
|
}
|
||||||
_ => {}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let after_prefix = &partial[prefix_len..];
|
let mut partial = vec![];
|
||||||
let partial: Vec<_> = after_prefix
|
|
||||||
.strip_prefix(is_separator)
|
|
||||||
.unwrap_or(after_prefix)
|
|
||||||
.split(is_separator)
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
complete_rec(
|
for component in components {
|
||||||
partial.as_slice(),
|
match component {
|
||||||
&cwds
|
Component::Prefix(..) => unreachable!(),
|
||||||
.into_iter()
|
Component::RootDir => unreachable!(),
|
||||||
.map(|cwd| PathBuiltFromString {
|
Component::CurDir => {}
|
||||||
cwd,
|
Component::ParentDir => {
|
||||||
parts: Vec::new(),
|
if partial.pop().is_none() {
|
||||||
isdir: false,
|
cwd.pop();
|
||||||
})
|
}
|
||||||
.collect::<Vec<_>>(),
|
}
|
||||||
options,
|
Component::Normal(c) => partial.push(c.to_string_lossy().into_owned()),
|
||||||
want_directory,
|
|
||||||
isdir,
|
|
||||||
)
|
|
||||||
.into_iter()
|
|
||||||
.map(|mut p| {
|
|
||||||
if should_collapse_dots {
|
|
||||||
p = collapse_ndots(p);
|
|
||||||
}
|
}
|
||||||
let cwd = p.cwd.clone();
|
}
|
||||||
let path = original_cwd.apply(p, path_separator);
|
|
||||||
let style = ls_colors.as_ref().map(|lsc| {
|
complete_rec(partial.as_slice(), &cwd, options, want_directory, isdir)
|
||||||
lsc.style_for_path_with_metadata(
|
.into_iter()
|
||||||
&path,
|
.map(|p| (span, escape_path(original_cwd.apply(&p), want_directory)))
|
||||||
std::fs::symlink_metadata(expand_to_real_path(&path))
|
.collect()
|
||||||
.ok()
|
|
||||||
.as_ref(),
|
|
||||||
)
|
|
||||||
.map(lscolors::Style::to_nu_ansi_term_style)
|
|
||||||
.unwrap_or_default()
|
|
||||||
});
|
|
||||||
FileSuggestion {
|
|
||||||
span,
|
|
||||||
path: escape_path(path, want_directory),
|
|
||||||
style,
|
|
||||||
cwd,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fix files or folders with quotes or hashes
|
// Fix files or folders with quotes or hashes
|
||||||
pub fn escape_path(path: String, dir: bool) -> String {
|
pub fn escape_path(path: String, dir: bool) -> String {
|
||||||
// make glob pattern have the highest priority.
|
|
||||||
let glob_contaminated = path.contains(['[', '*', ']', '?']);
|
|
||||||
if glob_contaminated {
|
|
||||||
return if path.contains('\'') {
|
|
||||||
// decide to use double quote, also need to escape `"` in path
|
|
||||||
// or else users can't do anything with completed path either.
|
|
||||||
format!("\"{}\"", path.replace('"', r#"\""#))
|
|
||||||
} else {
|
|
||||||
format!("'{path}'")
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
|
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
|
||||||
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
|
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
|
||||||
let maybe_flag = path.starts_with('-');
|
let maybe_flag = path.starts_with('-');
|
||||||
let maybe_variable = path.starts_with('$');
|
|
||||||
let maybe_number = path.parse::<f64>().is_ok();
|
let maybe_number = path.parse::<f64>().is_ok();
|
||||||
if filename_contaminated || dirname_contaminated || maybe_flag || maybe_variable || maybe_number
|
if filename_contaminated || dirname_contaminated || maybe_flag || maybe_number {
|
||||||
{
|
|
||||||
format!("`{path}`")
|
format!("`{path}`")
|
||||||
} else {
|
} else {
|
||||||
path
|
path
|
||||||
@ -342,38 +194,3 @@ pub fn adjust_if_intermediate(
|
|||||||
readjusted,
|
readjusted,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Collapse multiple ".." components into n-dots.
|
|
||||||
///
|
|
||||||
/// It performs the reverse operation of `expand_ndots`, collapsing sequences of ".." into n-dots,
|
|
||||||
/// such as "..." and "....".
|
|
||||||
///
|
|
||||||
/// The resulting path will use platform-specific path separators, regardless of what path separators were used in the input.
|
|
||||||
fn collapse_ndots(path: PathBuiltFromString) -> PathBuiltFromString {
|
|
||||||
let mut result = PathBuiltFromString {
|
|
||||||
parts: Vec::with_capacity(path.parts.len()),
|
|
||||||
isdir: path.isdir,
|
|
||||||
cwd: path.cwd,
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut dot_count = 0;
|
|
||||||
|
|
||||||
for part in path.parts {
|
|
||||||
if part == ".." {
|
|
||||||
dot_count += 1;
|
|
||||||
} else {
|
|
||||||
if dot_count > 0 {
|
|
||||||
result.parts.push(".".repeat(dot_count + 1));
|
|
||||||
dot_count = 0;
|
|
||||||
}
|
|
||||||
result.parts.push(part);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add any remaining dots
|
|
||||||
if dot_count > 0 {
|
|
||||||
result.parts.push(".".repeat(dot_count + 1));
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
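The `collapse_ndots` helper removed in this hunk reverses `expand_ndots`: runs of ".." components become "...", "....", and so on. Below is a minimal standalone sketch of the same idea, operating on plain string components instead of `PathBuiltFromString`; the function name and the `main` usage are illustrative only, not the crate's actual API.

```rust
// Collapse n consecutive ".." path components into a single (n + 1)-dot
// component, mirroring the logic of the removed collapse_ndots above.
fn collapse_ndots(parts: Vec<String>) -> Vec<String> {
    let mut result = Vec::with_capacity(parts.len());
    let mut dot_count = 0;

    for part in parts {
        if part == ".." {
            dot_count += 1;
        } else {
            if dot_count > 0 {
                // Two ".." become "...", three become "....", etc.
                result.push(".".repeat(dot_count + 1));
                dot_count = 0;
            }
            result.push(part);
        }
    }
    // Flush any trailing run of "..".
    if dot_count > 0 {
        result.push(".".repeat(dot_count + 1));
    }
    result
}

fn main() {
    let parts = vec!["..".to_string(), "..".to_string(), "src".to_string()];
    // Prints ["...", "src"]
    println!("{:?}", collapse_ndots(parts));
}
```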
@@ -1,13 +1,18 @@
+use std::fmt::Display;
+
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str;
-use nu_protocol::{CompletionAlgorithm, CompletionSort};
-use nu_utils::IgnoreCaseExt;
-use std::{borrow::Cow, fmt::Display};
+use nu_protocol::CompletionAlgorithm;

-use super::SemanticSuggestion;
+#[derive(Copy, Clone)]
+pub enum SortBy {
+    LevenshteinDistance,
+    Ascending,
+    None,
+}

/// Describes how suggestions should be matched.
-#[derive(Copy, Clone, Debug, PartialEq)]
+#[derive(Copy, Clone, Debug)]
pub enum MatchAlgorithm {
    /// Only show suggestions which begin with the given input
    ///
@@ -22,156 +27,35 @@ pub enum MatchAlgorithm {
    Fuzzy,
}

-pub struct NuMatcher<T> {
-    options: CompletionOptions,
-    needle: String,
-    state: State<T>,
-}
-
-enum State<T> {
-    Prefix {
-        /// Holds (haystack, item)
-        items: Vec<(String, T)>,
-    },
-    Fuzzy {
-        matcher: Box<SkimMatcherV2>,
-        /// Holds (haystack, item, score)
-        items: Vec<(String, T, i64)>,
-    },
-}
-
-/// Filters and sorts suggestions
-impl<T> NuMatcher<T> {
-    /// # Arguments
-    ///
-    /// * `needle` - The text to search for
-    pub fn new(needle: impl AsRef<str>, options: CompletionOptions) -> NuMatcher<T> {
-        let orig_needle = trim_quotes_str(needle.as_ref());
-        let lowercase_needle = if options.case_sensitive {
-            orig_needle.to_owned()
-        } else {
-            orig_needle.to_folded_case()
-        };
-        match options.match_algorithm {
-            MatchAlgorithm::Prefix => NuMatcher {
-                options,
-                needle: lowercase_needle,
-                state: State::Prefix { items: Vec::new() },
-            },
-            MatchAlgorithm::Fuzzy => {
-                let mut matcher = SkimMatcherV2::default();
-                if options.case_sensitive {
-                    matcher = matcher.respect_case();
-                } else {
-                    matcher = matcher.ignore_case();
-                };
-                NuMatcher {
-                    options,
-                    needle: orig_needle.to_owned(),
-                    state: State::Fuzzy {
-                        matcher: Box::new(matcher),
-                        items: Vec::new(),
-                    },
-                }
-            }
-        }
-    }
-
-    /// Returns whether or not the haystack matches the needle. If it does, `item` is added
-    /// to the list of matches (if given).
-    ///
-    /// Helper to avoid code duplication between [NuMatcher::add] and [NuMatcher::matches].
-    fn matches_aux(&mut self, haystack: &str, item: Option<T>) -> bool {
+impl MatchAlgorithm {
+    /// Returns whether the `needle` search text matches the given `haystack`.
+    pub fn matches_str(&self, haystack: &str, needle: &str) -> bool {
        let haystack = trim_quotes_str(haystack);
-        match &mut self.state {
-            State::Prefix { items } => {
-                let haystack_folded = if self.options.case_sensitive {
-                    Cow::Borrowed(haystack)
-                } else {
-                    Cow::Owned(haystack.to_folded_case())
-                };
-                let matches = if self.options.positional {
-                    haystack_folded.starts_with(self.needle.as_str())
-                } else {
-                    haystack_folded.contains(self.needle.as_str())
-                };
-                if matches {
-                    if let Some(item) = item {
-                        items.push((haystack.to_string(), item));
-                    }
-                }
-                matches
-            }
-            State::Fuzzy { items, matcher } => {
-                let Some(score) = matcher.fuzzy_match(haystack, &self.needle) else {
-                    return false;
-                };
-                if let Some(item) = item {
-                    items.push((haystack.to_string(), item, score));
-                }
-                true
+        let needle = trim_quotes_str(needle);
+        match *self {
+            MatchAlgorithm::Prefix => haystack.starts_with(needle),
+            MatchAlgorithm::Fuzzy => {
+                let matcher = SkimMatcherV2::default();
+                matcher.fuzzy_match(haystack, needle).is_some()
            }
        }
    }

-    /// Add the given item if the given haystack matches the needle.
-    ///
-    /// Returns whether the item was added.
-    pub fn add(&mut self, haystack: impl AsRef<str>, item: T) -> bool {
-        self.matches_aux(haystack.as_ref(), Some(item))
-    }
-
-    /// Returns whether the haystack matches the needle.
-    pub fn matches(&mut self, haystack: &str) -> bool {
-        self.matches_aux(haystack, None)
-    }
-
-    /// Get all the items that matched (sorted)
-    pub fn results(self) -> Vec<T> {
-        match self.state {
-            State::Prefix { mut items, .. } => {
-                items.sort_by(|(haystack1, _), (haystack2, _)| {
-                    let cmp_sensitive = haystack1.cmp(haystack2);
-                    if self.options.case_sensitive {
-                        cmp_sensitive
-                    } else {
-                        haystack1
-                            .to_folded_case()
-                            .cmp(&haystack2.to_folded_case())
-                            .then(cmp_sensitive)
-                    }
-                });
-                items.into_iter().map(|(_, item)| item).collect::<Vec<_>>()
-            }
-            State::Fuzzy { mut items, .. } => {
-                match self.options.sort {
-                    CompletionSort::Alphabetical => {
-                        items.sort_by(|(haystack1, _, _), (haystack2, _, _)| {
-                            haystack1.cmp(haystack2)
-                        });
-                    }
-                    CompletionSort::Smart => {
-                        items.sort_by(|(haystack1, _, score1), (haystack2, _, score2)| {
-                            score2.cmp(score1).then(haystack1.cmp(haystack2))
-                        });
-                    }
-                }
-                items
-                    .into_iter()
-                    .map(|(_, item, _)| item)
-                    .collect::<Vec<_>>()
+    /// Returns whether the `needle` search text matches the given `haystack`.
+    pub fn matches_u8(&self, haystack: &[u8], needle: &[u8]) -> bool {
+        match *self {
+            MatchAlgorithm::Prefix => haystack.starts_with(needle),
+            MatchAlgorithm::Fuzzy => {
+                let haystack_str = String::from_utf8_lossy(haystack);
+                let needle_str = String::from_utf8_lossy(needle);
+
+                let matcher = SkimMatcherV2::default();
+                matcher.fuzzy_match(&haystack_str, &needle_str).is_some()
            }
        }
    }
}

-impl NuMatcher<SemanticSuggestion> {
-    pub fn add_semantic_suggestion(&mut self, sugg: SemanticSuggestion) -> bool {
-        let value = sugg.suggestion.value.to_string();
-        self.add(value, sugg)
-    }
-}
-
impl From<CompletionAlgorithm> for MatchAlgorithm {
    fn from(value: CompletionAlgorithm) -> Self {
        match value {
@@ -212,8 +96,8 @@ impl std::error::Error for InvalidMatchAlgorithm {}
pub struct CompletionOptions {
    pub case_sensitive: bool,
    pub positional: bool,
+    pub sort_by: SortBy,
    pub match_algorithm: MatchAlgorithm,
-    pub sort: CompletionSort,
}

impl Default for CompletionOptions {
@@ -221,57 +105,43 @@ impl Default for CompletionOptions {
        Self {
            case_sensitive: true,
            positional: true,
+            sort_by: SortBy::Ascending,
            match_algorithm: MatchAlgorithm::Prefix,
-            sort: Default::default(),
        }
    }
}

#[cfg(test)]
mod test {
-    use rstest::rstest;
-
-    use super::{CompletionOptions, MatchAlgorithm, NuMatcher};
-
-    #[rstest]
-    #[case(MatchAlgorithm::Prefix, "example text", "", true)]
-    #[case(MatchAlgorithm::Prefix, "example text", "examp", true)]
-    #[case(MatchAlgorithm::Prefix, "example text", "text", false)]
-    #[case(MatchAlgorithm::Fuzzy, "example text", "", true)]
-    #[case(MatchAlgorithm::Fuzzy, "example text", "examp", true)]
-    #[case(MatchAlgorithm::Fuzzy, "example text", "ext", true)]
-    #[case(MatchAlgorithm::Fuzzy, "example text", "mplxt", true)]
-    #[case(MatchAlgorithm::Fuzzy, "example text", "mpp", false)]
-    fn match_algorithm_simple(
-        #[case] match_algorithm: MatchAlgorithm,
-        #[case] haystack: &str,
-        #[case] needle: &str,
-        #[case] should_match: bool,
-    ) {
-        let options = CompletionOptions {
-            match_algorithm,
-            ..Default::default()
-        };
-        let mut matcher = NuMatcher::new(needle, options);
-        matcher.add(haystack, haystack);
-        if should_match {
-            assert_eq!(vec![haystack], matcher.results());
-        } else {
-            assert_ne!(vec![haystack], matcher.results());
-        }
+    use super::MatchAlgorithm;
+
+    #[test]
+    fn match_algorithm_prefix() {
+        let algorithm = MatchAlgorithm::Prefix;
+
+        assert!(algorithm.matches_str("example text", ""));
+        assert!(algorithm.matches_str("example text", "examp"));
+        assert!(!algorithm.matches_str("example text", "text"));
+
+        assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
+        assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
+        assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
    }

    #[test]
-    fn match_algorithm_fuzzy_sort_score() {
-        let options = CompletionOptions {
-            match_algorithm: MatchAlgorithm::Fuzzy,
-            ..Default::default()
-        };
-        let mut matcher = NuMatcher::new("fob", options);
-        for item in ["foo/bar", "fob", "foo bar"] {
-            matcher.add(item, item);
-        }
-        // Sort by score, then in alphabetical order
-        assert_eq!(vec!["fob", "foo bar", "foo/bar"], matcher.results());
+    fn match_algorithm_fuzzy() {
+        let algorithm = MatchAlgorithm::Fuzzy;
+
+        assert!(algorithm.matches_str("example text", ""));
+        assert!(algorithm.matches_str("example text", "examp"));
+        assert!(algorithm.matches_str("example text", "ext"));
+        assert!(algorithm.matches_str("example text", "mplxt"));
+        assert!(!algorithm.matches_str("example text", "mpp"));
+
+        assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
+        assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
+        assert!(algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
+        assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 3]));
+        assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 2]));
    }
}
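Both sides of this hunk implement the same two matching modes with different plumbing: prefix matching against an optionally case-folded needle, and fuzzy matching via the `fuzzy_matcher` crate that the file itself imports. Below is a minimal sketch of those modes under those assumptions; `to_lowercase` stands in for nu_utils' `to_folded_case`, and the free function names are illustrative.

```rust
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};

// Prefix mode: the haystack must start with the needle, optionally after
// folding case on both sides.
fn prefix_matches(haystack: &str, needle: &str, case_sensitive: bool) -> bool {
    if case_sensitive {
        haystack.starts_with(needle)
    } else {
        haystack.to_lowercase().starts_with(&needle.to_lowercase())
    }
}

// Fuzzy mode: the matcher returns a score when every needle character appears
// in order somewhere in the haystack, and None otherwise.
fn fuzzy_matches(haystack: &str, needle: &str) -> bool {
    SkimMatcherV2::default().fuzzy_match(haystack, needle).is_some()
}

fn main() {
    assert!(prefix_matches("example text", "examp", true));
    assert!(!prefix_matches("example text", "text", true));
    assert!(fuzzy_matches("example text", "mplxt"));
    assert!(!fuzzy_matches("example text", "mpp"));
}
```

The assertions mirror the test cases shown in the hunk, so the two modes behave the same way in either version of the file.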
@@ -1,29 +1,33 @@
-use crate::completions::{
-    completer::map_value_completions, Completer, CompletionOptions, SemanticSuggestion,
-};
+use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
use nu_engine::eval_call;
use nu_protocol::{
    ast::{Argument, Call, Expr, Expression},
-    debugger::WithoutDebug,
-    engine::{Stack, StateWorkingSet},
-    DeclId, PipelineData, Span, Type, Value,
+    engine::{EngineState, Stack, StateWorkingSet},
+    PipelineData, Span, Type, Value,
};
+use nu_utils::IgnoreCaseExt;
+use reedline::Suggestion;
use std::collections::HashMap;
+use std::sync::Arc;

-use super::completion_options::NuMatcher;
+use super::completer::map_value_completions;

pub struct CustomCompletion {
+    engine_state: Arc<EngineState>,
    stack: Stack,
-    decl_id: DeclId,
+    decl_id: usize,
    line: String,
+    sort_by: SortBy,
}

impl CustomCompletion {
-    pub fn new(stack: Stack, decl_id: DeclId, line: String) -> Self {
+    pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self {
        Self {
+            engine_state,
            stack,
            decl_id,
            line,
+            sort_by: SortBy::None,
        }
    }
}
@@ -31,105 +35,137 @@ impl CustomCompletion {
impl Completer for CustomCompletion {
    fn fetch(
        &mut self,
-        working_set: &StateWorkingSet,
-        _stack: &Stack,
-        prefix: &[u8],
+        _: &StateWorkingSet,
+        prefix: Vec<u8>,
        span: Span,
        offset: usize,
        pos: usize,
        completion_options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion> {
+    ) -> Vec<Suggestion> {
        // Line position
        let line_pos = pos - offset;

        // Call custom declaration
-        let result = eval_call::<WithoutDebug>(
-            working_set.permanent_state,
+        let result = eval_call(
+            &self.engine_state,
            &mut self.stack,
            &Call {
                decl_id: self.decl_id,
                head: span,
                arguments: vec![
-                    Argument::Positional(Expression::new_unknown(
-                        Expr::String(self.line.clone()),
-                        Span::unknown(),
-                        Type::String,
-                    )),
-                    Argument::Positional(Expression::new_unknown(
-                        Expr::Int(line_pos as i64),
-                        Span::unknown(),
-                        Type::Int,
-                    )),
+                    Argument::Positional(Expression {
+                        span: Span::unknown(),
+                        ty: Type::String,
+                        expr: Expr::String(self.line.clone()),
+                        custom_completion: None,
+                    }),
+                    Argument::Positional(Expression {
+                        span: Span::unknown(),
+                        ty: Type::Int,
+                        expr: Expr::Int(line_pos as i64),
+                        custom_completion: None,
+                    }),
                ],
+                redirect_stdout: true,
+                redirect_stderr: true,
                parser_info: HashMap::new(),
            },
            PipelineData::empty(),
        );

        let mut custom_completion_options = None;
-        let mut should_sort = true;

        // Parse result
        let suggestions = result
-            .and_then(|data| data.into_value(span))
-            .map(|value| match &value {
-                Value::Record { val, .. } => {
-                    let completions = val
-                        .get("completions")
-                        .and_then(|val| {
-                            val.as_list()
-                                .ok()
-                                .map(|it| map_value_completions(it.iter(), span, offset))
-                        })
-                        .unwrap_or_default();
-                    let options = val.get("options");
+            .map(|pd| {
+                let value = pd.into_value(span);
+                match &value {
+                    Value::Record { val, .. } => {
+                        let completions = val
+                            .get("completions")
+                            .and_then(|val| {
+                                val.as_list()
+                                    .ok()
+                                    .map(|it| map_value_completions(it.iter(), span, offset))
+                            })
+                            .unwrap_or_default();
+                        let options = val.get("options");

                    if let Some(Value::Record { val: options, .. }) = &options {
-                        if let Some(sort) = options.get("sort").and_then(|val| val.as_bool().ok()) {
-                            should_sort = sort;
-                        }
-
-                        custom_completion_options = Some(CompletionOptions {
-                            case_sensitive: options
-                                .get("case_sensitive")
-                                .and_then(|val| val.as_bool().ok())
-                                .unwrap_or(true),
-                            positional: options
-                                .get("positional")
-                                .and_then(|val| val.as_bool().ok())
-                                .unwrap_or(completion_options.positional),
-                            match_algorithm: match options.get("completion_algorithm") {
-                                Some(option) => option
-                                    .coerce_string()
-                                    .ok()
-                                    .and_then(|option| option.try_into().ok())
-                                    .unwrap_or(completion_options.match_algorithm),
-                                None => completion_options.match_algorithm,
-                            },
-                            sort: completion_options.sort,
-                        });
+                        let should_sort = options
+                            .get("sort")
+                            .and_then(|val| val.as_bool().ok())
+                            .unwrap_or(false);
+
+                        if should_sort {
+                            self.sort_by = SortBy::Ascending;
+                        }
+
+                        custom_completion_options = Some(CompletionOptions {
+                            case_sensitive: options
+                                .get("case_sensitive")
+                                .and_then(|val| val.as_bool().ok())
+                                .unwrap_or(true),
+                            positional: options
+                                .get("positional")
+                                .and_then(|val| val.as_bool().ok())
+                                .unwrap_or(true),
+                            sort_by: if should_sort {
+                                SortBy::Ascending
+                            } else {
+                                SortBy::None
+                            },
+                            match_algorithm: match options.get("completion_algorithm") {
+                                Some(option) => option
+                                    .as_string()
+                                    .ok()
+                                    .and_then(|option| option.try_into().ok())
+                                    .unwrap_or(MatchAlgorithm::Prefix),
+                                None => completion_options.match_algorithm,
+                            },
+                        });
                    }

-                    completions
-                }
-                Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
-                _ => vec![],
+                        completions
+                    }
+                    Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
+                    _ => vec![],
+                }
            })
            .unwrap_or_default();

-        let options = custom_completion_options.unwrap_or(completion_options.clone());
-        let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), options);
-
-        if should_sort {
-            for sugg in suggestions {
-                matcher.add_semantic_suggestion(sugg);
-            }
-            matcher.results()
+        if let Some(custom_completion_options) = custom_completion_options {
+            filter(&prefix, suggestions, &custom_completion_options)
        } else {
-            suggestions
-                .into_iter()
-                .filter(|sugg| matcher.matches(&sugg.suggestion.value))
-                .collect()
+            filter(&prefix, suggestions, completion_options)
        }
    }
+
+    fn get_sort_by(&self) -> SortBy {
+        self.sort_by
    }
}
+
+fn filter(prefix: &[u8], items: Vec<Suggestion>, options: &CompletionOptions) -> Vec<Suggestion> {
+    items
+        .into_iter()
+        .filter(|it| match options.match_algorithm {
+            MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
+                (true, true) => it.value.as_bytes().starts_with(prefix),
+                (true, false) => it.value.contains(std::str::from_utf8(prefix).unwrap_or("")),
+                (false, positional) => {
+                    let value = it.value.to_folded_case();
+                    let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
+                    if positional {
+                        value.starts_with(&prefix)
+                    } else {
+                        value.contains(&prefix)
+                    }
+                }
+            },
+            MatchAlgorithm::Fuzzy => options
+                .match_algorithm
+                .matches_u8(it.value.as_bytes(), prefix),
+        })
+        .collect()
}
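For the prefix branch of the `filter` helper added on the right-hand side, `case_sensitive` and `positional` combine as in the simplified sketch below. The function name is illustrative, and `to_lowercase` again stands in for the `to_folded_case` extension used in the real code.

```rust
// Prefix-mode matching rules: `positional` chooses between starts_with and
// contains, and case-insensitive matching folds both sides first.
fn prefix_mode_matches(value: &str, prefix: &str, case_sensitive: bool, positional: bool) -> bool {
    let (value, prefix) = if case_sensitive {
        (value.to_string(), prefix.to_string())
    } else {
        (value.to_lowercase(), prefix.to_lowercase())
    };
    if positional {
        value.starts_with(&prefix)
    } else {
        value.contains(&prefix)
    }
}

fn main() {
    assert!(prefix_mode_matches("git-status", "git", true, true));
    assert!(prefix_mode_matches("git-status", "STATUS", false, false));
    assert!(!prefix_mode_matches("git-status", "status", true, true));
}
```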
@@ -1,22 +1,23 @@
use crate::completions::{
    completion_common::{adjust_if_intermediate, complete_item, AdjustView},
-    Completer, CompletionOptions,
+    Completer, CompletionOptions, SortBy,
};
use nu_protocol::{
-    engine::{EngineState, Stack, StateWorkingSet},
-    Span,
+    engine::{EngineState, StateWorkingSet},
+    levenshtein_distance, Span,
};
use reedline::Suggestion;
-use std::path::Path;
+use std::path::{Path, MAIN_SEPARATOR as SEP};
+use std::sync::Arc;

-use super::{completion_common::FileSuggestion, SemanticSuggestion};
-
-#[derive(Clone, Default)]
-pub struct DirectoryCompletion {}
+#[derive(Clone)]
+pub struct DirectoryCompletion {
+    engine_state: Arc<EngineState>,
+}

impl DirectoryCompletion {
-    pub fn new() -> Self {
-        Self::default()
+    pub fn new(engine_state: Arc<EngineState>) -> Self {
+        Self { engine_state }
    }
}

@@ -24,47 +25,69 @@ impl Completer for DirectoryCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
-        stack: &Stack,
-        prefix: &[u8],
+        prefix: Vec<u8>,
        span: Span,
        offset: usize,
-        _pos: usize,
+        _: usize,
        options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion> {
-        let AdjustView { prefix, span, .. } = adjust_if_intermediate(prefix, working_set, span);
+    ) -> Vec<Suggestion> {
+        let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);

        // Filter only the folders
-        #[allow(deprecated)]
-        let items: Vec<_> = directory_completion(
+        let output: Vec<_> = directory_completion(
            span,
            &prefix,
-            &working_set.permanent_state.current_work_dir(),
+            &self.engine_state.current_work_dir(),
            options,
-            working_set.permanent_state,
-            stack,
        )
        .into_iter()
-        .map(move |x| SemanticSuggestion {
-            suggestion: Suggestion {
-                value: x.path,
-                style: x.style,
-                span: reedline::Span {
-                    start: x.span.start - offset,
-                    end: x.span.end - offset,
-                },
-                ..Suggestion::default()
-            },
-            // TODO????
-            kind: None,
+        .map(move |x| Suggestion {
+            value: x.1,
+            description: None,
+            extra: None,
+            span: reedline::Span {
+                start: x.0.start - offset,
+                end: x.0.end - offset,
+            },
+            append_whitespace: false,
        })
        .collect();

-        // Separate the results between hidden and non hidden
-        let mut hidden: Vec<SemanticSuggestion> = vec![];
-        let mut non_hidden: Vec<SemanticSuggestion> = vec![];
-
-        for item in items.into_iter() {
-            let item_path = Path::new(&item.suggestion.value);
+        output
+    }
+
+    // Sort results prioritizing the non hidden folders
+    fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
+        let prefix_str = String::from_utf8_lossy(&prefix).to_string();
+
+        // Sort items
+        let mut sorted_items = items;
+
+        match self.get_sort_by() {
+            SortBy::Ascending => {
+                sorted_items.sort_by(|a, b| {
+                    // Ignore trailing slashes in folder names when sorting
+                    a.value
+                        .trim_end_matches(SEP)
+                        .cmp(b.value.trim_end_matches(SEP))
+                });
+            }
+            SortBy::LevenshteinDistance => {
+                sorted_items.sort_by(|a, b| {
+                    let a_distance = levenshtein_distance(&prefix_str, &a.value);
+                    let b_distance = levenshtein_distance(&prefix_str, &b.value);
+                    a_distance.cmp(&b_distance)
+                });
+            }
+            _ => (),
+        }
+
+        // Separate the results between hidden and non hidden
+        let mut hidden: Vec<Suggestion> = vec![];
+        let mut non_hidden: Vec<Suggestion> = vec![];
+
+        for item in sorted_items.into_iter() {
+            let item_path = Path::new(&item.value);

            if let Some(value) = item_path.file_name() {
                if let Some(value) = value.to_str() {
@@ -89,8 +112,6 @@ pub fn directory_completion(
    partial: &str,
    cwd: &str,
    options: &CompletionOptions,
-    engine_state: &EngineState,
-    stack: &Stack,
-) -> Vec<FileSuggestion> {
-    complete_item(true, span, partial, &[cwd], options, engine_state, stack)
+) -> Vec<(nu_protocol::Span, String)> {
+    complete_item(true, span, partial, cwd, options)
}
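The Ascending sort branch added above trims trailing separators before comparing, so a directory written as "bar/" sorts next to a plain "bar". A tiny illustration under those assumptions (the helper name is invented for the example, std only):

```rust
use std::path::MAIN_SEPARATOR;

// Sort completion values alphabetically while ignoring any trailing path
// separator, mirroring the Ascending branch of the sort method above.
fn sort_ignoring_trailing_sep(mut items: Vec<String>) -> Vec<String> {
    items.sort_by(|a, b| {
        a.trim_end_matches(MAIN_SEPARATOR)
            .cmp(b.trim_end_matches(MAIN_SEPARATOR))
    });
    items
}

fn main() {
    let sorted = sort_ignoring_trailing_sep(vec!["zoo/".into(), "bar/".into(), "baz".into()]);
    assert_eq!(sorted, vec!["bar/", "baz", "zoo/"]);
}
```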
@@ -1,34 +1,36 @@
-use crate::completions::{file_path_completion, Completer, CompletionOptions};
+use crate::completions::{file_path_completion, Completer, CompletionOptions, SortBy};
use nu_protocol::{
-    engine::{Stack, StateWorkingSet},
+    engine::{EngineState, StateWorkingSet},
    Span,
};
use reedline::Suggestion;
-use std::path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
+use std::{
+    path::{is_separator, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
+    sync::Arc,
+};

-use super::SemanticSuggestion;
-
-#[derive(Clone, Default)]
-pub struct DotNuCompletion {}
+#[derive(Clone)]
+pub struct DotNuCompletion {
+    engine_state: Arc<EngineState>,
+}

impl DotNuCompletion {
-    pub fn new() -> Self {
-        Self::default()
+    pub fn new(engine_state: Arc<EngineState>) -> Self {
+        Self { engine_state }
    }
}

impl Completer for DotNuCompletion {
    fn fetch(
        &mut self,
-        working_set: &StateWorkingSet,
-        stack: &Stack,
-        prefix: &[u8],
+        _: &StateWorkingSet,
+        prefix: Vec<u8>,
        span: Span,
        offset: usize,
-        _pos: usize,
+        _: usize,
        options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion> {
-        let prefix_str = String::from_utf8_lossy(prefix).replace('`', "");
+    ) -> Vec<Suggestion> {
+        let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");
        let mut search_dirs: Vec<String> = vec![];

        // If prefix_str is only a word we want to search in the current dir
@@ -41,25 +43,26 @@ impl Completer for DotNuCompletion {
        let mut is_current_folder = false;

        // Fetch the lib dirs
-        let lib_dirs: Vec<String> = if let Some(lib_dirs) = working_set.get_env_var("NU_LIB_DIRS") {
-            lib_dirs
-                .as_list()
-                .into_iter()
-                .flat_map(|it| {
-                    it.iter().map(|x| {
-                        x.to_path()
-                            .expect("internal error: failed to convert lib path")
-                    })
-                })
-                .map(|it| {
-                    it.into_os_string()
-                        .into_string()
-                        .expect("internal error: failed to convert OS path")
-                })
-                .collect()
-        } else {
-            vec![]
-        };
+        let lib_dirs: Vec<String> =
+            if let Some(lib_dirs) = self.engine_state.get_env_var("NU_LIB_DIRS") {
+                lib_dirs
+                    .as_list()
+                    .into_iter()
+                    .flat_map(|it| {
+                        it.iter().map(|x| {
+                            x.as_path()
+                                .expect("internal error: failed to convert lib path")
+                        })
+                    })
+                    .map(|it| {
+                        it.into_os_string()
+                            .into_string()
+                            .expect("internal error: failed to convert OS path")
+                    })
+                    .collect()
+            } else {
+                vec![]
+            };

        // Check if the base_dir is a folder
        // rsplit_once removes the separator
@@ -75,8 +78,7 @@ impl Completer for DotNuCompletion {
            partial = base_dir_partial;
        } else {
            // Fetch the current folder
-            #[allow(deprecated)]
-            let current_folder = working_set.permanent_state.current_work_dir();
+            let current_folder = self.engine_state.current_work_dir();
            is_current_folder = true;

            // Add the current folder and the lib dirs into the
@@ -87,44 +89,37 @@ impl Completer for DotNuCompletion {

        // Fetch the files filtering the ones that ends with .nu
        // and transform them into suggestions
-        let completions = file_path_completion(
-            span,
-            &partial,
-            &search_dirs.iter().map(|d| d.as_str()).collect::<Vec<_>>(),
-            options,
-            working_set.permanent_state,
-            stack,
-        );
-        completions
-            .into_iter()
-            .filter(move |it| {
-                // Different base dir, so we list the .nu files or folders
-                if !is_current_folder {
-                    it.path.ends_with(".nu") || it.path.ends_with(SEP)
-                } else {
-                    // Lib dirs, so we filter only the .nu files or directory modules
-                    if it.path.ends_with(SEP) {
-                        Path::new(&it.cwd).join(&it.path).join("mod.nu").exists()
-                    } else {
-                        it.path.ends_with(".nu")
-                    }
-                }
-            })
-            .map(move |x| SemanticSuggestion {
-                suggestion: Suggestion {
-                    value: x.path,
-                    style: x.style,
-                    span: reedline::Span {
-                        start: x.span.start - offset,
-                        end: x.span.end - offset,
-                    },
-                    append_whitespace: true,
-                    ..Suggestion::default()
-                },
-                // TODO????
-                kind: None,
-            })
-            .collect::<Vec<_>>()
+        let output: Vec<Suggestion> = search_dirs
+            .into_iter()
+            .flat_map(|it| {
+                file_path_completion(span, &partial, &it, options)
+                    .into_iter()
+                    .filter(|it| {
+                        // Different base dir, so we list the .nu files or folders
+                        if !is_current_folder {
+                            it.1.ends_with(".nu") || it.1.ends_with(SEP)
+                        } else {
+                            // Lib dirs, so we filter only the .nu files
+                            it.1.ends_with(".nu")
+                        }
+                    })
+                    .map(move |x| Suggestion {
+                        value: x.1,
+                        description: None,
+                        extra: None,
+                        span: reedline::Span {
+                            start: x.0.start - offset,
+                            end: x.0.end - offset,
+                        },
+                        append_whitespace: true,
+                    })
+            })
+            .collect();
+
+        output
+    }
+
+    fn get_sort_by(&self) -> SortBy {
+        SortBy::LevenshteinDistance
    }
}
@@ -1,22 +1,24 @@
use crate::completions::{
    completion_common::{adjust_if_intermediate, complete_item, AdjustView},
-    Completer, CompletionOptions,
+    Completer, CompletionOptions, SortBy,
};
use nu_protocol::{
-    engine::{EngineState, Stack, StateWorkingSet},
-    Span,
+    engine::{EngineState, StateWorkingSet},
+    levenshtein_distance, Span,
};
+use nu_utils::IgnoreCaseExt;
use reedline::Suggestion;
-use std::path::Path;
+use std::path::{Path, MAIN_SEPARATOR as SEP};
+use std::sync::Arc;

-use super::{completion_common::FileSuggestion, SemanticSuggestion};
-
-#[derive(Clone, Default)]
-pub struct FileCompletion {}
+#[derive(Clone)]
+pub struct FileCompletion {
+    engine_state: Arc<EngineState>,
+}

impl FileCompletion {
-    pub fn new() -> Self {
-        Self::default()
+    pub fn new(engine_state: Arc<EngineState>) -> Self {
+        Self { engine_state }
    }
}

@@ -24,53 +26,73 @@ impl Completer for FileCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
-        stack: &Stack,
-        prefix: &[u8],
+        prefix: Vec<u8>,
        span: Span,
        offset: usize,
-        _pos: usize,
+        _: usize,
        options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion> {
+    ) -> Vec<Suggestion> {
        let AdjustView {
            prefix,
            span,
            readjusted,
-        } = adjust_if_intermediate(prefix, working_set, span);
+        } = adjust_if_intermediate(&prefix, working_set, span);

-        #[allow(deprecated)]
-        let items: Vec<_> = complete_item(
+        let output: Vec<_> = complete_item(
            readjusted,
            span,
            &prefix,
-            &[&working_set.permanent_state.current_work_dir()],
+            &self.engine_state.current_work_dir(),
            options,
-            working_set.permanent_state,
-            stack,
        )
        .into_iter()
-        .map(move |x| SemanticSuggestion {
-            suggestion: Suggestion {
-                value: x.path,
-                style: x.style,
-                span: reedline::Span {
-                    start: x.span.start - offset,
-                    end: x.span.end - offset,
-                },
-                ..Suggestion::default()
-            },
-            // TODO????
-            kind: None,
+        .map(move |x| Suggestion {
+            value: x.1,
+            description: None,
+            extra: None,
+            span: reedline::Span {
+                start: x.0.start - offset,
+                end: x.0.end - offset,
+            },
+            append_whitespace: false,
        })
        .collect();

-        // Sort results prioritizing the non hidden folders
+        output
+    }
+
+    // Sort results prioritizing the non hidden folders
+    fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
+        let prefix_str = String::from_utf8_lossy(&prefix).to_string();
+
+        // Sort items
+        let mut sorted_items = items;
+
+        match self.get_sort_by() {
+            SortBy::Ascending => {
+                sorted_items.sort_by(|a, b| {
+                    // Ignore trailing slashes in folder names when sorting
+                    a.value
+                        .trim_end_matches(SEP)
+                        .cmp(b.value.trim_end_matches(SEP))
+                });
+            }
+            SortBy::LevenshteinDistance => {
+                sorted_items.sort_by(|a, b| {
+                    let a_distance = levenshtein_distance(&prefix_str, &a.value);
+                    let b_distance = levenshtein_distance(&prefix_str, &b.value);
+                    a_distance.cmp(&b_distance)
+                });
+            }
+            _ => (),
+        }

        // Separate the results between hidden and non hidden
-        let mut hidden: Vec<SemanticSuggestion> = vec![];
-        let mut non_hidden: Vec<SemanticSuggestion> = vec![];
+        let mut hidden: Vec<Suggestion> = vec![];
+        let mut non_hidden: Vec<Suggestion> = vec![];

-        for item in items.into_iter() {
-            let item_path = Path::new(&item.suggestion.value);
+        for item in sorted_items.into_iter() {
+            let item_path = Path::new(&item.value);

            if let Some(value) = item_path.file_name() {
                if let Some(value) = value.to_str() {
@@ -93,10 +115,19 @@ impl Completer for FileCompletion {
pub fn file_path_completion(
    span: nu_protocol::Span,
    partial: &str,
-    cwds: &[impl AsRef<str>],
+    cwd: &str,
    options: &CompletionOptions,
-    engine_state: &EngineState,
-    stack: &Stack,
-) -> Vec<FileSuggestion> {
-    complete_item(false, span, partial, cwds, options, engine_state, stack)
+) -> Vec<(nu_protocol::Span, String)> {
+    complete_item(false, span, partial, cwd, options)
}
+
+pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
+    // Check for case sensitive
+    if !options.case_sensitive {
+        return options
+            .match_algorithm
+            .matches_str(&from.to_folded_case(), &partial.to_folded_case());
+    }
+
+    options.match_algorithm.matches_str(from, partial)
+}
@@ -1,12 +1,11 @@
-use crate::completions::{completion_options::NuMatcher, Completer, CompletionOptions};
+use crate::completions::{Completer, CompletionOptions};
use nu_protocol::{
    ast::{Expr, Expression},
-    engine::{Stack, StateWorkingSet},
+    engine::StateWorkingSet,
    Span,
};
-use reedline::Suggestion;
-
-use super::SemanticSuggestion;
+use reedline::Suggestion;

#[derive(Clone)]
pub struct FlagCompletion {
@@ -23,19 +22,18 @@ impl Completer for FlagCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
-        _stack: &Stack,
-        prefix: &[u8],
+        prefix: Vec<u8>,
        span: Span,
        offset: usize,
-        _pos: usize,
+        _: usize,
        options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion> {
+    ) -> Vec<Suggestion> {
        // Check if it's a flag
        if let Expr::Call(call) = &self.expression.expr {
            let decl = working_set.get_decl(call.decl_id);
            let sig = decl.signature();

-            let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), options.clone());
+            let mut output = vec![];

            for named in &sig.named {
                let flag_desc = &named.desc;
@@ -44,20 +42,18 @@ impl Completer for FlagCompletion {
                    short.encode_utf8(&mut named);
                    named.insert(0, b'-');

-                    matcher.add_semantic_suggestion(SemanticSuggestion {
-                        suggestion: Suggestion {
+                    if options.match_algorithm.matches_u8(&named, &prefix) {
+                        output.push(Suggestion {
                            value: String::from_utf8_lossy(&named).to_string(),
                            description: Some(flag_desc.to_string()),
+                            extra: None,
                            span: reedline::Span {
                                start: span.start - offset,
                                end: span.end - offset,
                            },
                            append_whitespace: true,
-                            ..Suggestion::default()
-                        },
-                        // TODO????
-                        kind: None,
-                    });
+                        });
+                    }
                }

                if named.long.is_empty() {
@@ -68,23 +64,21 @@ impl Completer for FlagCompletion {
                named.insert(0, b'-');
                named.insert(0, b'-');

-                matcher.add_semantic_suggestion(SemanticSuggestion {
-                    suggestion: Suggestion {
+                if options.match_algorithm.matches_u8(&named, &prefix) {
+                    output.push(Suggestion {
                        value: String::from_utf8_lossy(&named).to_string(),
                        description: Some(flag_desc.to_string()),
+                        extra: None,
                        span: reedline::Span {
                            start: span.start - offset,
                            end: span.end - offset,
                        },
                        append_whitespace: true,
-                        ..Suggestion::default()
-                    },
-                    // TODO????
-                    kind: None,
-                });
+                    });
+                }
            }

-            return matcher.results();
+            return output;
        }

        vec![]
|
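The flag completer above builds candidate byte strings (`-x` for short flags, `--name` for long flags) from the command signature and keeps only those matching the typed prefix. A rough self-contained sketch of that loop; `NamedArg` and the `starts_with` filter are simplified stand-ins for nushell's `sig.named` entries and its matcher:

```rust
// Simplified stand-in for a signature's named argument.
struct NamedArg {
    long: String,
    short: Option<char>,
    desc: String,
}

fn flag_candidates(named_args: &[NamedArg], prefix: &[u8]) -> Vec<(String, String)> {
    let mut out = Vec::new();
    for named in named_args {
        if let Some(short) = named.short {
            // Short flags are rendered as `-x`, mirroring the `insert(0, b'-')` above.
            let mut bytes = vec![b'-'];
            bytes.extend_from_slice(short.to_string().as_bytes());
            if bytes.starts_with(prefix) {
                out.push((String::from_utf8_lossy(&bytes).to_string(), named.desc.clone()));
            }
        }
        if !named.long.is_empty() {
            // Long flags are rendered as `--name`.
            let mut bytes = b"--".to_vec();
            bytes.extend_from_slice(named.long.as_bytes());
            if bytes.starts_with(prefix) {
                out.push((String::from_utf8_lossy(&bytes).to_string(), named.desc.clone()));
            }
        }
    }
    out
}

fn main() {
    let args = vec![NamedArg { long: "help".into(), short: Some('h'), desc: "Show help".into() }];
    println!("{:?}", flag_candidates(&args, b"--h"));
}
```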
@ -8,17 +8,15 @@ mod directory_completions;
 mod dotnu_completions;
 mod file_completions;
 mod flag_completions;
-mod operator_completions;
 mod variable_completions;
 
-pub use base::{Completer, SemanticSuggestion, SuggestionKind};
+pub use base::Completer;
 pub use command_completions::CommandCompletion;
 pub use completer::NuCompleter;
-pub use completion_options::{CompletionOptions, MatchAlgorithm};
+pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy};
 pub use custom_completions::CustomCompletion;
 pub use directory_completions::DirectoryCompletion;
 pub use dotnu_completions::DotNuCompletion;
-pub use file_completions::{file_path_completion, FileCompletion};
+pub use file_completions::{file_path_completion, matches, FileCompletion};
 pub use flag_completions::FlagCompletion;
-pub use operator_completions::OperatorCompletion;
 pub use variable_completions::VariableCompletion;
@ -1,166 +0,0 @@
-use crate::completions::{
-    completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
-};
-use nu_protocol::{
-    ast::{Expr, Expression},
-    engine::{Stack, StateWorkingSet},
-    Span, Type,
-};
-use reedline::Suggestion;
-
-#[derive(Clone)]
-pub struct OperatorCompletion {
-    previous_expr: Expression,
-}
-
-impl OperatorCompletion {
-    pub fn new(previous_expr: Expression) -> Self {
-        OperatorCompletion { previous_expr }
-    }
-}
-
-impl Completer for OperatorCompletion {
-    fn fetch(
-        &mut self,
-        working_set: &StateWorkingSet,
-        _stack: &Stack,
-        _prefix: &[u8],
-        span: Span,
-        offset: usize,
-        _pos: usize,
-        options: &CompletionOptions,
-    ) -> Vec<SemanticSuggestion> {
-        //Check if int, float, or string
-        let partial = std::str::from_utf8(working_set.get_span_contents(span)).unwrap_or("");
-        let op = match &self.previous_expr.expr {
-            Expr::BinaryOp(x, _, _) => &x.expr,
-            _ => {
-                return vec![];
-            }
-        };
-        let possible_operations = match op {
-            Expr::Int(_) => vec![
-                ("+", "Add (Plus)"),
-                ("-", "Subtract (Minus)"),
-                ("*", "Multiply"),
-                ("/", "Divide"),
-                ("==", "Equal to"),
-                ("!=", "Not equal to"),
-                ("//", "Floor division"),
-                ("<", "Less than"),
-                (">", "Greater than"),
-                ("<=", "Less than or equal to"),
-                (">=", "Greater than or equal to"),
-                ("mod", "Floor division remainder (Modulo)"),
-                ("**", "Power of"),
-                ("bit-or", "Bitwise OR"),
-                ("bit-xor", "Bitwise exclusive OR"),
-                ("bit-and", "Bitwise AND"),
-                ("bit-shl", "Bitwise shift left"),
-                ("bit-shr", "Bitwise shift right"),
-                ("in", "Is a member of (doesn't use regex)"),
-                ("not-in", "Is not a member of (doesn't use regex)"),
-            ],
-            Expr::String(_) => vec![
-                ("=~", "Contains regex match"),
-                ("like", "Contains regex match"),
-                ("!~", "Does not contain regex match"),
-                ("not-like", "Does not contain regex match"),
-                (
-                    "++",
-                    "Concatenates two lists, two strings, or two binary values",
-                ),
-                ("in", "Is a member of (doesn't use regex)"),
-                ("not-in", "Is not a member of (doesn't use regex)"),
-                ("starts-with", "Starts with"),
-                ("ends-with", "Ends with"),
-            ],
-            Expr::Float(_) => vec![
-                ("+", "Add (Plus)"),
-                ("-", "Subtract (Minus)"),
-                ("*", "Multiply"),
-                ("/", "Divide"),
-                ("==", "Equal to"),
-                ("!=", "Not equal to"),
-                ("//", "Floor division"),
-                ("<", "Less than"),
-                (">", "Greater than"),
-                ("<=", "Less than or equal to"),
-                (">=", "Greater than or equal to"),
-                ("mod", "Floor division remainder (Modulo)"),
-                ("**", "Power of"),
-                ("in", "Is a member of (doesn't use regex)"),
-                ("not-in", "Is not a member of (doesn't use regex)"),
-            ],
-            Expr::Bool(_) => vec![
-                (
-                    "and",
-                    "Both values are true (short-circuits when first value is false)",
-                ),
-                (
-                    "or",
-                    "Either value is true (short-circuits when first value is true)",
-                ),
-                ("xor", "One value is true and the other is false"),
-                ("not", "Negates a value or expression"),
-                ("in", "Is a member of (doesn't use regex)"),
-                ("not-in", "Is not a member of (doesn't use regex)"),
-            ],
-            Expr::FullCellPath(path) => match path.head.expr {
-                Expr::List(_) => vec![(
-                    "++",
-                    "Concatenates two lists, two strings, or two binary values",
-                )],
-                Expr::Var(id) => get_variable_completions(id, working_set),
-                _ => vec![],
-            },
-            _ => vec![],
-        };
-
-        let mut matcher = NuMatcher::new(partial, options.clone());
-        for (symbol, desc) in possible_operations.into_iter() {
-            matcher.add_semantic_suggestion(SemanticSuggestion {
-                suggestion: Suggestion {
-                    value: symbol.to_string(),
-                    description: Some(desc.to_string()),
-                    span: reedline::Span::new(span.start - offset, span.end - offset),
-                    append_whitespace: true,
-                    ..Suggestion::default()
-                },
-                kind: Some(SuggestionKind::Command(
-                    nu_protocol::engine::CommandType::Builtin,
-                )),
-            });
-        }
-        matcher.results()
-    }
-}
-
-pub fn get_variable_completions<'a>(
-    id: nu_protocol::Id<nu_protocol::marker::Var>,
-    working_set: &StateWorkingSet,
-) -> Vec<(&'a str, &'a str)> {
-    let var = working_set.get_variable(id);
-    if !var.mutable {
-        return vec![];
-    }
-
-    match var.ty {
-        Type::List(_) | Type::String | Type::Binary => vec![
-            (
-                "++=",
-                "Concatenates two lists, two strings, or two binary values",
-            ),
-            ("=", "Assigns a value to a variable."),
-        ],
-
-        Type::Int | Type::Float => vec![
-            ("=", "Assigns a value to a variable."),
-            ("+=", "Adds a value to a variable."),
-            ("-=", "Subtracts a value from a variable."),
-            ("*=", "Multiplies a variable by a value"),
-            ("/=", "Divides a variable by a value."),
-        ],
-        _ => vec![],
-    }
-}
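The removed `OperatorCompletion` above picks a table of `(operator, description)` pairs based on the type of the left-hand expression, then filters it against what has been typed. A simplified sketch of that idea; `ExprKind` and the prefix filter are stand-ins for nushell's `Expr` variants and `NuMatcher`:

```rust
// Toy stand-in for the kind of the expression left of the cursor.
enum ExprKind {
    Int,
    Str,
    Bool,
}

fn operator_candidates(kind: &ExprKind, partial: &str) -> Vec<(&'static str, &'static str)> {
    // Pick the candidate table for this expression kind (heavily abbreviated).
    let table = match kind {
        ExprKind::Int => vec![("+", "Add (Plus)"), ("mod", "Floor division remainder (Modulo)")],
        ExprKind::Str => vec![("=~", "Contains regex match"), ("starts-with", "Starts with")],
        ExprKind::Bool => vec![("and", "Both values are true"), ("xor", "One value is true and the other is false")],
    };
    // Keep only the operators that match the partially typed token.
    table
        .into_iter()
        .filter(|(symbol, _)| symbol.starts_with(partial))
        .collect()
}

fn main() {
    // With an integer on the left and "m" typed, only `mod` survives the filter.
    println!("{:?}", operator_candidates(&ExprKind::Int, "m"));
}
```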
@ -1,22 +1,34 @@
|
|||||||
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
|
use crate::completions::{Completer, CompletionOptions};
|
||||||
use nu_engine::{column::get_columns, eval_variable};
|
use nu_engine::{column::get_columns, eval_variable};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
Span, Value,
|
Span, Value,
|
||||||
};
|
};
|
||||||
|
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
use std::str;
|
use std::str;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use super::completion_options::NuMatcher;
|
use super::MatchAlgorithm;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct VariableCompletion {
|
pub struct VariableCompletion {
|
||||||
|
engine_state: Arc<EngineState>, // TODO: Is engine state necessary? It's already a part of working set in fetch()
|
||||||
|
stack: Stack,
|
||||||
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
||||||
}
|
}
|
||||||
|
|
||||||
impl VariableCompletion {
|
impl VariableCompletion {
|
||||||
pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
|
pub fn new(
|
||||||
Self { var_context }
|
engine_state: Arc<EngineState>,
|
||||||
|
stack: Stack,
|
||||||
|
var_context: (Vec<u8>, Vec<Vec<u8>>),
|
||||||
|
) -> Self {
|
||||||
|
Self {
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
var_context,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -24,13 +36,13 @@ impl Completer for VariableCompletion {
|
|||||||
fn fetch(
|
fn fetch(
|
||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
stack: &Stack,
|
prefix: Vec<u8>,
|
||||||
prefix: &[u8],
|
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
_pos: usize,
|
_: usize,
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
) -> Vec<SemanticSuggestion> {
|
) -> Vec<Suggestion> {
|
||||||
|
let mut output = vec![];
|
||||||
let builtins = ["$nu", "$in", "$env"];
|
let builtins = ["$nu", "$in", "$env"];
|
||||||
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
|
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
|
||||||
let var_id = working_set.find_variable(&self.var_context.0);
|
let var_id = working_set.find_variable(&self.var_context.0);
|
||||||
@ -39,14 +51,12 @@ impl Completer for VariableCompletion {
|
|||||||
end: span.end - offset,
|
end: span.end - offset,
|
||||||
};
|
};
|
||||||
let sublevels_count = self.var_context.1.len();
|
let sublevels_count = self.var_context.1.len();
|
||||||
let prefix_str = String::from_utf8_lossy(prefix);
|
|
||||||
let mut matcher = NuMatcher::new(prefix_str, options.clone());
|
|
||||||
|
|
||||||
// Completions for the given variable
|
// Completions for the given variable
|
||||||
if !var_str.is_empty() {
|
if !var_str.is_empty() {
|
||||||
// Completion for $env.<tab>
|
// Completion for $env.<tab>
|
||||||
if var_str == "$env" {
|
if var_str == "$env" {
|
||||||
let env_vars = stack.get_env_vars(working_set.permanent_state);
|
let env_vars = self.stack.get_env_vars(&self.engine_state);
|
||||||
|
|
||||||
// Return nested values
|
// Return nested values
|
||||||
if sublevels_count > 0 {
|
if sublevels_count > 0 {
|
||||||
@ -60,26 +70,39 @@ impl Completer for VariableCompletion {
|
|||||||
self.var_context.1.clone().into_iter().skip(1).collect();
|
self.var_context.1.clone().into_iter().skip(1).collect();
|
||||||
|
|
||||||
if let Some(val) = env_vars.get(&target_var_str) {
|
if let Some(val) = env_vars.get(&target_var_str) {
|
||||||
for suggestion in nested_suggestions(val, &nested_levels, current_span) {
|
for suggestion in
|
||||||
matcher.add_semantic_suggestion(suggestion);
|
nested_suggestions(val.clone(), nested_levels, current_span)
|
||||||
|
{
|
||||||
|
if options.match_algorithm.matches_u8_insensitive(
|
||||||
|
options.case_sensitive,
|
||||||
|
suggestion.value.as_bytes(),
|
||||||
|
&prefix,
|
||||||
|
) {
|
||||||
|
output.push(suggestion);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return matcher.results();
|
return output;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// No nesting provided, return all env vars
|
// No nesting provided, return all env vars
|
||||||
for env_var in env_vars {
|
for env_var in env_vars {
|
||||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
if options.match_algorithm.matches_u8_insensitive(
|
||||||
suggestion: Suggestion {
|
options.case_sensitive,
|
||||||
|
env_var.0.as_bytes(),
|
||||||
|
&prefix,
|
||||||
|
) {
|
||||||
|
output.push(Suggestion {
|
||||||
value: env_var.0,
|
value: env_var.0,
|
||||||
|
description: None,
|
||||||
|
extra: None,
|
||||||
span: current_span,
|
span: current_span,
|
||||||
..Suggestion::default()
|
append_whitespace: false,
|
||||||
},
|
});
|
||||||
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
|
}
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return matcher.results();
|
return output;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -87,48 +110,66 @@ impl Completer for VariableCompletion {
|
|||||||
if var_str == "$nu" {
|
if var_str == "$nu" {
|
||||||
// Eval nu var
|
// Eval nu var
|
||||||
if let Ok(nuval) = eval_variable(
|
if let Ok(nuval) = eval_variable(
|
||||||
working_set.permanent_state,
|
&self.engine_state,
|
||||||
stack,
|
&self.stack,
|
||||||
nu_protocol::NU_VARIABLE_ID,
|
nu_protocol::NU_VARIABLE_ID,
|
||||||
nu_protocol::Span::new(current_span.start, current_span.end),
|
nu_protocol::Span::new(current_span.start, current_span.end),
|
||||||
) {
|
) {
|
||||||
for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
|
for suggestion in
|
||||||
|
nested_suggestions(nuval, self.var_context.1.clone(), current_span)
|
||||||
{
|
{
|
||||||
matcher.add_semantic_suggestion(suggestion);
|
if options.match_algorithm.matches_u8_insensitive(
|
||||||
|
options.case_sensitive,
|
||||||
|
suggestion.value.as_bytes(),
|
||||||
|
&prefix,
|
||||||
|
) {
|
||||||
|
output.push(suggestion);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return matcher.results();
|
return output;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Completion other variable types
|
// Completion other variable types
|
||||||
if let Some(var_id) = var_id {
|
if let Some(var_id) = var_id {
|
||||||
// Extract the variable value from the stack
|
// Extract the variable value from the stack
|
||||||
let var = stack.get_var(var_id, Span::new(span.start, span.end));
|
let var = self.stack.get_var(var_id, Span::new(span.start, span.end));
|
||||||
|
|
||||||
// If the value exists and it's of type Record
|
// If the value exists and it's of type Record
|
||||||
if let Ok(value) = var {
|
if let Ok(value) = var {
|
||||||
for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
|
for suggestion in
|
||||||
|
nested_suggestions(value, self.var_context.1.clone(), current_span)
|
||||||
{
|
{
|
||||||
matcher.add_semantic_suggestion(suggestion);
|
if options.match_algorithm.matches_u8_insensitive(
|
||||||
|
options.case_sensitive,
|
||||||
|
suggestion.value.as_bytes(),
|
||||||
|
&prefix,
|
||||||
|
) {
|
||||||
|
output.push(suggestion);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return matcher.results();
|
return output;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Variable completion (e.g: $en<tab> to complete $env)
|
// Variable completion (e.g: $en<tab> to complete $env)
|
||||||
for builtin in builtins {
|
for builtin in builtins {
|
||||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
if options.match_algorithm.matches_u8_insensitive(
|
||||||
suggestion: Suggestion {
|
options.case_sensitive,
|
||||||
|
builtin.as_bytes(),
|
||||||
|
&prefix,
|
||||||
|
) {
|
||||||
|
output.push(Suggestion {
|
||||||
value: builtin.to_string(),
|
value: builtin.to_string(),
|
||||||
|
description: None,
|
||||||
|
extra: None,
|
||||||
span: current_span,
|
span: current_span,
|
||||||
..Suggestion::default()
|
append_whitespace: false,
|
||||||
},
|
});
|
||||||
// TODO is there a way to get the VarId to get the type???
|
}
|
||||||
kind: None,
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: The following can be refactored (see find_commands_by_predicate() used in
|
// TODO: The following can be refactored (see find_commands_by_predicate() used in
|
||||||
@ -138,67 +179,83 @@ impl Completer for VariableCompletion {
|
|||||||
for scope_frame in working_set.delta.scope.iter().rev() {
|
for scope_frame in working_set.delta.scope.iter().rev() {
|
||||||
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
|
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
|
||||||
for v in &overlay_frame.vars {
|
for v in &overlay_frame.vars {
|
||||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
if options.match_algorithm.matches_u8_insensitive(
|
||||||
suggestion: Suggestion {
|
options.case_sensitive,
|
||||||
|
v.0,
|
||||||
|
&prefix,
|
||||||
|
) {
|
||||||
|
output.push(Suggestion {
|
||||||
value: String::from_utf8_lossy(v.0).to_string(),
|
value: String::from_utf8_lossy(v.0).to_string(),
|
||||||
|
description: None,
|
||||||
|
extra: None,
|
||||||
span: current_span,
|
span: current_span,
|
||||||
..Suggestion::default()
|
append_whitespace: false,
|
||||||
},
|
});
|
||||||
kind: Some(SuggestionKind::Type(
|
}
|
||||||
working_set.get_variable(*v.1).ty.clone(),
|
|
||||||
)),
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Permanent state vars
|
// Permanent state vars
|
||||||
// for scope in &self.engine_state.scope {
|
// for scope in &self.engine_state.scope {
|
||||||
for overlay_frame in working_set
|
for overlay_frame in self.engine_state.active_overlays(&removed_overlays).rev() {
|
||||||
.permanent_state
|
|
||||||
.active_overlays(&removed_overlays)
|
|
||||||
.rev()
|
|
||||||
{
|
|
||||||
for v in &overlay_frame.vars {
|
for v in &overlay_frame.vars {
|
||||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
if options.match_algorithm.matches_u8_insensitive(
|
||||||
suggestion: Suggestion {
|
options.case_sensitive,
|
||||||
|
v.0,
|
||||||
|
&prefix,
|
||||||
|
) {
|
||||||
|
output.push(Suggestion {
|
||||||
value: String::from_utf8_lossy(v.0).to_string(),
|
value: String::from_utf8_lossy(v.0).to_string(),
|
||||||
|
description: None,
|
||||||
|
extra: None,
|
||||||
span: current_span,
|
span: current_span,
|
||||||
..Suggestion::default()
|
append_whitespace: false,
|
||||||
},
|
});
|
||||||
kind: Some(SuggestionKind::Type(
|
}
|
||||||
working_set.get_variable(*v.1).ty.clone(),
|
|
||||||
)),
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
matcher.results()
|
output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?
|
||||||
|
|
||||||
|
output
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Find recursively the values for sublevels
|
// Find recursively the values for sublevels
|
||||||
// if no sublevels are set it returns the current value
|
// if no sublevels are set it returns the current value
|
||||||
fn nested_suggestions(
|
fn nested_suggestions(
|
||||||
val: &Value,
|
val: Value,
|
||||||
sublevels: &[Vec<u8>],
|
sublevels: Vec<Vec<u8>>,
|
||||||
current_span: reedline::Span,
|
current_span: reedline::Span,
|
||||||
) -> Vec<SemanticSuggestion> {
|
) -> Vec<Suggestion> {
|
||||||
let mut output: Vec<SemanticSuggestion> = vec![];
|
let mut output: Vec<Suggestion> = vec![];
|
||||||
let value = recursive_value(val, sublevels).unwrap_or_else(Value::nothing);
|
let value = recursive_value(val, sublevels);
|
||||||
|
|
||||||
let kind = SuggestionKind::Type(value.get_type());
|
|
||||||
match value {
|
match value {
|
||||||
Value::Record { val, .. } => {
|
Value::Record { val, .. } => {
|
||||||
// Add all the columns as completion
|
// Add all the columns as completion
|
||||||
for col in val.columns() {
|
for (col, _) in val.into_iter() {
|
||||||
output.push(SemanticSuggestion {
|
output.push(Suggestion {
|
||||||
suggestion: Suggestion {
|
value: col,
|
||||||
value: col.clone(),
|
description: None,
|
||||||
span: current_span,
|
extra: None,
|
||||||
..Suggestion::default()
|
span: current_span,
|
||||||
},
|
append_whitespace: false,
|
||||||
kind: Some(kind.clone()),
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
output
|
||||||
|
}
|
||||||
|
Value::LazyRecord { val, .. } => {
|
||||||
|
// Add all the columns as completion
|
||||||
|
for column_name in val.column_names() {
|
||||||
|
output.push(Suggestion {
|
||||||
|
value: column_name.to_string(),
|
||||||
|
description: None,
|
||||||
|
extra: None,
|
||||||
|
span: current_span,
|
||||||
|
append_whitespace: false,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -206,13 +263,12 @@ fn nested_suggestions(
|
|||||||
}
|
}
|
||||||
Value::List { vals, .. } => {
|
Value::List { vals, .. } => {
|
||||||
for column_name in get_columns(vals.as_slice()) {
|
for column_name in get_columns(vals.as_slice()) {
|
||||||
output.push(SemanticSuggestion {
|
output.push(Suggestion {
|
||||||
suggestion: Suggestion {
|
value: column_name,
|
||||||
value: column_name,
|
description: None,
|
||||||
span: current_span,
|
extra: None,
|
||||||
..Suggestion::default()
|
span: current_span,
|
||||||
},
|
append_whitespace: false,
|
||||||
kind: Some(kind.clone()),
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -223,34 +279,64 @@ fn nested_suggestions(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Extracts the recursive value (e.g: $var.a.b.c)
|
// Extracts the recursive value (e.g: $var.a.b.c)
|
||||||
fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
|
fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
|
||||||
// Go to next sublevel
|
// Go to next sublevel
|
||||||
if let Some((sublevel, next_sublevels)) = sublevels.split_first() {
|
if let Some(next_sublevel) = sublevels.clone().into_iter().next() {
|
||||||
let span = val.span();
|
let span = val.span();
|
||||||
match val {
|
match val {
|
||||||
Value::Record { val, .. } => {
|
Value::Record { val, .. } => {
|
||||||
if let Some((_, value)) = val.iter().find(|(key, _)| key.as_bytes() == sublevel) {
|
for item in val {
|
||||||
// If matches try to fetch recursively the next
|
// Check if index matches with sublevel
|
||||||
recursive_value(value, next_sublevels)
|
if item.0.as_bytes().to_vec() == next_sublevel {
|
||||||
} else {
|
// If matches try to fetch recursively the next
|
||||||
// Current sublevel value not found
|
return recursive_value(item.1, sublevels.into_iter().skip(1).collect());
|
||||||
Err(span)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Value::List { vals, .. } => {
|
|
||||||
for col in get_columns(vals.as_slice()) {
|
|
||||||
if col.as_bytes() == *sublevel {
|
|
||||||
let val = val.get_data_by_key(&col).ok_or(span)?;
|
|
||||||
return recursive_value(&val, next_sublevels);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Current sublevel value not found
|
// Current sublevel value not found
|
||||||
Err(span)
|
return Value::nothing(span);
|
||||||
}
|
}
|
||||||
_ => Ok(val.clone()),
|
Value::LazyRecord { val, .. } => {
|
||||||
|
for col in val.column_names() {
|
||||||
|
if col.as_bytes().to_vec() == next_sublevel {
|
||||||
|
return recursive_value(
|
||||||
|
val.get_column_value(col).unwrap_or_default(),
|
||||||
|
sublevels.into_iter().skip(1).collect(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Current sublevel value not found
|
||||||
|
return Value::nothing(span);
|
||||||
|
}
|
||||||
|
Value::List { vals, .. } => {
|
||||||
|
for col in get_columns(vals.as_slice()) {
|
||||||
|
if col.as_bytes().to_vec() == next_sublevel {
|
||||||
|
return recursive_value(
|
||||||
|
Value::list(vals, span)
|
||||||
|
.get_data_by_key(&col)
|
||||||
|
.unwrap_or_default(),
|
||||||
|
sublevels.into_iter().skip(1).collect(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Current sublevel value not found
|
||||||
|
return Value::nothing(span);
|
||||||
|
}
|
||||||
|
_ => return val,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
val
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MatchAlgorithm {
|
||||||
|
pub fn matches_u8_insensitive(&self, sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
|
||||||
|
if sensitive {
|
||||||
|
self.matches_u8(haystack, needle)
|
||||||
|
} else {
|
||||||
|
self.matches_u8(&haystack.to_ascii_lowercase(), &needle.to_ascii_lowercase())
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
Ok(val.clone())
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
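The variable-completion changes above route every candidate through a small case-insensitivity wrapper before the byte-level match. A standalone sketch of that wrapper; the prefix-based `matches_u8` here is a stand-in for the real match algorithm:

```rust
// Stand-in for MatchAlgorithm::matches_u8; the real one supports several algorithms.
fn matches_u8(haystack: &[u8], needle: &[u8]) -> bool {
    haystack.starts_with(needle)
}

// Mirrors matches_u8_insensitive from the diff: lowercase both sides when the
// match should ignore case, otherwise compare the bytes as-is.
fn matches_u8_insensitive(sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
    if sensitive {
        matches_u8(haystack, needle)
    } else {
        matches_u8(&haystack.to_ascii_lowercase(), &needle.to_ascii_lowercase())
    }
}

fn main() {
    assert!(matches_u8_insensitive(false, b"FILE_PWD", b"file"));
    assert!(!matches_u8_insensitive(true, b"FILE_PWD", b"file"));
}
```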
|
@ -1,190 +1,81 @@
|
|||||||
use crate::util::eval_source;
|
use crate::util::eval_source;
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
use nu_path::canonicalize_with;
|
use nu_path::canonicalize_with;
|
||||||
|
use nu_protocol::engine::{EngineState, Stack, StateWorkingSet};
|
||||||
|
use nu_protocol::report_error;
|
||||||
|
use nu_protocol::{HistoryFileFormat, PipelineData};
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
use nu_protocol::{engine::StateWorkingSet, ParseError, PluginRegistryFile, Spanned};
|
use nu_protocol::{ParseError, Spanned};
|
||||||
use nu_protocol::{
|
|
||||||
engine::{EngineState, Stack},
|
|
||||||
report_shell_error, PipelineData,
|
|
||||||
};
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
use nu_utils::perf;
|
use nu_utils::utils::perf;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
const PLUGIN_FILE: &str = "plugin.msgpackz";
|
const PLUGIN_FILE: &str = "plugin.nu";
|
||||||
#[cfg(feature = "plugin")]
|
|
||||||
const OLD_PLUGIN_FILE: &str = "plugin.nu";
|
const HISTORY_FILE_TXT: &str = "history.txt";
|
||||||
|
const HISTORY_FILE_SQLITE: &str = "history.sqlite3";
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
|
pub fn read_plugin_file(
|
||||||
use nu_protocol::ShellError;
|
engine_state: &mut EngineState,
|
||||||
use std::path::Path;
|
stack: &mut Stack,
|
||||||
|
plugin_file: Option<Spanned<String>>,
|
||||||
|
storage_path: &str,
|
||||||
|
) {
|
||||||
|
let start_time = std::time::Instant::now();
|
||||||
|
let mut plug_path = String::new();
|
||||||
|
// Reading signatures from signature file
|
||||||
|
// The plugin.nu file stores the parsed signature collected from each registered plugin
|
||||||
|
add_plugin_file(engine_state, plugin_file, storage_path);
|
||||||
|
|
||||||
let span = plugin_file.as_ref().map(|s| s.span);
|
let plugin_path = engine_state.plugin_signatures.clone();
|
||||||
|
|
||||||
// Check and warn + abort if this is a .nu plugin file
|
|
||||||
if plugin_file
|
|
||||||
.as_ref()
|
|
||||||
.and_then(|p| Path::new(&p.item).extension())
|
|
||||||
.is_some_and(|ext| ext == "nu")
|
|
||||||
{
|
|
||||||
report_shell_error(
|
|
||||||
engine_state,
|
|
||||||
&ShellError::GenericError {
|
|
||||||
error: "Wrong plugin file format".into(),
|
|
||||||
msg: ".nu plugin files are no longer supported".into(),
|
|
||||||
span,
|
|
||||||
help: Some("please recreate this file in the new .msgpackz format".into()),
|
|
||||||
inner: vec![],
|
|
||||||
},
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut start_time = std::time::Instant::now();
|
|
||||||
// Reading signatures from plugin registry file
|
|
||||||
// The plugin.msgpackz file stores the parsed signature collected from each registered plugin
|
|
||||||
add_plugin_file(engine_state, plugin_file.clone());
|
|
||||||
perf!(
|
|
||||||
"add plugin file to engine_state",
|
|
||||||
start_time,
|
|
||||||
engine_state.get_config().use_ansi_coloring
|
|
||||||
);
|
|
||||||
|
|
||||||
start_time = std::time::Instant::now();
|
|
||||||
let plugin_path = engine_state.plugin_path.clone();
|
|
||||||
if let Some(plugin_path) = plugin_path {
|
if let Some(plugin_path) = plugin_path {
|
||||||
// Open the plugin file
|
let plugin_filename = plugin_path.to_string_lossy();
|
||||||
let mut file = match std::fs::File::open(&plugin_path) {
|
plug_path = plugin_filename.to_string();
|
||||||
Ok(file) => file,
|
if let Ok(contents) = std::fs::read(&plugin_path) {
|
||||||
Err(err) => {
|
eval_source(
|
||||||
if err.kind() == std::io::ErrorKind::NotFound {
|
engine_state,
|
||||||
log::warn!("Plugin file not found: {}", plugin_path.display());
|
stack,
|
||||||
|
&contents,
|
||||||
// Try migration of an old plugin file if this wasn't a custom plugin file
|
&plugin_filename,
|
||||||
if plugin_file.is_none() && migrate_old_plugin_file(engine_state) {
|
PipelineData::empty(),
|
||||||
let Ok(file) = std::fs::File::open(&plugin_path) else {
|
false,
|
||||||
log::warn!("Failed to load newly migrated plugin file");
|
|
||||||
return;
|
|
||||||
};
|
|
||||||
file
|
|
||||||
} else {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
report_shell_error(
|
|
||||||
engine_state,
|
|
||||||
&ShellError::GenericError {
|
|
||||||
error: format!(
|
|
||||||
"Error while opening plugin registry file: {}",
|
|
||||||
plugin_path.display()
|
|
||||||
),
|
|
||||||
msg: "plugin path defined here".into(),
|
|
||||||
span,
|
|
||||||
help: None,
|
|
||||||
inner: vec![err.into()],
|
|
||||||
},
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Abort if the file is empty.
|
|
||||||
if file.metadata().is_ok_and(|m| m.len() == 0) {
|
|
||||||
log::warn!(
|
|
||||||
"Not reading plugin file because it's empty: {}",
|
|
||||||
plugin_path.display()
|
|
||||||
);
|
);
|
||||||
return;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read the contents of the plugin file
|
|
||||||
let contents = match PluginRegistryFile::read_from(&mut file, span) {
|
|
||||||
Ok(contents) => contents,
|
|
||||||
Err(err) => {
|
|
||||||
log::warn!("Failed to read plugin registry file: {err:?}");
|
|
||||||
report_shell_error(
|
|
||||||
engine_state,
|
|
||||||
&ShellError::GenericError {
|
|
||||||
error: format!(
|
|
||||||
"Error while reading plugin registry file: {}",
|
|
||||||
plugin_path.display()
|
|
||||||
),
|
|
||||||
msg: "plugin path defined here".into(),
|
|
||||||
span,
|
|
||||||
help: Some(
|
|
||||||
"you might try deleting the file and registering all of your \
|
|
||||||
plugins again"
|
|
||||||
.into(),
|
|
||||||
),
|
|
||||||
inner: vec![],
|
|
||||||
},
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
perf!(
|
|
||||||
&format!("read plugin file {}", plugin_path.display()),
|
|
||||||
start_time,
|
|
||||||
engine_state.get_config().use_ansi_coloring
|
|
||||||
);
|
|
||||||
start_time = std::time::Instant::now();
|
|
||||||
|
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
|
||||||
|
|
||||||
nu_plugin_engine::load_plugin_file(&mut working_set, &contents, span);
|
|
||||||
|
|
||||||
if let Err(err) = engine_state.merge_delta(working_set.render()) {
|
|
||||||
report_shell_error(engine_state, &err);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
perf!(
|
|
||||||
&format!("load plugin file {}", plugin_path.display()),
|
|
||||||
start_time,
|
|
||||||
engine_state.get_config().use_ansi_coloring
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
perf(
|
||||||
|
&format!("read_plugin_file {}", &plug_path),
|
||||||
|
start_time,
|
||||||
|
file!(),
|
||||||
|
line!(),
|
||||||
|
column!(),
|
||||||
|
engine_state.get_config().use_ansi_coloring,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
pub fn add_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
|
pub fn add_plugin_file(
|
||||||
use std::path::Path;
|
engine_state: &mut EngineState,
|
||||||
|
plugin_file: Option<Spanned<String>>,
|
||||||
|
storage_path: &str,
|
||||||
|
) {
|
||||||
|
if let Some(plugin_file) = plugin_file {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
let cwd = working_set.get_cwd();
|
||||||
|
|
||||||
use nu_protocol::report_parse_error;
|
if let Ok(path) = canonicalize_with(&plugin_file.item, cwd) {
|
||||||
|
engine_state.plugin_signatures = Some(path)
|
||||||
if let Ok(cwd) = engine_state.cwd_as_string(None) {
|
} else {
|
||||||
if let Some(plugin_file) = plugin_file {
|
let e = ParseError::FileNotFound(plugin_file.item, plugin_file.span);
|
||||||
let path = Path::new(&plugin_file.item);
|
report_error(&working_set, &e);
|
||||||
let path_dir = path.parent().unwrap_or(path);
|
|
||||||
// Just try to canonicalize the directory of the plugin file first.
|
|
||||||
if let Ok(path_dir) = canonicalize_with(path_dir, &cwd) {
|
|
||||||
// Try to canonicalize the actual filename, but it's ok if that fails. The file doesn't
|
|
||||||
// have to exist.
|
|
||||||
let path = path_dir.join(path.file_name().unwrap_or(path.as_os_str()));
|
|
||||||
let path = canonicalize_with(&path, &cwd).unwrap_or(path);
|
|
||||||
engine_state.plugin_path = Some(path)
|
|
||||||
} else {
|
|
||||||
// It's an error if the directory for the plugin file doesn't exist.
|
|
||||||
report_parse_error(
|
|
||||||
&StateWorkingSet::new(engine_state),
|
|
||||||
&ParseError::FileNotFound(
|
|
||||||
path_dir.to_string_lossy().into_owned(),
|
|
||||||
plugin_file.span,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} else if let Some(plugin_path) = nu_path::nu_config_dir() {
|
|
||||||
// Path to store plugins signatures
|
|
||||||
let mut plugin_path =
|
|
||||||
canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path.into());
|
|
||||||
plugin_path.push(PLUGIN_FILE);
|
|
||||||
let plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
|
|
||||||
engine_state.plugin_path = Some(plugin_path);
|
|
||||||
}
|
}
|
||||||
|
} else if let Some(mut plugin_path) = nu_path::config_dir() {
|
||||||
|
// Path to store plugins signatures
|
||||||
|
plugin_path.push(storage_path);
|
||||||
|
plugin_path.push(PLUGIN_FILE);
|
||||||
|
engine_state.plugin_signatures = Some(plugin_path.clone());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -197,12 +88,7 @@ pub fn eval_config_contents(
|
|||||||
let config_filename = config_path.to_string_lossy();
|
let config_filename = config_path.to_string_lossy();
|
||||||
|
|
||||||
if let Ok(contents) = std::fs::read(&config_path) {
|
if let Ok(contents) = std::fs::read(&config_path) {
|
||||||
// Set the current active file to the config file.
|
eval_source(
|
||||||
let prev_file = engine_state.file.take();
|
|
||||||
engine_state.file = Some(config_path.clone());
|
|
||||||
|
|
||||||
// TODO: ignore this error?
|
|
||||||
let _ = eval_source(
|
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
&contents,
|
&contents,
|
||||||
@ -211,141 +97,30 @@ pub fn eval_config_contents(
|
|||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Restore the current active file.
|
|
||||||
engine_state.file = prev_file;
|
|
||||||
|
|
||||||
// Merge the environment in case env vars changed in the config
|
// Merge the environment in case env vars changed in the config
|
||||||
if let Err(e) = engine_state.merge_env(stack) {
|
match nu_engine::env::current_dir(engine_state, stack) {
|
||||||
report_shell_error(engine_state, &e);
|
Ok(cwd) => {
|
||||||
|
if let Err(e) = engine_state.merge_env(stack, cwd) {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> Option<PathBuf> {
|
||||||
pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
|
nu_path::config_dir().map(|mut history_path| {
|
||||||
use nu_protocol::{
|
history_path.push(storage_path);
|
||||||
PluginExample, PluginIdentity, PluginRegistryItem, PluginRegistryItemData, PluginSignature,
|
history_path.push(match mode {
|
||||||
ShellError,
|
HistoryFileFormat::PlainText => HISTORY_FILE_TXT,
|
||||||
};
|
HistoryFileFormat::Sqlite => HISTORY_FILE_SQLITE,
|
||||||
use std::collections::BTreeMap;
|
|
||||||
|
|
||||||
let start_time = std::time::Instant::now();
|
|
||||||
|
|
||||||
let Ok(cwd) = engine_state.cwd_as_string(None) else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
|
|
||||||
let Some(config_dir) =
|
|
||||||
nu_path::nu_config_dir().and_then(|dir| nu_path::canonicalize_with(dir, &cwd).ok())
|
|
||||||
else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
|
|
||||||
let Ok(old_plugin_file_path) = nu_path::canonicalize_with(OLD_PLUGIN_FILE, &config_dir) else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
|
|
||||||
let old_contents = match std::fs::read(&old_plugin_file_path) {
|
|
||||||
Ok(old_contents) => old_contents,
|
|
||||||
Err(err) => {
|
|
||||||
report_shell_error(
|
|
||||||
engine_state,
|
|
||||||
&ShellError::GenericError {
|
|
||||||
error: "Can't read old plugin file to migrate".into(),
|
|
||||||
msg: "".into(),
|
|
||||||
span: None,
|
|
||||||
help: Some(err.to_string()),
|
|
||||||
inner: vec![],
|
|
||||||
},
|
|
||||||
);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Make a copy of the engine state, because we'll read the newly generated file
|
|
||||||
let mut engine_state = engine_state.clone();
|
|
||||||
let mut stack = Stack::new();
|
|
||||||
|
|
||||||
if eval_source(
|
|
||||||
&mut engine_state,
|
|
||||||
&mut stack,
|
|
||||||
&old_contents,
|
|
||||||
&old_plugin_file_path.to_string_lossy(),
|
|
||||||
PipelineData::Empty,
|
|
||||||
false,
|
|
||||||
) != 0
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Now that the plugin commands are loaded, we just have to generate the file
|
|
||||||
let mut contents = PluginRegistryFile::new();
|
|
||||||
|
|
||||||
let mut groups = BTreeMap::<PluginIdentity, Vec<PluginSignature>>::new();
|
|
||||||
|
|
||||||
for decl in engine_state.plugin_decls() {
|
|
||||||
if let Some(identity) = decl.plugin_identity() {
|
|
||||||
groups
|
|
||||||
.entry(identity.clone())
|
|
||||||
.or_default()
|
|
||||||
.push(PluginSignature {
|
|
||||||
sig: decl.signature(),
|
|
||||||
examples: decl
|
|
||||||
.examples()
|
|
||||||
.into_iter()
|
|
||||||
.map(PluginExample::from)
|
|
||||||
.collect(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (identity, commands) in groups {
|
|
||||||
contents.upsert_plugin(PluginRegistryItem {
|
|
||||||
name: identity.name().to_owned(),
|
|
||||||
filename: identity.filename().to_owned(),
|
|
||||||
shell: identity.shell().map(|p| p.to_owned()),
|
|
||||||
data: PluginRegistryItemData::Valid {
|
|
||||||
metadata: Default::default(),
|
|
||||||
commands,
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
}
|
history_path
|
||||||
|
})
|
||||||
// Write the new file
|
|
||||||
let new_plugin_file_path = config_dir.join(PLUGIN_FILE);
|
|
||||||
if let Err(err) = std::fs::File::create(&new_plugin_file_path)
|
|
||||||
.map_err(|e| e.into())
|
|
||||||
.and_then(|file| contents.write_to(file, None))
|
|
||||||
{
|
|
||||||
report_shell_error(
|
|
||||||
&engine_state,
|
|
||||||
&ShellError::GenericError {
|
|
||||||
error: "Failed to save migrated plugin file".into(),
|
|
||||||
msg: "".into(),
|
|
||||||
span: None,
|
|
||||||
help: Some("ensure `$nu.plugin-path` is writable".into()),
|
|
||||||
inner: vec![err],
|
|
||||||
},
|
|
||||||
);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if engine_state.is_interactive {
|
|
||||||
eprintln!(
|
|
||||||
"Your old plugin.nu file has been migrated to the new format: {}",
|
|
||||||
new_plugin_file_path.display()
|
|
||||||
);
|
|
||||||
eprintln!(
|
|
||||||
"The plugin.nu file has not been removed. If `plugin list` looks okay, \
|
|
||||||
you may do so manually."
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
perf!(
|
|
||||||
"migrate old plugin file",
|
|
||||||
start_time,
|
|
||||||
engine_state.get_config().use_ansi_coloring
|
|
||||||
);
|
|
||||||
true
|
|
||||||
}
|
}
|
||||||
|
@ -1,22 +1,13 @@
|
|||||||
use log::info;
|
use log::info;
|
||||||
|
use miette::Result;
|
||||||
use nu_engine::{convert_env_values, eval_block};
|
use nu_engine::{convert_env_values, eval_block};
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
|
use nu_protocol::engine::Stack;
|
||||||
|
use nu_protocol::report_error;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
cli_error::report_compile_error,
|
engine::{EngineState, StateWorkingSet},
|
||||||
debugger::WithoutDebug,
|
PipelineData, Spanned, Value,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
|
||||||
report_parse_error, report_parse_warning, PipelineData, ShellError, Spanned, Value,
|
|
||||||
};
|
};
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use crate::util::print_pipeline;
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct EvaluateCommandsOpts {
|
|
||||||
pub table_mode: Option<Value>,
|
|
||||||
pub error_style: Option<Value>,
|
|
||||||
pub no_newline: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Run a command (or commands) given to us by the user
|
/// Run a command (or commands) given to us by the user
|
||||||
pub fn evaluate_commands(
|
pub fn evaluate_commands(
|
||||||
@ -24,56 +15,29 @@ pub fn evaluate_commands(
|
|||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
opts: EvaluateCommandsOpts,
|
table_mode: Option<Value>,
|
||||||
) -> Result<(), ShellError> {
|
) -> Result<Option<i64>> {
|
||||||
let EvaluateCommandsOpts {
|
|
||||||
table_mode,
|
|
||||||
error_style,
|
|
||||||
no_newline,
|
|
||||||
} = opts;
|
|
||||||
|
|
||||||
// Handle the configured error style early
|
|
||||||
if let Some(e_style) = error_style {
|
|
||||||
match e_style.coerce_str()?.parse() {
|
|
||||||
Ok(e_style) => {
|
|
||||||
Arc::make_mut(&mut engine_state.config).error_style = e_style;
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
return Err(ShellError::GenericError {
|
|
||||||
error: "Invalid value for `--error-style`".into(),
|
|
||||||
msg: err.into(),
|
|
||||||
span: Some(e_style.span()),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Translate environment variables from Strings to Values
|
// Translate environment variables from Strings to Values
|
||||||
convert_env_values(engine_state, stack)?;
|
if let Some(e) = convert_env_values(engine_state, stack) {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &e);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
// Parse the source code
|
// Parse the source code
|
||||||
let (block, delta) = {
|
let (block, delta) = {
|
||||||
if let Some(ref t_mode) = table_mode {
|
if let Some(ref t_mode) = table_mode {
|
||||||
Arc::make_mut(&mut engine_state.config).table.mode =
|
let mut config = engine_state.get_config().clone();
|
||||||
t_mode.coerce_str()?.parse().unwrap_or_default();
|
config.table_mode = t_mode.as_string()?.parse().unwrap_or_default();
|
||||||
|
engine_state.set_config(config);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
|
let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
|
||||||
if let Some(warning) = working_set.parse_warnings.first() {
|
|
||||||
report_parse_warning(&working_set, warning);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_parse_error(&working_set, err);
|
report_error(&working_set, err);
|
||||||
std::process::exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(err) = working_set.compile_errors.first() {
|
|
||||||
report_compile_error(&working_set, err);
|
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -81,23 +45,29 @@ pub fn evaluate_commands(
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Update permanent state
|
// Update permanent state
|
||||||
engine_state.merge_delta(delta)?;
|
if let Err(err) = engine_state.merge_delta(delta) {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &err);
|
||||||
|
}
|
||||||
|
|
||||||
// Run the block
|
// Run the block
|
||||||
let pipeline = eval_block::<WithoutDebug>(engine_state, stack, &block, input)?;
|
let exit_code = match eval_block(engine_state, stack, &block, input, false, false) {
|
||||||
|
Ok(pipeline_data) => {
|
||||||
|
let mut config = engine_state.get_config().clone();
|
||||||
|
if let Some(t_mode) = table_mode {
|
||||||
|
config.table_mode = t_mode.as_string()?.parse().unwrap_or_default();
|
||||||
|
}
|
||||||
|
crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &mut config)
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
if let PipelineData::Value(Value::Error { error, .. }, ..) = pipeline {
|
report_error(&working_set, &err);
|
||||||
return Err(*error);
|
std::process::exit(1);
|
||||||
}
|
}
|
||||||
|
};
|
||||||
if let Some(t_mode) = table_mode {
|
|
||||||
Arc::make_mut(&mut engine_state.config).table.mode =
|
|
||||||
t_mode.coerce_str()?.parse().unwrap_or_default();
|
|
||||||
}
|
|
||||||
|
|
||||||
print_pipeline(engine_state, stack, pipeline, no_newline)?;
|
|
||||||
|
|
||||||
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
||||||
|
|
||||||
Ok(())
|
Ok(exit_code)
|
||||||
}
|
}
|
||||||
|
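The `evaluate_commands` changes above override the table mode and error style on a shared config via `Arc::make_mut`, which clones the config only if another reference is still alive. A toy sketch of that copy-on-write pattern; `Config` here is a stand-in for nushell's config type:

```rust
use std::sync::Arc;

// Toy stand-in for the shared shell configuration.
#[derive(Clone, Debug)]
struct Config {
    table_mode: String,
}

fn main() {
    let mut shared = Arc::new(Config { table_mode: "rounded".into() });
    let reader = Arc::clone(&shared); // a second holder forces a clone on write

    // Mutate our copy in place; the reader keeps seeing the old value.
    Arc::make_mut(&mut shared).table_mode = "basic".into();

    println!("writer sees {:?}, reader still sees {:?}", shared, reader);
}
```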
@ -1,60 +1,94 @@
|
|||||||
use crate::util::{eval_source, print_pipeline};
|
use crate::util::eval_source;
|
||||||
use log::{info, trace};
|
use log::info;
|
||||||
use nu_engine::{convert_env_values, eval_block};
|
use log::trace;
|
||||||
|
use miette::{IntoDiagnostic, Result};
|
||||||
|
use nu_engine::eval_block;
|
||||||
|
use nu_engine::{convert_env_values, current_dir};
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
use nu_path::canonicalize_with;
|
use nu_path::canonicalize_with;
|
||||||
|
use nu_protocol::report_error;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
cli_error::report_compile_error,
|
ast::Call,
|
||||||
debugger::WithoutDebug,
|
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
report_parse_error, report_parse_warning, PipelineData, ShellError, Span, Value,
|
Config, PipelineData, ShellError, Span, Value,
|
||||||
};
|
};
|
||||||
use std::sync::Arc;
|
use nu_utils::stdout_write_all_and_flush;
|
||||||
|
|
||||||
/// Entry point for evaluating a file.
|
/// Main function used when a file path is found as argument for nu
|
||||||
///
|
|
||||||
/// If the file contains a main command, it is invoked with `args` and the pipeline data from `input`;
|
|
||||||
/// otherwise, the pipeline data is forwarded to the first command in the file, and `args` are ignored.
|
|
||||||
pub fn evaluate_file(
|
pub fn evaluate_file(
|
||||||
path: String,
|
path: String,
|
||||||
args: &[String],
|
args: &[String],
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
) -> Result<(), ShellError> {
|
) -> Result<()> {
|
||||||
// Convert environment variables from Strings to Values and store them in the engine state.
|
// Translate environment variables from Strings to Values
|
||||||
convert_env_values(engine_state, stack)?;
|
if let Some(e) = convert_env_values(engine_state, stack) {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &e);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
let cwd = engine_state.cwd_as_string(Some(stack))?;
|
let cwd = current_dir(engine_state, stack)?;
|
||||||
|
|
||||||
let file_path =
|
let file_path = canonicalize_with(&path, cwd).unwrap_or_else(|e| {
|
||||||
canonicalize_with(&path, cwd).map_err(|err| ShellError::FileNotFoundCustom {
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
msg: format!("Could not access file '{path}': {err}"),
|
report_error(
|
||||||
span: Span::unknown(),
|
&working_set,
|
||||||
})?;
|
&ShellError::FileNotFoundCustom(
|
||||||
|
format!("Could not access file '{}': {:?}", path, e.to_string()),
|
||||||
let file_path_str = file_path
|
Span::unknown(),
|
||||||
.to_str()
|
|
||||||
.ok_or_else(|| ShellError::NonUtf8Custom {
|
|
||||||
msg: format!(
|
|
||||||
"Input file name '{}' is not valid UTF8",
|
|
||||||
file_path.to_string_lossy()
|
|
||||||
),
|
),
|
||||||
span: Span::unknown(),
|
);
|
||||||
})?;
|
std::process::exit(1);
|
||||||
|
});
|
||||||
|
|
||||||
let file = std::fs::read(&file_path).map_err(|err| ShellError::FileNotFoundCustom {
|
let file_path_str = file_path.to_str().unwrap_or_else(|| {
|
||||||
msg: format!("Could not read file '{file_path_str}': {err}"),
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
span: Span::unknown(),
|
report_error(
|
||||||
})?;
|
&working_set,
|
||||||
engine_state.file = Some(file_path.clone());
|
&ShellError::NonUtf8Custom(
|
||||||
|
format!(
|
||||||
|
"Input file name '{}' is not valid UTF8",
|
||||||
|
file_path.to_string_lossy()
|
||||||
|
),
|
||||||
|
Span::unknown(),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
std::process::exit(1);
|
||||||
|
});
|
||||||
|
|
||||||
let parent = file_path
|
let file = std::fs::read(&file_path)
|
||||||
.parent()
|
.into_diagnostic()
|
||||||
.ok_or_else(|| ShellError::FileNotFoundCustom {
|
.unwrap_or_else(|e| {
|
||||||
msg: format!("The file path '{file_path_str}' does not have a parent"),
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
span: Span::unknown(),
|
report_error(
|
||||||
})?;
|
&working_set,
|
||||||
|
&ShellError::FileNotFoundCustom(
|
||||||
|
format!(
|
||||||
|
"Could not read file '{}': {:?}",
|
||||||
|
file_path_str,
|
||||||
|
e.to_string()
|
||||||
|
),
|
||||||
|
Span::unknown(),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
std::process::exit(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
engine_state.start_in_file(Some(file_path_str));
|
||||||
|
|
||||||
|
let parent = file_path.parent().unwrap_or_else(|| {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(
|
||||||
|
&working_set,
|
||||||
|
&ShellError::FileNotFoundCustom(
|
||||||
|
format!("The file path '{file_path_str}' does not have a parent"),
|
||||||
|
Span::unknown(),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
std::process::exit(1);
|
||||||
|
});
|
||||||
|
|
||||||
stack.add_env_var(
|
stack.add_env_var(
|
||||||
"FILE_PWD".to_string(),
|
"FILE_PWD".to_string(),
|
||||||
@ -64,36 +98,21 @@ pub fn evaluate_file(
|
|||||||
"CURRENT_FILE".to_string(),
|
"CURRENT_FILE".to_string(),
|
||||||
Value::string(file_path.to_string_lossy(), Span::unknown()),
|
Value::string(file_path.to_string_lossy(), Span::unknown()),
|
||||||
);
|
);
|
||||||
stack.add_env_var(
|
|
||||||
"PROCESS_PATH".to_string(),
|
|
||||||
Value::string(path, Span::unknown()),
|
|
||||||
);
|
|
||||||
|
|
||||||
let source_filename = file_path
|
let source_filename = file_path
|
||||||
.file_name()
|
.file_name()
|
||||||
.expect("internal error: missing filename");
|
.expect("internal error: script missing filename");
|
||||||
|
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
trace!("parsing file: {}", file_path_str);
|
trace!("parsing file: {}", file_path_str);
|
||||||
let block = parse(&mut working_set, Some(file_path_str), &file, false);
|
let block = parse(&mut working_set, Some(file_path_str), &file, false);
|
||||||
|
|
||||||
if let Some(warning) = working_set.parse_warnings.first() {
|
|
||||||
report_parse_warning(&working_set, warning);
|
|
||||||
}
|
|
||||||
|
|
||||||
// If any parse errors were found, report the first error and exit.
|
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_parse_error(&working_set, err);
|
report_error(&working_set, err);
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(err) = working_set.compile_errors.first() {
|
for block in &mut working_set.delta.blocks {
|
||||||
report_compile_error(&working_set, err);
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Look for blocks whose name starts with "main" and replace it with the filename.
|
|
||||||
for block in working_set.delta.blocks.iter_mut().map(Arc::make_mut) {
|
|
||||||
if block.signature.name == "main" {
|
if block.signature.name == "main" {
|
||||||
block.signature.name = source_filename.to_string_lossy().to_string();
|
block.signature.name = source_filename.to_string_lossy().to_string();
|
||||||
} else if block.signature.name.starts_with("main ") {
|
} else if block.signature.name.starts_with("main ") {
|
||||||
@ -102,45 +121,138 @@ pub fn evaluate_file(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Merge the changes into the engine state.
|
let _ = engine_state.merge_delta(working_set.delta);
|
||||||
engine_state.merge_delta(working_set.delta)?;
|
|
||||||
|
|
||||||
// Check if the file contains a main command.
|
if engine_state.find_decl(b"main", &[]).is_some() {
|
||||||
let exit_code = if engine_state.find_decl(b"main", &[]).is_some() {
|
|
||||||
// Evaluate the file, but don't run main yet.
|
|
||||||
let pipeline =
|
|
||||||
match eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty()) {
|
|
||||||
Ok(data) => data,
|
|
||||||
Err(ShellError::Return { .. }) => {
|
|
||||||
// Allow early return before main is run.
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
Err(err) => return Err(err),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Print the pipeline output of the last command of the file.
|
|
||||||
print_pipeline(engine_state, stack, pipeline, true)?;
|
|
||||||
|
|
||||||
// Invoke the main command with arguments.
|
|
||||||
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
|
|
||||||
let args = format!("main {}", args.join(" "));
|
let args = format!("main {}", args.join(" "));
|
||||||
eval_source(
|
|
||||||
|
let pipeline_data = eval_block(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
&block,
|
||||||
|
PipelineData::empty(),
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
let pipeline_data = match pipeline_data {
|
||||||
|
Err(ShellError::Return(_, _)) => {
|
||||||
|
// allows early exists before `main` is run.
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
x => x,
|
||||||
|
}
|
||||||
|
.unwrap_or_else(|e| {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &e);
|
||||||
|
std::process::exit(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
let result = pipeline_data.print(engine_state, stack, true, false);
|
||||||
|
|
||||||
|
match result {
|
||||||
|
Err(err) => {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
|
report_error(&working_set, &err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Ok(exit_code) => {
|
||||||
|
if exit_code != 0 {
|
||||||
|
std::process::exit(exit_code as i32);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !eval_source(
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
args.as_bytes(),
|
args.as_bytes(),
|
||||||
"<commandline>",
|
"<commandline>",
|
||||||
input,
|
input,
|
||||||
true,
|
true,
|
||||||
)
|
) {
|
||||||
} else {
|
std::process::exit(1);
|
||||||
eval_source(engine_state, stack, &file, file_path_str, input, true)
|
}
|
||||||
};
|
} else if !eval_source(engine_state, stack, &file, file_path_str, input, true) {
|
||||||
|
std::process::exit(1);
|
||||||
if exit_code != 0 {
|
|
||||||
std::process::exit(exit_code);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn print_table_or_error(
|
||||||
|
engine_state: &mut EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
mut pipeline_data: PipelineData,
|
||||||
|
config: &mut Config,
|
||||||
|
) -> Option<i64> {
|
||||||
|
let exit_code = match &mut pipeline_data {
|
||||||
|
PipelineData::ExternalStream { exit_code, .. } => exit_code.take(),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Change the engine_state config to use the passed in configuration
|
||||||
|
engine_state.set_config(config.clone());
|
||||||
|
|
||||||
|
if let PipelineData::Value(Value::Error { error, .. }, ..) = &pipeline_data {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &**error);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(decl_id) = engine_state.find_decl("table".as_bytes(), &[]) {
|
||||||
|
let command = engine_state.get_decl(decl_id);
|
||||||
|
if command.get_block_id().is_some() {
|
||||||
|
print_or_exit(pipeline_data, engine_state, config);
|
||||||
|
} else {
|
||||||
|
// The final call on table command, it's ok to set redirect_output to false.
|
||||||
|
let mut call = Call::new(Span::new(0, 0));
|
||||||
|
call.redirect_stdout = false;
|
||||||
|
let table = command.run(engine_state, stack, &call, pipeline_data);
|
||||||
|
|
||||||
|
match table {
|
||||||
|
Ok(table) => {
|
||||||
|
print_or_exit(table, engine_state, config);
|
||||||
|
}
|
||||||
|
Err(error) => {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &error);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
print_or_exit(pipeline_data, engine_state, config);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make sure everything has finished
|
||||||
|
if let Some(exit_code) = exit_code {
|
||||||
|
let mut exit_code: Vec<_> = exit_code.into_iter().collect();
|
||||||
|
exit_code
|
||||||
|
.pop()
|
||||||
|
.and_then(|last_exit_code| match last_exit_code {
|
||||||
|
Value::Int { val: code, .. } => Some(code),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn print_or_exit(pipeline_data: PipelineData, engine_state: &mut EngineState, config: &Config) {
|
||||||
|
for item in pipeline_data {
|
||||||
|
if let Value::Error { error, .. } = item {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
|
report_error(&working_set, &*error);
|
||||||
|
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
let out = item.into_string("\n", config) + "\n";
|
||||||
|
let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{err}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -1,4 +1,3 @@
|
|||||||
#![doc = include_str!("../README.md")]
|
|
||||||
mod commands;
|
mod commands;
|
||||||
mod completions;
|
mod completions;
|
||||||
mod config_files;
|
mod config_files;
|
||||||
@ -16,11 +15,12 @@ mod util;
|
|||||||
mod validation;
|
mod validation;
|
||||||
|
|
||||||
pub use commands::add_cli_context;
|
pub use commands::add_cli_context;
|
||||||
pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind};
|
pub use completions::{FileCompletion, NuCompleter};
|
||||||
pub use config_files::eval_config_contents;
|
pub use config_files::eval_config_contents;
|
||||||
pub use eval_cmds::{evaluate_commands, EvaluateCommandsOpts};
|
pub use eval_cmds::evaluate_commands;
|
||||||
pub use eval_file::evaluate_file;
|
pub use eval_file::evaluate_file;
|
||||||
pub use menus::NuHelpCompleter;
|
pub use menus::{DescriptionMenu, NuHelpCompleter};
|
||||||
|
pub use nu_cmd_base::util::get_init_cwd;
|
||||||
pub use nu_highlight::NuHighlight;
|
pub use nu_highlight::NuHighlight;
|
||||||
pub use print::Print;
|
pub use print::Print;
|
||||||
pub use prompt::NushellPrompt;
|
pub use prompt::NushellPrompt;
|
||||||
@ -32,6 +32,4 @@ pub use validation::NuValidator;
|
|||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
pub use config_files::add_plugin_file;
|
pub use config_files::add_plugin_file;
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
pub use config_files::migrate_old_plugin_file;
|
|
||||||
#[cfg(feature = "plugin")]
|
|
||||||
pub use config_files::read_plugin_file;
|
pub use config_files::read_plugin_file;
|
||||||
|
730
crates/nu-cli/src/menus/description_menu.rs
Normal file
730
crates/nu-cli/src/menus/description_menu.rs
Normal file
@ -0,0 +1,730 @@
|
|||||||
|
use {
|
||||||
|
nu_ansi_term::{ansi::RESET, Style},
|
||||||
|
reedline::{
|
||||||
|
menu_functions::string_difference, Completer, Editor, Menu, MenuEvent, MenuTextStyle,
|
||||||
|
Painter, Suggestion, UndoBehavior,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Default values used as reference for the menu. These values are set during
|
||||||
|
/// the initial declaration of the menu and are always kept as reference for the
|
||||||
|
/// changeable [`WorkingDetails`]
|
||||||
|
struct DefaultMenuDetails {
|
||||||
|
/// Number of columns that the menu will have
|
||||||
|
pub columns: u16,
|
||||||
|
/// Column width
|
||||||
|
pub col_width: Option<usize>,
|
||||||
|
/// Column padding
|
||||||
|
pub col_padding: usize,
|
||||||
|
/// Number of rows for commands
|
||||||
|
pub selection_rows: u16,
|
||||||
|
/// Number of rows allowed to display the description
|
||||||
|
pub description_rows: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for DefaultMenuDetails {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
columns: 4,
|
||||||
|
col_width: None,
|
||||||
|
col_padding: 2,
|
||||||
|
selection_rows: 4,
|
||||||
|
description_rows: 10,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Represents the actual column conditions of the menu. These conditions change
|
||||||
|
/// since they need to accommodate possible different line sizes for the column values
|
||||||
|
#[derive(Default)]
|
||||||
|
struct WorkingDetails {
|
||||||
|
/// Number of columns that the menu will have
|
||||||
|
pub columns: u16,
|
||||||
|
/// Column width
|
||||||
|
pub col_width: usize,
|
||||||
|
/// Number of rows for description
|
||||||
|
pub description_rows: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Completion menu definition
|
||||||
|
pub struct DescriptionMenu {
|
||||||
|
/// Menu name
|
||||||
|
name: String,
|
||||||
|
/// Menu status
|
||||||
|
active: bool,
|
||||||
|
/// Menu coloring
|
||||||
|
color: MenuTextStyle,
|
||||||
|
/// Default column details that are set when creating the menu
|
||||||
|
/// These values are the reference for the working details
|
||||||
|
default_details: DefaultMenuDetails,
|
||||||
|
/// Number of minimum rows that are displayed when
|
||||||
|
/// the required lines is larger than the available lines
|
||||||
|
min_rows: u16,
|
||||||
|
/// Working column details keep changing based on the collected values
|
||||||
|
working_details: WorkingDetails,
|
||||||
|
/// Menu cached values
|
||||||
|
values: Vec<Suggestion>,
|
||||||
|
/// column position of the cursor. Starts from 0
|
||||||
|
col_pos: u16,
|
||||||
|
/// row position in the menu. Starts from 0
|
||||||
|
row_pos: u16,
|
||||||
|
/// Menu marker when active
|
||||||
|
marker: String,
|
||||||
|
/// Event sent to the menu
|
||||||
|
event: Option<MenuEvent>,
|
||||||
|
/// String collected after the menu is activated
|
||||||
|
input: Option<String>,
|
||||||
|
/// Examples to select
|
||||||
|
examples: Vec<String>,
|
||||||
|
/// Example index
|
||||||
|
example_index: Option<usize>,
|
||||||
|
/// Examples may not be shown if there is not enough space in the screen
|
||||||
|
show_examples: bool,
|
||||||
|
/// Skipped description rows
|
||||||
|
skipped_rows: usize,
|
||||||
|
/// Calls the completer using only the line buffer difference difference
|
||||||
|
/// after the menu was activated
|
||||||
|
only_buffer_difference: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for DescriptionMenu {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
name: "description_menu".to_string(),
|
||||||
|
active: false,
|
||||||
|
color: MenuTextStyle::default(),
|
||||||
|
default_details: DefaultMenuDetails::default(),
|
||||||
|
min_rows: 3,
|
||||||
|
working_details: WorkingDetails::default(),
|
||||||
|
values: Vec::new(),
|
||||||
|
col_pos: 0,
|
||||||
|
row_pos: 0,
|
||||||
|
marker: "? ".to_string(),
|
||||||
|
event: None,
|
||||||
|
input: None,
|
||||||
|
examples: Vec::new(),
|
||||||
|
example_index: None,
|
||||||
|
show_examples: true,
|
||||||
|
skipped_rows: 0,
|
||||||
|
only_buffer_difference: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Menu configuration
|
||||||
|
impl DescriptionMenu {
|
||||||
|
/// Menu builder with new name
|
||||||
|
pub fn with_name(mut self, name: &str) -> Self {
|
||||||
|
self.name = name.into();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new value for text style
|
||||||
|
pub fn with_text_style(mut self, text_style: Style) -> Self {
|
||||||
|
self.color.text_style = text_style;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new value for text style
|
||||||
|
pub fn with_selected_text_style(mut self, selected_text_style: Style) -> Self {
|
||||||
|
self.color.selected_text_style = selected_text_style;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new value for text style
|
||||||
|
pub fn with_description_text_style(mut self, description_text_style: Style) -> Self {
|
||||||
|
self.color.description_style = description_text_style;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new columns value
|
||||||
|
pub fn with_columns(mut self, columns: u16) -> Self {
|
||||||
|
self.default_details.columns = columns;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new column width value
|
||||||
|
pub fn with_column_width(mut self, col_width: Option<usize>) -> Self {
|
||||||
|
self.default_details.col_width = col_width;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new column width value
|
||||||
|
pub fn with_column_padding(mut self, col_padding: usize) -> Self {
|
||||||
|
self.default_details.col_padding = col_padding;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new selection rows value
|
||||||
|
pub fn with_selection_rows(mut self, selection_rows: u16) -> Self {
|
||||||
|
self.default_details.selection_rows = selection_rows;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new description rows value
|
||||||
|
pub fn with_description_rows(mut self, description_rows: usize) -> Self {
|
||||||
|
self.default_details.description_rows = description_rows;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with marker
|
||||||
|
pub fn with_marker(mut self, marker: String) -> Self {
|
||||||
|
self.marker = marker;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu builder with new only buffer difference
|
||||||
|
pub fn with_only_buffer_difference(mut self, only_buffer_difference: bool) -> Self {
|
||||||
|
self.only_buffer_difference = only_buffer_difference;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Menu functionality
|
||||||
|
impl DescriptionMenu {
|
||||||
|
/// Move menu cursor to the next element
|
||||||
|
fn move_next(&mut self) {
|
||||||
|
let mut new_col = self.col_pos + 1;
|
||||||
|
let mut new_row = self.row_pos;
|
||||||
|
|
||||||
|
if new_col >= self.get_cols() {
|
||||||
|
new_row += 1;
|
||||||
|
new_col = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
if new_row >= self.get_rows() {
|
||||||
|
new_row = 0;
|
||||||
|
new_col = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
let position = new_row * self.get_cols() + new_col;
|
||||||
|
if position >= self.get_values().len() as u16 {
|
||||||
|
self.reset_position();
|
||||||
|
} else {
|
||||||
|
self.col_pos = new_col;
|
||||||
|
self.row_pos = new_row;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Move menu cursor to the previous element
|
||||||
|
fn move_previous(&mut self) {
|
||||||
|
let new_col = self.col_pos.checked_sub(1);
|
||||||
|
|
||||||
|
let (new_col, new_row) = match new_col {
|
||||||
|
Some(col) => (col, self.row_pos),
|
||||||
|
None => match self.row_pos.checked_sub(1) {
|
||||||
|
Some(row) => (self.get_cols().saturating_sub(1), row),
|
||||||
|
None => (
|
||||||
|
self.get_cols().saturating_sub(1),
|
||||||
|
self.get_rows().saturating_sub(1),
|
||||||
|
),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
let position = new_row * self.get_cols() + new_col;
|
||||||
|
if position >= self.get_values().len() as u16 {
|
||||||
|
self.col_pos = (self.get_values().len() as u16 % self.get_cols()).saturating_sub(1);
|
||||||
|
self.row_pos = self.get_rows().saturating_sub(1);
|
||||||
|
} else {
|
||||||
|
self.col_pos = new_col;
|
||||||
|
self.row_pos = new_row;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu index based on column and row position
|
||||||
|
fn index(&self) -> usize {
|
||||||
|
let index = self.row_pos * self.get_cols() + self.col_pos;
|
||||||
|
index as usize
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get selected value from the menu
|
||||||
|
fn get_value(&self) -> Option<Suggestion> {
|
||||||
|
self.get_values().get(self.index()).cloned()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Calculates how many rows the Menu will use
|
||||||
|
fn get_rows(&self) -> u16 {
|
||||||
|
let values = self.get_values().len() as u16;
|
||||||
|
|
||||||
|
if values == 0 {
|
||||||
|
// When the values are empty the no_records_msg is shown, taking 1 line
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
let rows = values / self.get_cols();
|
||||||
|
if values % self.get_cols() != 0 {
|
||||||
|
rows + 1
|
||||||
|
} else {
|
||||||
|
rows
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns working details col width
|
||||||
|
fn get_width(&self) -> usize {
|
||||||
|
self.working_details.col_width
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reset menu position
|
||||||
|
fn reset_position(&mut self) {
|
||||||
|
self.col_pos = 0;
|
||||||
|
self.row_pos = 0;
|
||||||
|
self.skipped_rows = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn no_records_msg(&self, use_ansi_coloring: bool) -> String {
|
||||||
|
let msg = "TYPE TO START SEARCH";
|
||||||
|
if use_ansi_coloring {
|
||||||
|
format!(
|
||||||
|
"{}{}{}",
|
||||||
|
self.color.selected_text_style.prefix(),
|
||||||
|
msg,
|
||||||
|
RESET
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
msg.to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns working details columns
|
||||||
|
fn get_cols(&self) -> u16 {
|
||||||
|
self.working_details.columns.max(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// End of line for menu
|
||||||
|
fn end_of_line(&self, column: u16, index: usize) -> &str {
|
||||||
|
let is_last = index == self.values.len().saturating_sub(1);
|
||||||
|
if column == self.get_cols().saturating_sub(1) || is_last {
|
||||||
|
"\r\n"
|
||||||
|
} else {
|
||||||
|
""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update list of examples from the actual value
|
||||||
|
fn update_examples(&mut self) {
|
||||||
|
self.examples = self
|
||||||
|
.get_value()
|
||||||
|
.and_then(|suggestion| suggestion.extra)
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
self.example_index = None;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates default string that represents one suggestion from the menu
|
||||||
|
fn create_entry_string(
|
||||||
|
&self,
|
||||||
|
suggestion: &Suggestion,
|
||||||
|
index: usize,
|
||||||
|
column: u16,
|
||||||
|
empty_space: usize,
|
||||||
|
use_ansi_coloring: bool,
|
||||||
|
) -> String {
|
||||||
|
if use_ansi_coloring {
|
||||||
|
if index == self.index() {
|
||||||
|
format!(
|
||||||
|
"{}{}{}{:>empty$}{}",
|
||||||
|
self.color.selected_text_style.prefix(),
|
||||||
|
&suggestion.value,
|
||||||
|
RESET,
|
||||||
|
"",
|
||||||
|
self.end_of_line(column, index),
|
||||||
|
empty = empty_space,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
format!(
|
||||||
|
"{}{}{}{:>empty$}{}",
|
||||||
|
self.color.text_style.prefix(),
|
||||||
|
&suggestion.value,
|
||||||
|
RESET,
|
||||||
|
"",
|
||||||
|
self.end_of_line(column, index),
|
||||||
|
empty = empty_space,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// If no ansi coloring is found, then the selection word is
|
||||||
|
// the line in uppercase
|
||||||
|
let (marker, empty_space) = if index == self.index() {
|
||||||
|
(">", empty_space.saturating_sub(1))
|
||||||
|
} else {
|
||||||
|
("", empty_space)
|
||||||
|
};
|
||||||
|
|
||||||
|
let line = format!(
|
||||||
|
"{}{}{:>empty$}{}",
|
||||||
|
marker,
|
||||||
|
&suggestion.value,
|
||||||
|
"",
|
||||||
|
self.end_of_line(column, index),
|
||||||
|
empty = empty_space,
|
||||||
|
);
|
||||||
|
|
||||||
|
if index == self.index() {
|
||||||
|
line.to_uppercase()
|
||||||
|
} else {
|
||||||
|
line
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Description string with color
|
||||||
|
fn create_description_string(&self, use_ansi_coloring: bool) -> String {
|
||||||
|
let description = self
|
||||||
|
.get_value()
|
||||||
|
.and_then(|suggestion| suggestion.description)
|
||||||
|
.unwrap_or_default()
|
||||||
|
.lines()
|
||||||
|
.skip(self.skipped_rows)
|
||||||
|
.take(self.working_details.description_rows)
|
||||||
|
.collect::<Vec<&str>>()
|
||||||
|
.join("\r\n");
|
||||||
|
|
||||||
|
if use_ansi_coloring && !description.is_empty() {
|
||||||
|
format!(
|
||||||
|
"{}{}{}",
|
||||||
|
self.color.description_style.prefix(),
|
||||||
|
description,
|
||||||
|
RESET,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
description
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Selectable list of examples from the actual value
|
||||||
|
fn create_example_string(&self, use_ansi_coloring: bool) -> String {
|
||||||
|
if !self.show_examples {
|
||||||
|
return "".into();
|
||||||
|
}
|
||||||
|
|
||||||
|
let examples: String = self
|
||||||
|
.examples
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(index, example)| {
|
||||||
|
if let Some(example_index) = self.example_index {
|
||||||
|
if index == example_index {
|
||||||
|
format!(
|
||||||
|
" {}{}{}\r\n",
|
||||||
|
self.color.selected_text_style.prefix(),
|
||||||
|
example,
|
||||||
|
RESET
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
format!(" {example}\r\n")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
format!(" {example}\r\n")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if examples.is_empty() {
|
||||||
|
"".into()
|
||||||
|
} else if use_ansi_coloring {
|
||||||
|
format!(
|
||||||
|
"{}\r\n\r\nExamples:\r\n{}{}",
|
||||||
|
self.color.description_style.prefix(),
|
||||||
|
RESET,
|
||||||
|
examples,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
format!("\r\n\r\nExamples:\r\n{examples}",)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Menu for DescriptionMenu {
|
||||||
|
/// Menu name
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
self.name.as_str()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Menu indicator
|
||||||
|
fn indicator(&self) -> &str {
|
||||||
|
self.marker.as_str()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Deactivates context menu
|
||||||
|
fn is_active(&self) -> bool {
|
||||||
|
self.active
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The menu stays active even with one record
|
||||||
|
fn can_quick_complete(&self) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The menu does not need to partially complete
|
||||||
|
fn can_partially_complete(
|
||||||
|
&mut self,
|
||||||
|
_values_updated: bool,
|
||||||
|
_editor: &mut Editor,
|
||||||
|
_completer: &mut dyn Completer,
|
||||||
|
) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Selects what type of event happened with the menu
|
||||||
|
fn menu_event(&mut self, event: MenuEvent) {
|
||||||
|
match &event {
|
||||||
|
MenuEvent::Activate(_) => self.active = true,
|
||||||
|
MenuEvent::Deactivate => {
|
||||||
|
self.active = false;
|
||||||
|
self.input = None;
|
||||||
|
self.values = Vec::new();
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
};
|
||||||
|
|
||||||
|
self.event = Some(event);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Updates menu values
|
||||||
|
fn update_values(&mut self, editor: &mut Editor, completer: &mut dyn Completer) {
|
||||||
|
if self.only_buffer_difference {
|
||||||
|
if let Some(old_string) = &self.input {
|
||||||
|
let (start, input) = string_difference(editor.get_buffer(), old_string);
|
||||||
|
if !input.is_empty() {
|
||||||
|
self.reset_position();
|
||||||
|
self.values = completer.complete(input, start);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let trimmed_buffer = editor.get_buffer().replace('\n', " ");
|
||||||
|
self.values = completer.complete(
|
||||||
|
trimmed_buffer.as_str(),
|
||||||
|
editor.line_buffer().insertion_point(),
|
||||||
|
);
|
||||||
|
self.reset_position();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The working details for the menu changes based on the size of the lines
|
||||||
|
/// collected from the completer
|
||||||
|
fn update_working_details(
|
||||||
|
&mut self,
|
||||||
|
editor: &mut Editor,
|
||||||
|
completer: &mut dyn Completer,
|
||||||
|
painter: &Painter,
|
||||||
|
) {
|
||||||
|
if let Some(event) = self.event.take() {
|
||||||
|
// Updating all working parameters from the menu before executing any of the
|
||||||
|
// possible event
|
||||||
|
let max_width = self.get_values().iter().fold(0, |acc, suggestion| {
|
||||||
|
let str_len = suggestion.value.len() + self.default_details.col_padding;
|
||||||
|
if str_len > acc {
|
||||||
|
str_len
|
||||||
|
} else {
|
||||||
|
acc
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// If no default width is found, then the total screen width is used to estimate
|
||||||
|
// the column width based on the default number of columns
|
||||||
|
let default_width = if let Some(col_width) = self.default_details.col_width {
|
||||||
|
col_width
|
||||||
|
} else {
|
||||||
|
let col_width = painter.screen_width() / self.default_details.columns;
|
||||||
|
col_width as usize
|
||||||
|
};
|
||||||
|
|
||||||
|
// Adjusting the working width of the column based the max line width found
|
||||||
|
// in the menu values
|
||||||
|
if max_width > default_width {
|
||||||
|
self.working_details.col_width = max_width;
|
||||||
|
} else {
|
||||||
|
self.working_details.col_width = default_width;
|
||||||
|
};
|
||||||
|
|
||||||
|
// The working columns is adjusted based on possible number of columns
|
||||||
|
// that could be fitted in the screen with the calculated column width
|
||||||
|
let possible_cols = painter.screen_width() / self.working_details.col_width as u16;
|
||||||
|
if possible_cols > self.default_details.columns {
|
||||||
|
self.working_details.columns = self.default_details.columns.max(1);
|
||||||
|
} else {
|
||||||
|
self.working_details.columns = possible_cols;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Updating the working rows to display the description
|
||||||
|
if self.menu_required_lines(painter.screen_width()) <= painter.remaining_lines() {
|
||||||
|
self.working_details.description_rows = self.default_details.description_rows;
|
||||||
|
self.show_examples = true;
|
||||||
|
} else {
|
||||||
|
self.working_details.description_rows = painter
|
||||||
|
.remaining_lines()
|
||||||
|
.saturating_sub(self.default_details.selection_rows + 1)
|
||||||
|
as usize;
|
||||||
|
|
||||||
|
self.show_examples = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
match event {
|
||||||
|
MenuEvent::Activate(_) => {
|
||||||
|
self.reset_position();
|
||||||
|
self.input = Some(editor.get_buffer().to_string());
|
||||||
|
self.update_values(editor, completer);
|
||||||
|
}
|
||||||
|
MenuEvent::Deactivate => self.active = false,
|
||||||
|
MenuEvent::Edit(_) => {
|
||||||
|
self.reset_position();
|
||||||
|
self.update_values(editor, completer);
|
||||||
|
self.update_examples()
|
||||||
|
}
|
||||||
|
MenuEvent::NextElement => {
|
||||||
|
self.skipped_rows = 0;
|
||||||
|
self.move_next();
|
||||||
|
self.update_examples();
|
||||||
|
}
|
||||||
|
MenuEvent::PreviousElement => {
|
||||||
|
self.skipped_rows = 0;
|
||||||
|
self.move_previous();
|
||||||
|
self.update_examples();
|
||||||
|
}
|
||||||
|
MenuEvent::MoveUp => {
|
||||||
|
if let Some(example_index) = self.example_index {
|
||||||
|
if let Some(index) = example_index.checked_sub(1) {
|
||||||
|
self.example_index = Some(index);
|
||||||
|
} else {
|
||||||
|
self.example_index = Some(self.examples.len().saturating_sub(1));
|
||||||
|
}
|
||||||
|
} else if !self.examples.is_empty() {
|
||||||
|
self.example_index = Some(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
MenuEvent::MoveDown => {
|
||||||
|
if let Some(example_index) = self.example_index {
|
||||||
|
let index = example_index + 1;
|
||||||
|
if index < self.examples.len() {
|
||||||
|
self.example_index = Some(index);
|
||||||
|
} else {
|
||||||
|
self.example_index = Some(0);
|
||||||
|
}
|
||||||
|
} else if !self.examples.is_empty() {
|
||||||
|
self.example_index = Some(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
MenuEvent::MoveLeft => self.skipped_rows = self.skipped_rows.saturating_sub(1),
|
||||||
|
MenuEvent::MoveRight => {
|
||||||
|
let skipped = self.skipped_rows + 1;
|
||||||
|
let description_rows = self
|
||||||
|
.get_value()
|
||||||
|
.and_then(|suggestion| suggestion.description)
|
||||||
|
.unwrap_or_default()
|
||||||
|
.lines()
|
||||||
|
.count();
|
||||||
|
|
||||||
|
let allowed_skips =
|
||||||
|
description_rows.saturating_sub(self.working_details.description_rows);
|
||||||
|
|
||||||
|
if skipped < allowed_skips {
|
||||||
|
self.skipped_rows = skipped;
|
||||||
|
} else {
|
||||||
|
self.skipped_rows = allowed_skips;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
MenuEvent::PreviousPage | MenuEvent::NextPage => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The buffer gets replaced in the Span location
|
||||||
|
fn replace_in_buffer(&self, editor: &mut Editor) {
|
||||||
|
if let Some(Suggestion { value, span, .. }) = self.get_value() {
|
||||||
|
let start = span.start.min(editor.line_buffer().len());
|
||||||
|
let end = span.end.min(editor.line_buffer().len());
|
||||||
|
|
||||||
|
let replacement = if let Some(example_index) = self.example_index {
|
||||||
|
self.examples
|
||||||
|
.get(example_index)
|
||||||
|
.expect("the example index is always checked")
|
||||||
|
} else {
|
||||||
|
&value
|
||||||
|
};
|
||||||
|
|
||||||
|
editor.edit_buffer(
|
||||||
|
|lb| {
|
||||||
|
lb.replace_range(start..end, replacement);
|
||||||
|
let mut offset = lb.insertion_point();
|
||||||
|
offset += lb
|
||||||
|
.len()
|
||||||
|
.saturating_sub(end.saturating_sub(start))
|
||||||
|
.saturating_sub(start);
|
||||||
|
lb.set_insertion_point(offset);
|
||||||
|
},
|
||||||
|
UndoBehavior::CreateUndoPoint,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Minimum rows that should be displayed by the menu
|
||||||
|
fn min_rows(&self) -> u16 {
|
||||||
|
self.get_rows().min(self.min_rows)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gets values from filler that will be displayed in the menu
|
||||||
|
fn get_values(&self) -> &[Suggestion] {
|
||||||
|
&self.values
|
||||||
|
}
|
||||||
|
|
||||||
|
fn menu_required_lines(&self, _terminal_columns: u16) -> u16 {
|
||||||
|
let example_lines = self
|
||||||
|
.examples
|
||||||
|
.iter()
|
||||||
|
.fold(0, |acc, example| example.lines().count() + acc);
|
||||||
|
|
||||||
|
self.default_details.selection_rows
|
||||||
|
+ self.default_details.description_rows as u16
|
||||||
|
+ example_lines as u16
|
||||||
|
+ 3
|
||||||
|
}
|
||||||
|
|
||||||
|
fn menu_string(&self, _available_lines: u16, use_ansi_coloring: bool) -> String {
|
||||||
|
if self.get_values().is_empty() {
|
||||||
|
self.no_records_msg(use_ansi_coloring)
|
||||||
|
} else {
|
||||||
|
// The skip values represent the number of lines that should be skipped
|
||||||
|
// while printing the menu
|
||||||
|
let available_lines = self.default_details.selection_rows;
|
||||||
|
let skip_values = if self.row_pos >= available_lines {
|
||||||
|
let skip_lines = self.row_pos.saturating_sub(available_lines) + 1;
|
||||||
|
(skip_lines * self.get_cols()) as usize
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
};
|
||||||
|
|
||||||
|
// It seems that crossterm prefers to have a complete string ready to be printed
|
||||||
|
// rather than looping through the values and printing multiple things
|
||||||
|
// This reduces the flickering when printing the menu
|
||||||
|
let available_values = (available_lines * self.get_cols()) as usize;
|
||||||
|
let selection_values: String = self
|
||||||
|
.get_values()
|
||||||
|
.iter()
|
||||||
|
.skip(skip_values)
|
||||||
|
.take(available_values)
|
||||||
|
.enumerate()
|
||||||
|
.map(|(index, suggestion)| {
|
||||||
|
// Correcting the enumerate index based on the number of skipped values
|
||||||
|
let index = index + skip_values;
|
||||||
|
let column = index as u16 % self.get_cols();
|
||||||
|
let empty_space = self.get_width().saturating_sub(suggestion.value.len());
|
||||||
|
|
||||||
|
self.create_entry_string(
|
||||||
|
suggestion,
|
||||||
|
index,
|
||||||
|
column,
|
||||||
|
empty_space,
|
||||||
|
use_ansi_coloring,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
format!(
|
||||||
|
"{}{}{}",
|
||||||
|
selection_values,
|
||||||
|
self.create_description_string(use_ansi_coloring),
|
||||||
|
self.create_example_string(use_ansi_coloring)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,73 +1,63 @@
|
|||||||
use nu_engine::documentation::{get_flags_section, HelpStyle};
|
use nu_engine::documentation::get_flags_section;
|
||||||
use nu_protocol::{engine::EngineState, levenshtein_distance, Config};
|
use nu_protocol::{engine::EngineState, levenshtein_distance};
|
||||||
use nu_utils::IgnoreCaseExt;
|
use nu_utils::IgnoreCaseExt;
|
||||||
use reedline::{Completer, Suggestion};
|
use reedline::{Completer, Suggestion};
|
||||||
use std::{fmt::Write, sync::Arc};
|
use std::fmt::Write;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
pub struct NuHelpCompleter {
|
pub struct NuHelpCompleter(Arc<EngineState>);
|
||||||
engine_state: Arc<EngineState>,
|
|
||||||
config: Arc<Config>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl NuHelpCompleter {
|
impl NuHelpCompleter {
|
||||||
pub fn new(engine_state: Arc<EngineState>, config: Arc<Config>) -> Self {
|
pub fn new(engine_state: Arc<EngineState>) -> Self {
|
||||||
Self {
|
Self(engine_state)
|
||||||
engine_state,
|
|
||||||
config,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn completion_helper(&self, line: &str, pos: usize) -> Vec<Suggestion> {
|
fn completion_helper(&self, line: &str, pos: usize) -> Vec<Suggestion> {
|
||||||
|
let full_commands = self.0.get_signatures_with_examples(false);
|
||||||
let folded_line = line.to_folded_case();
|
let folded_line = line.to_folded_case();
|
||||||
|
|
||||||
let mut help_style = HelpStyle::default();
|
//Vec<(Signature, Vec<Example>, bool, bool)> {
|
||||||
help_style.update_from_config(&self.engine_state, &self.config);
|
let mut commands = full_commands
|
||||||
|
.iter()
|
||||||
let mut commands = self
|
.filter(|(sig, _, _, _, _)| {
|
||||||
.engine_state
|
sig.name.to_folded_case().contains(&folded_line)
|
||||||
.get_decls_sorted(false)
|
|| sig.usage.to_folded_case().contains(&folded_line)
|
||||||
.into_iter()
|
|| sig
|
||||||
.filter_map(|(_, decl_id)| {
|
.search_terms
|
||||||
let decl = self.engine_state.get_decl(decl_id);
|
.iter()
|
||||||
(decl.name().to_folded_case().contains(&folded_line)
|
|
||||||
|| decl.description().to_folded_case().contains(&folded_line)
|
|
||||||
|| decl
|
|
||||||
.search_terms()
|
|
||||||
.into_iter()
|
|
||||||
.any(|term| term.to_folded_case().contains(&folded_line))
|
.any(|term| term.to_folded_case().contains(&folded_line))
|
||||||
|| decl
|
|| sig.extra_usage.to_folded_case().contains(&folded_line)
|
||||||
.extra_description()
|
|
||||||
.to_folded_case()
|
|
||||||
.contains(&folded_line))
|
|
||||||
.then_some(decl)
|
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
commands.sort_by_cached_key(|decl| levenshtein_distance(line, decl.name()));
|
commands.sort_by(|(a, _, _, _, _), (b, _, _, _, _)| {
|
||||||
|
let a_distance = levenshtein_distance(line, &a.name);
|
||||||
|
let b_distance = levenshtein_distance(line, &b.name);
|
||||||
|
a_distance.cmp(&b_distance)
|
||||||
|
});
|
||||||
|
|
||||||
commands
|
commands
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|decl| {
|
.map(|(sig, examples, _, _, _)| {
|
||||||
let mut long_desc = String::new();
|
let mut long_desc = String::new();
|
||||||
|
|
||||||
let description = decl.description();
|
let usage = &sig.usage;
|
||||||
if !description.is_empty() {
|
if !usage.is_empty() {
|
||||||
long_desc.push_str(description);
|
long_desc.push_str(usage);
|
||||||
long_desc.push_str("\r\n\r\n");
|
long_desc.push_str("\r\n\r\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
let extra_desc = decl.extra_description();
|
let extra_usage = &sig.extra_usage;
|
||||||
if !extra_desc.is_empty() {
|
if !extra_usage.is_empty() {
|
||||||
long_desc.push_str(extra_desc);
|
long_desc.push_str(extra_usage);
|
||||||
long_desc.push_str("\r\n\r\n");
|
long_desc.push_str("\r\n\r\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
let sig = decl.signature();
|
|
||||||
let _ = write!(long_desc, "Usage:\r\n > {}\r\n", sig.call_signature());
|
let _ = write!(long_desc, "Usage:\r\n > {}\r\n", sig.call_signature());
|
||||||
|
|
||||||
if !sig.named.is_empty() {
|
if !sig.named.is_empty() {
|
||||||
long_desc.push_str(&get_flags_section(&sig, &help_style, |v| {
|
long_desc.push_str(&get_flags_section(Some(&*self.0.clone()), sig, |v| {
|
||||||
v.to_parsable_string(", ", &self.config)
|
v.into_string_parsable(", ", &self.0.config)
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -83,7 +73,7 @@ impl NuHelpCompleter {
|
|||||||
let opt_suffix = if let Some(value) = &positional.default_value {
|
let opt_suffix = if let Some(value) = &positional.default_value {
|
||||||
format!(
|
format!(
|
||||||
" (optional, default: {})",
|
" (optional, default: {})",
|
||||||
&value.to_parsable_string(", ", &self.config),
|
&value.into_string_parsable(", ", &self.0.config),
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
(" (optional)").to_string()
|
(" (optional)").to_string()
|
||||||
@ -104,21 +94,20 @@ impl NuHelpCompleter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let extra: Vec<String> = decl
|
let extra: Vec<String> = examples
|
||||||
.examples()
|
|
||||||
.iter()
|
.iter()
|
||||||
.map(|example| example.example.replace('\n', "\r\n"))
|
.map(|example| example.example.replace('\n', "\r\n"))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
Suggestion {
|
Suggestion {
|
||||||
value: decl.name().into(),
|
value: sig.name.clone(),
|
||||||
description: Some(long_desc),
|
description: Some(long_desc),
|
||||||
extra: Some(extra),
|
extra: Some(extra),
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: pos - line.len(),
|
start: pos,
|
||||||
end: pos,
|
end: pos + line.len(),
|
||||||
},
|
},
|
||||||
..Suggestion::default()
|
append_whitespace: false,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
@ -130,43 +119,3 @@ impl Completer for NuHelpCompleter {
|
|||||||
self.completion_helper(line, pos)
|
self.completion_helper(line, pos)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
use rstest::rstest;
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[case("who", 5, 8, &["whoami"])]
|
|
||||||
#[case("hash", 1, 5, &["hash", "hash md5", "hash sha256"])]
|
|
||||||
#[case("into f", 0, 6, &["into float", "into filesize"])]
|
|
||||||
#[case("into nonexistent", 0, 16, &[])]
|
|
||||||
fn test_help_completer(
|
|
||||||
#[case] line: &str,
|
|
||||||
#[case] start: usize,
|
|
||||||
#[case] end: usize,
|
|
||||||
#[case] expected: &[&str],
|
|
||||||
) {
|
|
||||||
let engine_state =
|
|
||||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context());
|
|
||||||
let config = engine_state.get_config().clone();
|
|
||||||
let mut completer = NuHelpCompleter::new(engine_state.into(), config);
|
|
||||||
let suggestions = completer.complete(line, end);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
expected.len(),
|
|
||||||
suggestions.len(),
|
|
||||||
"expected {:?}, got {:?}",
|
|
||||||
expected,
|
|
||||||
suggestions
|
|
||||||
.iter()
|
|
||||||
.map(|s| s.value.clone())
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
);
|
|
||||||
|
|
||||||
for (exp, actual) in expected.iter().zip(suggestions) {
|
|
||||||
assert_eq!(exp, &actual.value);
|
|
||||||
assert_eq!(reedline::Span::new(start, end), actual.span);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -1,8 +1,7 @@
|
|||||||
use nu_engine::eval_block;
|
use nu_engine::eval_block;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
debugger::WithoutDebug,
|
|
||||||
engine::{EngineState, Stack},
|
engine::{EngineState, Stack},
|
||||||
BlockId, IntoPipelineData, Span, Value,
|
IntoPipelineData, Span, Value,
|
||||||
};
|
};
|
||||||
use reedline::{menu_functions::parse_selection_char, Completer, Suggestion};
|
use reedline::{menu_functions::parse_selection_char, Completer, Suggestion};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
@ -10,7 +9,7 @@ use std::sync::Arc;
|
|||||||
const SELECTION_CHAR: char = '!';
|
const SELECTION_CHAR: char = '!';
|
||||||
|
|
||||||
pub struct NuMenuCompleter {
|
pub struct NuMenuCompleter {
|
||||||
block_id: BlockId,
|
block_id: usize,
|
||||||
span: Span,
|
span: Span,
|
||||||
stack: Stack,
|
stack: Stack,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
@ -19,7 +18,7 @@ pub struct NuMenuCompleter {
|
|||||||
|
|
||||||
impl NuMenuCompleter {
|
impl NuMenuCompleter {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
block_id: BlockId,
|
block_id: usize,
|
||||||
span: Span,
|
span: Span,
|
||||||
stack: Stack,
|
stack: Stack,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
@ -28,7 +27,7 @@ impl NuMenuCompleter {
|
|||||||
Self {
|
Self {
|
||||||
block_id,
|
block_id,
|
||||||
span,
|
span,
|
||||||
stack: stack.reset_out_dest().collect_value(),
|
stack,
|
||||||
engine_state,
|
engine_state,
|
||||||
only_buffer_difference,
|
only_buffer_difference,
|
||||||
}
|
}
|
||||||
@ -56,10 +55,17 @@ impl Completer for NuMenuCompleter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let input = Value::nothing(self.span).into_pipeline_data();
|
let input = Value::nothing(self.span).into_pipeline_data();
|
||||||
|
let res = eval_block(
|
||||||
|
&self.engine_state,
|
||||||
|
&mut self.stack,
|
||||||
|
block,
|
||||||
|
input,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
|
||||||
let res = eval_block::<WithoutDebug>(&self.engine_state, &mut self.stack, block, input);
|
if let Ok(values) = res {
|
||||||
|
let values = values.into_value(self.span);
|
||||||
if let Ok(values) = res.and_then(|data| data.into_value(self.span)) {
|
|
||||||
convert_to_suggestions(values, line, pos, self.only_buffer_difference)
|
convert_to_suggestions(values, line, pos, self.only_buffer_difference)
|
||||||
} else {
|
} else {
|
||||||
Vec::new()
|
Vec::new()
|
||||||
@ -77,12 +83,10 @@ fn convert_to_suggestions(
|
|||||||
Value::Record { val, .. } => {
|
Value::Record { val, .. } => {
|
||||||
let text = val
|
let text = val
|
||||||
.get("value")
|
.get("value")
|
||||||
.and_then(|val| val.coerce_string().ok())
|
.and_then(|val| val.as_string().ok())
|
||||||
.unwrap_or_else(|| "No value key".to_string());
|
.unwrap_or_else(|| "No value key".to_string());
|
||||||
|
|
||||||
let description = val
|
let description = val.get("description").and_then(|val| val.as_string().ok());
|
||||||
.get("description")
|
|
||||||
.and_then(|val| val.coerce_string().ok());
|
|
||||||
|
|
||||||
let span = match val.get("span") {
|
let span = match val.get("span") {
|
||||||
Some(Value::Record { val: span, .. }) => {
|
Some(Value::Record { val: span, .. }) => {
|
||||||
@ -97,13 +101,9 @@ fn convert_to_suggestions(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => reedline::Span {
|
_ => reedline::Span {
|
||||||
start: if only_buffer_difference {
|
start: if only_buffer_difference { pos } else { 0 },
|
||||||
pos - line.len()
|
|
||||||
} else {
|
|
||||||
0
|
|
||||||
},
|
|
||||||
end: if only_buffer_difference {
|
end: if only_buffer_difference {
|
||||||
pos
|
pos + line.len()
|
||||||
} else {
|
} else {
|
||||||
line.len()
|
line.len()
|
||||||
},
|
},
|
||||||
@ -111,13 +111,9 @@ fn convert_to_suggestions(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => reedline::Span {
|
_ => reedline::Span {
|
||||||
start: if only_buffer_difference {
|
start: if only_buffer_difference { pos } else { 0 },
|
||||||
pos - line.len()
|
|
||||||
} else {
|
|
||||||
0
|
|
||||||
},
|
|
||||||
end: if only_buffer_difference {
|
end: if only_buffer_difference {
|
||||||
pos
|
pos + line.len()
|
||||||
} else {
|
} else {
|
||||||
line.len()
|
line.len()
|
||||||
},
|
},
|
||||||
@ -144,7 +140,7 @@ fn convert_to_suggestions(
|
|||||||
description,
|
description,
|
||||||
extra,
|
extra,
|
||||||
span,
|
span,
|
||||||
..Suggestion::default()
|
append_whitespace: false,
|
||||||
}]
|
}]
|
||||||
}
|
}
|
||||||
Value::List { vals, .. } => vals
|
Value::List { vals, .. } => vals
|
||||||
@ -153,19 +149,13 @@ fn convert_to_suggestions(
|
|||||||
.collect(),
|
.collect(),
|
||||||
_ => vec![Suggestion {
|
_ => vec![Suggestion {
|
||||||
value: format!("Not a record: {value:?}"),
|
value: format!("Not a record: {value:?}"),
|
||||||
|
description: None,
|
||||||
|
extra: None,
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: if only_buffer_difference {
|
start: 0,
|
||||||
pos - line.len()
|
end: line.len(),
|
||||||
} else {
|
|
||||||
0
|
|
||||||
},
|
|
||||||
end: if only_buffer_difference {
|
|
||||||
pos
|
|
||||||
} else {
|
|
||||||
line.len()
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
..Suggestion::default()
|
append_whitespace: false,
|
||||||
}],
|
}],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
|
mod description_menu;
|
||||||
mod help_completions;
|
mod help_completions;
|
||||||
mod menu_completions;
|
mod menu_completions;
|
||||||
|
|
||||||
|
pub use description_menu::DescriptionMenu;
|
||||||
pub use help_completions::NuHelpCompleter;
|
pub use help_completions::NuHelpCompleter;
|
||||||
pub use menu_completions::NuMenuCompleter;
|
pub use menu_completions::NuMenuCompleter;
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
use std::sync::Arc;
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
use nu_engine::command_prelude::*;
|
use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Type, Value};
|
||||||
use reedline::{Highlighter, StyledText};
|
use reedline::Highlighter;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct NuHighlight;
|
pub struct NuHighlight;
|
||||||
@ -17,7 +17,7 @@ impl Command for NuHighlight {
|
|||||||
.input_output_types(vec![(Type::String, Type::String)])
|
.input_output_types(vec![(Type::String, Type::String)])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn description(&self) -> &str {
|
fn usage(&self) -> &str {
|
||||||
"Syntax highlight the input string."
|
"Syntax highlight the input string."
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -28,28 +28,31 @@ impl Command for NuHighlight {
|
|||||||
fn run(
|
fn run(
|
||||||
&self,
|
&self,
|
||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
stack: &mut Stack,
|
_stack: &mut Stack,
|
||||||
call: &Call,
|
call: &Call,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let head = call.head;
|
let head = call.head;
|
||||||
|
|
||||||
let signals = engine_state.signals();
|
let ctrlc = engine_state.ctrlc.clone();
|
||||||
|
let engine_state = std::sync::Arc::new(engine_state.clone());
|
||||||
|
let config = engine_state.get_config().clone();
|
||||||
|
|
||||||
let highlighter = crate::NuHighlighter {
|
let highlighter = crate::NuHighlighter {
|
||||||
engine_state: Arc::new(engine_state.clone()),
|
engine_state,
|
||||||
stack: Arc::new(stack.clone()),
|
config,
|
||||||
};
|
};
|
||||||
|
|
||||||
input.map(
|
input.map(
|
||||||
move |x| match x.coerce_into_string() {
|
move |x| match x.as_string() {
|
||||||
Ok(line) => {
|
Ok(line) => {
|
||||||
let highlights = highlighter.highlight(&line, line.len());
|
let highlights = highlighter.highlight(&line, line.len());
|
||||||
|
|
||||||
Value::string(highlights.render_simple(), head)
|
Value::string(highlights.render_simple(), head)
|
||||||
}
|
}
|
||||||
Err(err) => Value::error(err, head),
|
Err(err) => Value::error(err, head),
|
||||||
},
|
},
|
||||||
signals,
|
ctrlc,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -61,16 +64,3 @@ impl Command for NuHighlight {
|
|||||||
}]
|
}]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A highlighter that does nothing
|
|
||||||
///
|
|
||||||
/// Used to remove highlighting from a reedline instance
|
|
||||||
/// (letting NuHighlighter structs be dropped)
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct NoOpHighlighter {}
|
|
||||||
|
|
||||||
impl Highlighter for NoOpHighlighter {
|
|
||||||
fn highlight(&self, _line: &str, _cursor: usize) -> reedline::StyledText {
|
|
||||||
StyledText::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -1,5 +1,10 @@
|
|||||||
use nu_engine::command_prelude::*;
|
use nu_engine::CallExt;
|
||||||
use nu_protocol::ByteStreamSource;
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{
|
||||||
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type,
|
||||||
|
Value,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Print;
|
pub struct Print;
|
||||||
@ -23,19 +28,14 @@ impl Command for Print {
|
|||||||
Some('n'),
|
Some('n'),
|
||||||
)
|
)
|
||||||
.switch("stderr", "print to stderr instead of stdout", Some('e'))
|
.switch("stderr", "print to stderr instead of stdout", Some('e'))
|
||||||
.switch(
|
|
||||||
"raw",
|
|
||||||
"print without formatting (including binary data)",
|
|
||||||
Some('r'),
|
|
||||||
)
|
|
||||||
.category(Category::Strings)
|
.category(Category::Strings)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn description(&self) -> &str {
|
fn usage(&self) -> &str {
|
||||||
"Print the given values to stdout."
|
"Print the given values to stdout."
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extra_description(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"Unlike `echo`, this command does not return any value (`print | describe` will return "nothing").
|
r#"Unlike `echo`, this command does not return any value (`print | describe` will return "nothing").
|
||||||
Since this command has no output, there is no point in piping it with other commands.
|
Since this command has no output, there is no point in piping it with other commands.
|
||||||
|
|
||||||
@ -51,39 +51,20 @@ Since this command has no output, there is no point in piping it with other comm
|
|||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
call: &Call,
|
call: &Call,
|
||||||
mut input: PipelineData,
|
input: PipelineData,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
||||||
let no_newline = call.has_flag(engine_state, stack, "no-newline")?;
|
let no_newline = call.has_flag("no-newline");
|
||||||
let to_stderr = call.has_flag(engine_state, stack, "stderr")?;
|
let to_stderr = call.has_flag("stderr");
|
||||||
let raw = call.has_flag(engine_state, stack, "raw")?;
|
|
||||||
|
|
||||||
// This will allow for easy printing of pipelines as well
|
// This will allow for easy printing of pipelines as well
|
||||||
if !args.is_empty() {
|
if !args.is_empty() {
|
||||||
for arg in args {
|
for arg in args {
|
||||||
if raw {
|
arg.into_pipeline_data()
|
||||||
arg.into_pipeline_data()
|
.print(engine_state, stack, no_newline, to_stderr)?;
|
||||||
.print_raw(engine_state, no_newline, to_stderr)?;
|
|
||||||
} else {
|
|
||||||
arg.into_pipeline_data().print_table(
|
|
||||||
engine_state,
|
|
||||||
stack,
|
|
||||||
no_newline,
|
|
||||||
to_stderr,
|
|
||||||
)?;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
} else if !input.is_nothing() {
|
} else if !input.is_nothing() {
|
||||||
if let PipelineData::ByteStream(stream, _) = &mut input {
|
input.print(engine_state, stack, no_newline, to_stderr)?;
|
||||||
if let ByteStreamSource::Child(child) = stream.source_mut() {
|
|
||||||
child.ignore_error(true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if raw {
|
|
||||||
input.print_raw(engine_state, no_newline, to_stderr)?;
|
|
||||||
} else {
|
|
||||||
input.print_table(engine_state, stack, no_newline, to_stderr)?;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(PipelineData::empty())
|
Ok(PipelineData::empty())
|
||||||
@ -101,11 +82,6 @@ Since this command has no output, there is no point in piping it with other comm
|
|||||||
example: r#"print (2 + 3)"#,
|
example: r#"print (2 + 3)"#,
|
||||||
result: None,
|
result: None,
|
||||||
},
|
},
|
||||||
Example {
|
|
||||||
description: "Print 'ABC' from binary data",
|
|
||||||
example: r#"0x[41 42 43] | print --raw"#,
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,20 +1,16 @@
|
|||||||
use crate::prompt_update::{
|
|
||||||
POST_PROMPT_MARKER, PRE_PROMPT_MARKER, VSCODE_POST_PROMPT_MARKER, VSCODE_PRE_PROMPT_MARKER,
|
|
||||||
};
|
|
||||||
use nu_protocol::engine::{EngineState, Stack};
|
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
use nu_utils::enable_vt_processing;
|
use nu_utils::enable_vt_processing;
|
||||||
use reedline::{
|
use reedline::DefaultPrompt;
|
||||||
DefaultPrompt, Prompt, PromptEditMode, PromptHistorySearch, PromptHistorySearchStatus,
|
use {
|
||||||
PromptViMode,
|
reedline::{
|
||||||
|
Prompt, PromptEditMode, PromptHistorySearch, PromptHistorySearchStatus, PromptViMode,
|
||||||
|
},
|
||||||
|
std::borrow::Cow,
|
||||||
};
|
};
|
||||||
use std::borrow::Cow;
|
|
||||||
|
|
||||||
/// Nushell prompt definition
|
/// Nushell prompt definition
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct NushellPrompt {
|
pub struct NushellPrompt {
|
||||||
shell_integration_osc133: bool,
|
|
||||||
shell_integration_osc633: bool,
|
|
||||||
left_prompt_string: Option<String>,
|
left_prompt_string: Option<String>,
|
||||||
right_prompt_string: Option<String>,
|
right_prompt_string: Option<String>,
|
||||||
default_prompt_indicator: Option<String>,
|
default_prompt_indicator: Option<String>,
|
||||||
@ -22,20 +18,17 @@ pub struct NushellPrompt {
|
|||||||
default_vi_normal_prompt_indicator: Option<String>,
|
default_vi_normal_prompt_indicator: Option<String>,
|
||||||
default_multiline_indicator: Option<String>,
|
default_multiline_indicator: Option<String>,
|
||||||
render_right_prompt_on_last_line: bool,
|
render_right_prompt_on_last_line: bool,
|
||||||
engine_state: EngineState,
|
}
|
||||||
stack: Stack,
|
|
||||||
|
impl Default for NushellPrompt {
|
||||||
|
fn default() -> Self {
|
||||||
|
NushellPrompt::new()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl NushellPrompt {
|
impl NushellPrompt {
|
||||||
pub fn new(
|
pub fn new() -> NushellPrompt {
|
||||||
shell_integration_osc133: bool,
|
|
||||||
shell_integration_osc633: bool,
|
|
||||||
engine_state: EngineState,
|
|
||||||
stack: Stack,
|
|
||||||
) -> NushellPrompt {
|
|
||||||
NushellPrompt {
|
NushellPrompt {
|
||||||
shell_integration_osc133,
|
|
||||||
shell_integration_osc633,
|
|
||||||
left_prompt_string: None,
|
left_prompt_string: None,
|
||||||
right_prompt_string: None,
|
right_prompt_string: None,
|
||||||
default_prompt_indicator: None,
|
default_prompt_indicator: None,
|
||||||
@ -43,8 +36,6 @@ impl NushellPrompt {
|
|||||||
default_vi_normal_prompt_indicator: None,
|
default_vi_normal_prompt_indicator: None,
|
||||||
default_multiline_indicator: None,
|
default_multiline_indicator: None,
|
||||||
render_right_prompt_on_last_line: false,
|
render_right_prompt_on_last_line: false,
|
||||||
engine_state,
|
|
||||||
stack,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -120,26 +111,7 @@ impl Prompt for NushellPrompt {
|
|||||||
.to_string()
|
.to_string()
|
||||||
.replace('\n', "\r\n");
|
.replace('\n', "\r\n");
|
||||||
|
|
||||||
if self.shell_integration_osc633 {
|
prompt.into()
|
||||||
if self
|
|
||||||
.stack
|
|
||||||
.get_env_var(&self.engine_state, "TERM_PROGRAM")
|
|
||||||
.and_then(|v| v.as_str().ok())
|
|
||||||
== Some("vscode")
|
|
||||||
{
|
|
||||||
// We're in vscode and we have osc633 enabled
|
|
||||||
format!("{VSCODE_PRE_PROMPT_MARKER}{prompt}{VSCODE_POST_PROMPT_MARKER}").into()
|
|
||||||
} else if self.shell_integration_osc133 {
|
|
||||||
// If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
|
|
||||||
format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
|
|
||||||
} else {
|
|
||||||
prompt.into()
|
|
||||||
}
|
|
||||||
} else if self.shell_integration_osc133 {
|
|
||||||
format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
|
|
||||||
} else {
|
|
||||||
prompt.into()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@@ -1,11 +1,14 @@
 use crate::NushellPrompt;
-use log::{trace, warn};
-use nu_engine::ClosureEvalOnce;
+use log::trace;
+use nu_engine::eval_subexpression;
+use nu_protocol::report_error;
 use nu_protocol::{
-    engine::{EngineState, Stack},
-    report_shell_error, Config, PipelineData, Value,
+    engine::{EngineState, Stack, StateWorkingSet},
+    Config, PipelineData, Value,
 };
 use reedline::Prompt;
+use std::borrow::Cow;
+use std::sync::Arc;

 // Name of environment variable where the prompt could be stored
 pub(crate) const PROMPT_COMMAND: &str = "PROMPT_COMMAND";
@@ -23,31 +26,10 @@ pub(crate) const TRANSIENT_PROMPT_INDICATOR_VI_NORMAL: &str =
     "TRANSIENT_PROMPT_INDICATOR_VI_NORMAL";
 pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
     "TRANSIENT_PROMPT_MULTILINE_INDICATOR";

-// Store all these Ansi Escape Markers here so they can be reused easily
 // According to Daniel Imms @Tyriar, we need to do these this way:
 // <133 A><prompt><133 B><command><133 C><command output>
-pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
-pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
+const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
+const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
-pub(crate) const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\";
-pub(crate) const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;";
-pub(crate) const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
-
-// OSC633 is the same as OSC133 but specifically for VSCode
-pub(crate) const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\";
-pub(crate) const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\";
-pub(crate) const VSCODE_PRE_EXECUTION_MARKER: &str = "\x1b]633;C\x1b\\";
-//"\x1b]633;D;{}\x1b\\"
-pub(crate) const VSCODE_POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]633;D;";
-pub(crate) const VSCODE_POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
-//"\x1b]633;E;{}\x1b\\"
-pub(crate) const VSCODE_COMMANDLINE_MARKER_PREFIX: &str = "\x1b]633;E;";
-pub(crate) const VSCODE_COMMANDLINE_MARKER_SUFFIX: &str = "\x1b\\";
-// "\x1b]633;P;Cwd={}\x1b\\"
-pub(crate) const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd=";
-pub(crate) const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\";
-
-pub(crate) const RESET_APPLICATION_MODE: &str = "\x1b[?1l";

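The marker constants above implement the OSC 133 "semantic prompt" protocol noted in the comment
(<133 A><prompt><133 B><command><133 C><command output>). A minimal standalone sketch of wrapping a
prompt string the same way (the wrapper function name is mine; the escape values are copied from the
constants above):

// Sketch: wrap a prompt in OSC 133 A/B markers so a terminal with shell
// integration can tell where the prompt ends and user input begins.
const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";

fn mark_prompt(prompt: &str) -> String {
    format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}")
}

fn main() {
    print!("{}", mark_prompt("nu> "));
}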
 fn get_prompt_string(
     prompt: &str,
@@ -59,9 +41,11 @@ fn get_prompt_string(
         .get_env_var(engine_state, prompt)
         .and_then(|v| match v {
             Value::Closure { val, .. } => {
-                let result = ClosureEvalOnce::new(engine_state, stack, val.as_ref().clone())
-                    .run_with_input(PipelineData::Empty);
+                let block = engine_state.get_block(val.block_id);
+                let mut stack = stack.captures_to_stack(val.captures);
+                // Use eval_subexpression to force a redirection of output, so we can use everything in prompt
+                let ret_val =
+                    eval_subexpression(engine_state, &mut stack, block, PipelineData::empty());
                 trace!(
                     "get_prompt_string (block) {}:{}:{}",
                     file!(),
@@ -69,9 +53,28 @@ fn get_prompt_string(
                     column!()
                 );

-                result
+                ret_val
                     .map_err(|err| {
-                        report_shell_error(engine_state, &err);
+                        let working_set = StateWorkingSet::new(engine_state);
+                        report_error(&working_set, &err);
+                    })
+                    .ok()
+            }
+            Value::Block { val: block_id, .. } => {
+                let block = engine_state.get_block(block_id);
+                // Use eval_subexpression to force a redirection of output, so we can use everything in prompt
+                let ret_val = eval_subexpression(engine_state, stack, block, PipelineData::empty());
+                trace!(
+                    "get_prompt_string (block) {}:{}:{}",
+                    file!(),
+                    line!(),
+                    column!()
+                );
+
+                ret_val
+                    .map_err(|err| {
+                        let working_set = StateWorkingSet::new(engine_state);
+                        report_error(&working_set, &err);
                     })
                     .ok()
             }
@@ -80,13 +83,8 @@ fn get_prompt_string(
         })
         .and_then(|pipeline_data| {
             let output = pipeline_data.collect_string("", config).ok();
-            let ansi_output = output.map(|mut x| {
-                // Always reset the color at the start of the right prompt
-                // to ensure there is no ansi bleed over
-                if x.is_empty() && prompt == PROMPT_COMMAND_RIGHT {
-                    x.insert_str(0, "\x1b[0m")
-                };
-
+            output.map(|mut x| {
                 // Just remove the very last newline.
                 if x.ends_with('\n') {
                     x.pop();
@@ -96,66 +94,48 @@ fn get_prompt_string(
                     x.pop();
                 }
                 x
-            });
-            // Let's keep this for debugging purposes with nu --log-level warn
-            warn!("{}:{}:{} {:?}", file!(), line!(), column!(), ansi_output);
-
-            ansi_output
+            })
         })
 }

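Whichever form the prompt environment variable takes (closure, block, or plain string), the collected
output is post-processed by trimming a single trailing newline so the prompt does not push the cursor
onto the next line. A small self-contained sketch of that trimming step (the function name is mine;
the real code above also handles the '\r' left behind by "\r\n"):

// Sketch: strip one trailing newline (and carriage return) from prompt output.
fn trim_prompt_output(mut s: String) -> String {
    if s.ends_with('\n') {
        s.pop();
        if s.ends_with('\r') {
            s.pop();
        }
    }
    s
}

fn main() {
    assert_eq!(trim_prompt_output("nu> \r\n".into()), "nu> ");
    assert_eq!(trim_prompt_output("nu> ".into()), "nu> ");
}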
pub(crate) fn update_prompt(
|
pub(crate) fn update_prompt<'prompt>(
|
||||||
config: &Config,
|
config: &Config,
|
||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
stack: &mut Stack,
|
stack: &Stack,
|
||||||
nu_prompt: &mut NushellPrompt,
|
nu_prompt: &'prompt mut NushellPrompt,
|
||||||
) {
|
) -> &'prompt dyn Prompt {
|
||||||
let configured_left_prompt_string =
|
let mut stack = stack.clone();
|
||||||
match get_prompt_string(PROMPT_COMMAND, config, engine_state, stack) {
|
|
||||||
Some(s) => s,
|
let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, &mut stack);
|
||||||
None => "".to_string(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Now that we have the prompt string lets ansify it.
|
// Now that we have the prompt string lets ansify it.
|
||||||
// <133 A><prompt><133 B><command><133 C><command output>
|
// <133 A><prompt><133 B><command><133 C><command output>
|
||||||
let left_prompt_string = if config.shell_integration.osc633 {
|
let left_prompt_string = if config.shell_integration {
|
||||||
if stack
|
if let Some(prompt_string) = left_prompt_string {
|
||||||
.get_env_var(engine_state, "TERM_PROGRAM")
|
|
||||||
.and_then(|v| v.as_str().ok())
|
|
||||||
== Some("vscode")
|
|
||||||
{
|
|
||||||
// We're in vscode and we have osc633 enabled
|
|
||||||
Some(format!(
|
Some(format!(
|
||||||
"{VSCODE_PRE_PROMPT_MARKER}{configured_left_prompt_string}{VSCODE_POST_PROMPT_MARKER}"
|
"{PRE_PROMPT_MARKER}{prompt_string}{POST_PROMPT_MARKER}"
|
||||||
))
|
|
||||||
} else if config.shell_integration.osc133 {
|
|
||||||
// If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
|
|
||||||
Some(format!(
|
|
||||||
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
|
|
||||||
))
|
))
|
||||||
} else {
|
} else {
|
||||||
configured_left_prompt_string.into()
|
left_prompt_string
|
||||||
}
|
}
|
||||||
} else if config.shell_integration.osc133 {
|
|
||||||
Some(format!(
|
|
||||||
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
|
|
||||||
))
|
|
||||||
} else {
|
} else {
|
||||||
configured_left_prompt_string.into()
|
left_prompt_string
|
||||||
};
|
};
|
||||||
|
|
||||||
let right_prompt_string = get_prompt_string(PROMPT_COMMAND_RIGHT, config, engine_state, stack);
|
let right_prompt_string =
|
||||||
|
get_prompt_string(PROMPT_COMMAND_RIGHT, config, engine_state, &mut stack);
|
||||||
|
|
||||||
let prompt_indicator_string = get_prompt_string(PROMPT_INDICATOR, config, engine_state, stack);
|
let prompt_indicator_string =
|
||||||
|
get_prompt_string(PROMPT_INDICATOR, config, engine_state, &mut stack);
|
||||||
|
|
||||||
let prompt_multiline_string =
|
let prompt_multiline_string =
|
||||||
get_prompt_string(PROMPT_MULTILINE_INDICATOR, config, engine_state, stack);
|
get_prompt_string(PROMPT_MULTILINE_INDICATOR, config, engine_state, &mut stack);
|
||||||
|
|
||||||
let prompt_vi_insert_string =
|
let prompt_vi_insert_string =
|
||||||
get_prompt_string(PROMPT_INDICATOR_VI_INSERT, config, engine_state, stack);
|
get_prompt_string(PROMPT_INDICATOR_VI_INSERT, config, engine_state, &mut stack);
|
||||||
|
|
||||||
let prompt_vi_normal_string =
|
let prompt_vi_normal_string =
|
||||||
get_prompt_string(PROMPT_INDICATOR_VI_NORMAL, config, engine_state, stack);
|
get_prompt_string(PROMPT_INDICATOR_VI_NORMAL, config, engine_state, &mut stack);
|
||||||
|
|
||||||
// apply the other indicators
|
// apply the other indicators
|
||||||
nu_prompt.update_all_prompt_strings(
|
nu_prompt.update_all_prompt_strings(
|
@@ -166,55 +146,125 @@ pub(crate) fn update_prompt(
         (prompt_vi_insert_string, prompt_vi_normal_string),
         config.render_right_prompt_on_last_line,
     );

+    let ret_val = nu_prompt as &dyn Prompt;
     trace!("update_prompt {}:{}:{}", file!(), line!(), column!());
+
+    ret_val
 }

-/// Construct the transient prompt based on the normal nu_prompt
-pub(crate) fn make_transient_prompt(
+struct TransientPrompt {
+    engine_state: Arc<EngineState>,
+    stack: Stack,
+}
+
+/// Try getting `$env.TRANSIENT_PROMPT_<X>`, and get `$env.PROMPT_<X>` if that fails
+fn get_transient_prompt_string(
+    transient_prompt: &str,
+    prompt: &str,
     config: &Config,
     engine_state: &EngineState,
     stack: &mut Stack,
-    nu_prompt: &NushellPrompt,
-) -> Box<dyn Prompt> {
-    let mut nu_prompt = nu_prompt.clone();
-
-    if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_COMMAND, config, engine_state, stack) {
-        nu_prompt.update_prompt_left(Some(s))
-    }
-
-    if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_COMMAND_RIGHT, config, engine_state, stack)
-    {
-        nu_prompt.update_prompt_right(Some(s), config.render_right_prompt_on_last_line)
-    }
-
-    if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_INDICATOR, config, engine_state, stack) {
-        nu_prompt.update_prompt_indicator(Some(s))
-    }
-    if let Some(s) = get_prompt_string(
-        TRANSIENT_PROMPT_INDICATOR_VI_INSERT,
-        config,
-        engine_state,
-        stack,
-    ) {
-        nu_prompt.update_prompt_vi_insert(Some(s))
-    }
-    if let Some(s) = get_prompt_string(
-        TRANSIENT_PROMPT_INDICATOR_VI_NORMAL,
-        config,
-        engine_state,
-        stack,
-    ) {
-        nu_prompt.update_prompt_vi_normal(Some(s))
-    }
-
-    if let Some(s) = get_prompt_string(
-        TRANSIENT_PROMPT_MULTILINE_INDICATOR,
-        config,
-        engine_state,
-        stack,
-    ) {
-        nu_prompt.update_prompt_multiline(Some(s))
-    }
-
-    Box::new(nu_prompt)
+) -> Option<String> {
+    get_prompt_string(transient_prompt, config, engine_state, stack)
+        .or_else(|| get_prompt_string(prompt, config, engine_state, stack))
+}
+
+impl Prompt for TransientPrompt {
+    fn render_prompt_left(&self) -> Cow<str> {
+        let mut nu_prompt = NushellPrompt::new();
+        let config = &self.engine_state.get_config().clone();
+        let mut stack = self.stack.clone();
+        nu_prompt.update_prompt_left(get_transient_prompt_string(
+            TRANSIENT_PROMPT_COMMAND,
+            PROMPT_COMMAND,
+            config,
+            &self.engine_state,
+            &mut stack,
+        ));
+        nu_prompt.render_prompt_left().to_string().into()
+    }
+
+    fn render_prompt_right(&self) -> Cow<str> {
+        let mut nu_prompt = NushellPrompt::new();
+        let config = &self.engine_state.get_config().clone();
+        let mut stack = self.stack.clone();
+        nu_prompt.update_prompt_right(
+            get_transient_prompt_string(
+                TRANSIENT_PROMPT_COMMAND_RIGHT,
+                PROMPT_COMMAND_RIGHT,
+                config,
+                &self.engine_state,
+                &mut stack,
+            ),
+            config.render_right_prompt_on_last_line,
+        );
+        nu_prompt.render_prompt_right().to_string().into()
+    }
+
+    fn render_prompt_indicator(&self, prompt_mode: reedline::PromptEditMode) -> Cow<str> {
+        let mut nu_prompt = NushellPrompt::new();
+        let config = &self.engine_state.get_config().clone();
+        let mut stack = self.stack.clone();
+        nu_prompt.update_prompt_indicator(get_transient_prompt_string(
+            TRANSIENT_PROMPT_INDICATOR,
+            PROMPT_INDICATOR,
+            config,
+            &self.engine_state,
+            &mut stack,
+        ));
+        nu_prompt.update_prompt_vi_insert(get_transient_prompt_string(
+            TRANSIENT_PROMPT_INDICATOR_VI_INSERT,
+            PROMPT_INDICATOR_VI_INSERT,
+            config,
+            &self.engine_state,
+            &mut stack,
+        ));
+        nu_prompt.update_prompt_vi_normal(get_transient_prompt_string(
+            TRANSIENT_PROMPT_INDICATOR_VI_NORMAL,
+            PROMPT_INDICATOR_VI_NORMAL,
+            config,
+            &self.engine_state,
+            &mut stack,
+        ));
+        nu_prompt
+            .render_prompt_indicator(prompt_mode)
+            .to_string()
+            .into()
+    }
+
+    fn render_prompt_multiline_indicator(&self) -> Cow<str> {
+        let mut nu_prompt = NushellPrompt::new();
+        let config = &self.engine_state.get_config().clone();
+        let mut stack = self.stack.clone();
+        nu_prompt.update_prompt_multiline(get_transient_prompt_string(
+            TRANSIENT_PROMPT_MULTILINE_INDICATOR,
+            PROMPT_MULTILINE_INDICATOR,
+            config,
+            &self.engine_state,
+            &mut stack,
+        ));
+        nu_prompt
+            .render_prompt_multiline_indicator()
+            .to_string()
+            .into()
+    }
+
+    fn render_prompt_history_search_indicator(
+        &self,
+        history_search: reedline::PromptHistorySearch,
+    ) -> Cow<str> {
+        NushellPrompt::new()
+            .render_prompt_history_search_indicator(history_search)
+            .to_string()
+            .into()
+    }
+}
+
+/// Construct the transient prompt
+pub(crate) fn transient_prompt(engine_state: Arc<EngineState>, stack: &Stack) -> Box<dyn Prompt> {
+    Box::new(TransientPrompt {
+        engine_state,
+        stack: stack.clone(),
+    })
 }

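The new TransientPrompt above re-resolves $env.TRANSIENT_PROMPT_* (falling back to $env.PROMPT_*)
each time reedline asks for a segment, by implementing reedline's Prompt trait. A minimal sketch of
that trait surface with a fixed transient prompt (the struct name and the literal strings are mine):

use std::borrow::Cow;
use reedline::{Prompt, PromptEditMode, PromptHistorySearch};

// Sketch: a transient prompt that always renders the same segments, showing
// the five Prompt methods the nushell TransientPrompt implements.
struct StaticTransientPrompt;

impl Prompt for StaticTransientPrompt {
    fn render_prompt_left(&self) -> Cow<str> {
        Cow::Borrowed("> ")
    }
    fn render_prompt_right(&self) -> Cow<str> {
        Cow::Borrowed("")
    }
    fn render_prompt_indicator(&self, _edit_mode: PromptEditMode) -> Cow<str> {
        Cow::Borrowed("")
    }
    fn render_prompt_multiline_indicator(&self) -> Cow<str> {
        Cow::Borrowed("::: ")
    }
    fn render_prompt_history_search_indicator(
        &self,
        _history_search: PromptHistorySearch,
    ) -> Cow<str> {
        Cow::Borrowed("(search) ")
    }
}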
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,57 +1,26 @@
 use log::trace;
 use nu_ansi_term::Style;
 use nu_color_config::{get_matching_brackets_style, get_shape_color};
-use nu_engine::env;
 use nu_parser::{flatten_block, parse, FlatShape};
-use nu_protocol::{
-    ast::{Block, Expr, Expression, PipelineRedirection, RecordItem},
-    engine::{EngineState, Stack, StateWorkingSet},
-    Span,
-};
+use nu_protocol::ast::{Argument, Block, Expr, Expression, PipelineElement};
+use nu_protocol::engine::{EngineState, StateWorkingSet};
+use nu_protocol::{Config, Span};
 use reedline::{Highlighter, StyledText};
 use std::sync::Arc;

 pub struct NuHighlighter {
     pub engine_state: Arc<EngineState>,
-    pub stack: Arc<Stack>,
+    pub config: Config,
 }

 impl Highlighter for NuHighlighter {
     fn highlight(&self, line: &str, _cursor: usize) -> StyledText {
         trace!("highlighting: {}", line);

-        let config = self.stack.get_config(&self.engine_state);
-        let highlight_resolved_externals = config.highlight_resolved_externals;
         let mut working_set = StateWorkingSet::new(&self.engine_state);
         let block = parse(&mut working_set, None, line.as_bytes(), false);
         let (shapes, global_span_offset) = {
-            let mut shapes = flatten_block(&working_set, &block);
-            // Highlighting externals has a config point because of concerns that using which to resolve
-            // externals may slow down things too much.
-            if highlight_resolved_externals {
-                for (span, shape) in shapes.iter_mut() {
-                    if *shape == FlatShape::External {
-                        let str_contents =
-                            working_set.get_span_contents(Span::new(span.start, span.end));
-
-                        let str_word = String::from_utf8_lossy(str_contents).to_string();
-                        let paths = env::path_str(&self.engine_state, &self.stack, *span).ok();
-                        #[allow(deprecated)]
-                        let res = if let Ok(cwd) =
-                            env::current_dir_str(&self.engine_state, &self.stack)
-                        {
-                            which::which_in(str_word, paths.as_ref(), cwd).ok()
-                        } else {
-                            which::which_in_global(str_word, paths.as_ref())
-                                .ok()
-                                .and_then(|mut i| i.next())
-                        };
-                        if res.is_some() {
-                            *shape = FlatShape::ExternalResolved;
-                        }
-                    }
-                }
-            }
+            let shapes = flatten_block(&working_set, &block);
             (shapes, self.engine_state.next_span_start())
         };

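The block removed above resolves bare external commands through the which crate so the highlighter
can color commands that actually exist on PATH (FlatShape::ExternalResolved). A standalone sketch of
that check, assuming the which crate as used above (the wrapper function is mine):

// Sketch: does a bare word resolve to an executable, given an optional PATH
// string and a working directory? This is the decision behind the
// ExternalResolved highlight.
fn resolves_as_external(word: &str, paths: Option<&str>, cwd: &std::path::Path) -> bool {
    which::which_in(word, paths, cwd).is_ok()
}

fn main() {
    let path = std::env::var("PATH").ok();
    let cwd = std::env::current_dir().expect("cwd");
    println!("git on PATH: {}", resolves_as_external("git", path.as_deref(), &cwd));
}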
@ -86,8 +55,29 @@ impl Highlighter for NuHighlighter {
|
|||||||
[(shape.0.start - global_span_offset)..(shape.0.end - global_span_offset)]
|
[(shape.0.start - global_span_offset)..(shape.0.end - global_span_offset)]
|
||||||
.to_string();
|
.to_string();
|
||||||
|
|
||||||
|
macro_rules! add_colored_token_with_bracket_highlight {
|
||||||
|
($shape:expr, $span:expr, $text:expr) => {{
|
||||||
|
let spans = split_span_by_highlight_positions(
|
||||||
|
line,
|
||||||
|
$span,
|
||||||
|
&matching_brackets_pos,
|
||||||
|
global_span_offset,
|
||||||
|
);
|
||||||
|
spans.iter().for_each(|(part, highlight)| {
|
||||||
|
let start = part.start - $span.start;
|
||||||
|
let end = part.end - $span.start;
|
||||||
|
let text = (&next_token[start..end]).to_string();
|
||||||
|
let mut style = get_shape_color($shape.to_string(), &self.config);
|
||||||
|
if *highlight {
|
||||||
|
style = get_matching_brackets_style(style, &self.config);
|
||||||
|
}
|
||||||
|
output.push((style, text));
|
||||||
|
});
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
let mut add_colored_token = |shape: &FlatShape, text: String| {
|
let mut add_colored_token = |shape: &FlatShape, text: String| {
|
||||||
output.push((get_shape_color(shape.as_str(), &config), text));
|
output.push((get_shape_color(shape.to_string(), &self.config), text));
|
||||||
};
|
};
|
||||||
|
|
||||||
match shape.1 {
|
match shape.1 {
|
||||||
@ -101,49 +91,40 @@ impl Highlighter for NuHighlighter {
|
|||||||
FlatShape::InternalCall(_) => add_colored_token(&shape.1, next_token),
|
FlatShape::InternalCall(_) => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::External => add_colored_token(&shape.1, next_token),
|
FlatShape::External => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::ExternalArg => add_colored_token(&shape.1, next_token),
|
FlatShape::ExternalArg => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::ExternalResolved => add_colored_token(&shape.1, next_token),
|
|
||||||
FlatShape::Keyword => add_colored_token(&shape.1, next_token),
|
FlatShape::Keyword => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Literal => add_colored_token(&shape.1, next_token),
|
FlatShape::Literal => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Operator => add_colored_token(&shape.1, next_token),
|
FlatShape::Operator => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Signature => add_colored_token(&shape.1, next_token),
|
FlatShape::Signature => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::String => add_colored_token(&shape.1, next_token),
|
FlatShape::String => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::RawString => add_colored_token(&shape.1, next_token),
|
|
||||||
FlatShape::StringInterpolation => add_colored_token(&shape.1, next_token),
|
FlatShape::StringInterpolation => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::DateTime => add_colored_token(&shape.1, next_token),
|
FlatShape::DateTime => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::List
|
FlatShape::List => {
|
||||||
| FlatShape::Table
|
add_colored_token_with_bracket_highlight!(shape.1, shape.0, next_token)
|
||||||
| FlatShape::Record
|
}
|
||||||
| FlatShape::Block
|
FlatShape::Table => {
|
||||||
| FlatShape::Closure => {
|
add_colored_token_with_bracket_highlight!(shape.1, shape.0, next_token)
|
||||||
let span = shape.0;
|
}
|
||||||
let shape = &shape.1;
|
FlatShape::Record => {
|
||||||
let spans = split_span_by_highlight_positions(
|
add_colored_token_with_bracket_highlight!(shape.1, shape.0, next_token)
|
||||||
line,
|
}
|
||||||
span,
|
|
||||||
&matching_brackets_pos,
|
FlatShape::Block => {
|
||||||
global_span_offset,
|
add_colored_token_with_bracket_highlight!(shape.1, shape.0, next_token)
|
||||||
);
|
}
|
||||||
for (part, highlight) in spans {
|
FlatShape::Closure => {
|
||||||
let start = part.start - span.start;
|
add_colored_token_with_bracket_highlight!(shape.1, shape.0, next_token)
|
||||||
let end = part.end - span.start;
|
|
||||||
let text = next_token[start..end].to_string();
|
|
||||||
let mut style = get_shape_color(shape.as_str(), &config);
|
|
||||||
if highlight {
|
|
||||||
style = get_matching_brackets_style(style, &config);
|
|
||||||
}
|
|
||||||
output.push((style, text));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
FlatShape::Filepath => add_colored_token(&shape.1, next_token),
|
FlatShape::Filepath => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Directory => add_colored_token(&shape.1, next_token),
|
FlatShape::Directory => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::GlobInterpolation => add_colored_token(&shape.1, next_token),
|
|
||||||
FlatShape::GlobPattern => add_colored_token(&shape.1, next_token),
|
FlatShape::GlobPattern => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Variable(_) | FlatShape::VarDecl(_) => {
|
FlatShape::Variable(_) | FlatShape::VarDecl(_) => {
|
||||||
add_colored_token(&shape.1, next_token)
|
add_colored_token(&shape.1, next_token)
|
||||||
}
|
}
|
||||||
FlatShape::Flag => add_colored_token(&shape.1, next_token),
|
FlatShape::Flag => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Pipe => add_colored_token(&shape.1, next_token),
|
FlatShape::Pipe => add_colored_token(&shape.1, next_token),
|
||||||
|
FlatShape::And => add_colored_token(&shape.1, next_token),
|
||||||
|
FlatShape::Or => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Redirection => add_colored_token(&shape.1, next_token),
|
FlatShape::Redirection => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
|
FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
|
FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
|
||||||
@ -251,38 +232,24 @@ fn find_matching_block_end_in_block(
|
|||||||
) -> Option<usize> {
|
) -> Option<usize> {
|
||||||
for p in &block.pipelines {
|
for p in &block.pipelines {
|
||||||
for e in &p.elements {
|
for e in &p.elements {
|
||||||
if e.expr.span.contains(global_cursor_offset) {
|
match e {
|
||||||
if let Some(pos) = find_matching_block_end_in_expr(
|
PipelineElement::Expression(_, e)
|
||||||
line,
|
| PipelineElement::Redirection(_, _, e)
|
||||||
working_set,
|
| PipelineElement::And(_, e)
|
||||||
&e.expr,
|
| PipelineElement::Or(_, e)
|
||||||
global_span_offset,
|
| PipelineElement::SameTargetRedirection { cmd: (_, e), .. }
|
||||||
global_cursor_offset,
|
| PipelineElement::SeparateRedirection { out: (_, e), .. } => {
|
||||||
) {
|
if e.span.contains(global_cursor_offset) {
|
||||||
return Some(pos);
|
if let Some(pos) = find_matching_block_end_in_expr(
|
||||||
}
|
line,
|
||||||
}
|
working_set,
|
||||||
|
e,
|
||||||
if let Some(redirection) = e.redirection.as_ref() {
|
global_span_offset,
|
||||||
match redirection {
|
global_cursor_offset,
|
||||||
PipelineRedirection::Single { target, .. }
|
) {
|
||||||
| PipelineRedirection::Separate { out: target, .. }
|
|
||||||
| PipelineRedirection::Separate { err: target, .. }
|
|
||||||
if target.span().contains(global_cursor_offset) =>
|
|
||||||
{
|
|
||||||
if let Some(pos) = target.expr().and_then(|expr| {
|
|
||||||
find_matching_block_end_in_expr(
|
|
||||||
line,
|
|
||||||
working_set,
|
|
||||||
expr,
|
|
||||||
global_span_offset,
|
|
||||||
global_cursor_offset,
|
|
||||||
)
|
|
||||||
}) {
|
|
||||||
return Some(pos);
|
return Some(pos);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => {}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -297,6 +264,20 @@ fn find_matching_block_end_in_expr(
|
|||||||
global_span_offset: usize,
|
global_span_offset: usize,
|
||||||
global_cursor_offset: usize,
|
global_cursor_offset: usize,
|
||||||
) -> Option<usize> {
|
) -> Option<usize> {
|
||||||
|
macro_rules! find_in_expr_or_continue {
|
||||||
|
($inner_expr:ident) => {
|
||||||
|
if let Some(pos) = find_matching_block_end_in_expr(
|
||||||
|
line,
|
||||||
|
working_set,
|
||||||
|
$inner_expr,
|
||||||
|
global_span_offset,
|
||||||
|
global_cursor_offset,
|
||||||
|
) {
|
||||||
|
return Some(pos);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
if expression.span.contains(global_cursor_offset) && expression.span.start >= global_span_offset
|
if expression.span.contains(global_cursor_offset) && expression.span.start >= global_span_offset
|
||||||
{
|
{
|
||||||
let expr_first = expression.span.start;
|
let expr_first = expression.span.start;
|
||||||
@ -322,20 +303,20 @@ fn find_matching_block_end_in_expr(
|
|||||||
Expr::Keyword(..) => None,
|
Expr::Keyword(..) => None,
|
||||||
Expr::ValueWithUnit(..) => None,
|
Expr::ValueWithUnit(..) => None,
|
||||||
Expr::DateTime(_) => None,
|
Expr::DateTime(_) => None,
|
||||||
Expr::Filepath(_, _) => None,
|
Expr::Filepath(_) => None,
|
||||||
Expr::Directory(_, _) => None,
|
Expr::Directory(_) => None,
|
||||||
Expr::GlobPattern(_, _) => None,
|
Expr::GlobPattern(_) => None,
|
||||||
Expr::String(_) => None,
|
Expr::String(_) => None,
|
||||||
Expr::RawString(_) => None,
|
|
||||||
Expr::CellPath(_) => None,
|
Expr::CellPath(_) => None,
|
||||||
Expr::ImportPattern(_) => None,
|
Expr::ImportPattern(_) => None,
|
||||||
Expr::Overlay(_) => None,
|
Expr::Overlay(_) => None,
|
||||||
Expr::Signature(_) => None,
|
Expr::Signature(_) => None,
|
||||||
|
Expr::MatchPattern(_) => None,
|
||||||
Expr::MatchBlock(_) => None,
|
Expr::MatchBlock(_) => None,
|
||||||
Expr::Nothing => None,
|
Expr::Nothing => None,
|
||||||
Expr::Garbage => None,
|
Expr::Garbage => None,
|
||||||
|
|
||||||
Expr::Table(table) => {
|
Expr::Table(hdr, rows) => {
|
||||||
if expr_last == global_cursor_offset {
|
if expr_last == global_cursor_offset {
|
||||||
// cursor is at table end
|
// cursor is at table end
|
||||||
Some(expr_first)
|
Some(expr_first)
|
||||||
@ -344,19 +325,15 @@ fn find_matching_block_end_in_expr(
|
|||||||
Some(expr_last)
|
Some(expr_last)
|
||||||
} else {
|
} else {
|
||||||
// cursor is inside table
|
// cursor is inside table
|
||||||
table
|
for inner_expr in hdr {
|
||||||
.columns
|
find_in_expr_or_continue!(inner_expr);
|
||||||
.iter()
|
}
|
||||||
.chain(table.rows.iter().flat_map(AsRef::as_ref))
|
for row in rows {
|
||||||
.find_map(|expr| {
|
for inner_expr in row {
|
||||||
find_matching_block_end_in_expr(
|
find_in_expr_or_continue!(inner_expr);
|
||||||
line,
|
}
|
||||||
working_set,
|
}
|
||||||
expr,
|
None
|
||||||
global_span_offset,
|
|
||||||
global_cursor_offset,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -369,45 +346,28 @@ fn find_matching_block_end_in_expr(
|
|||||||
Some(expr_last)
|
Some(expr_last)
|
||||||
} else {
|
} else {
|
||||||
// cursor is inside record
|
// cursor is inside record
|
||||||
exprs.iter().find_map(|expr| match expr {
|
for (k, v) in exprs {
|
||||||
RecordItem::Pair(k, v) => find_matching_block_end_in_expr(
|
find_in_expr_or_continue!(k);
|
||||||
line,
|
find_in_expr_or_continue!(v);
|
||||||
working_set,
|
}
|
||||||
k,
|
None
|
||||||
global_span_offset,
|
|
||||||
global_cursor_offset,
|
|
||||||
)
|
|
||||||
.or_else(|| {
|
|
||||||
find_matching_block_end_in_expr(
|
|
||||||
line,
|
|
||||||
working_set,
|
|
||||||
v,
|
|
||||||
global_span_offset,
|
|
||||||
global_cursor_offset,
|
|
||||||
)
|
|
||||||
}),
|
|
||||||
RecordItem::Spread(_, record) => find_matching_block_end_in_expr(
|
|
||||||
line,
|
|
||||||
working_set,
|
|
||||||
record,
|
|
||||||
global_span_offset,
|
|
||||||
global_cursor_offset,
|
|
||||||
),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Expr::Call(call) => call.arguments.iter().find_map(|arg| {
|
Expr::Call(call) => {
|
||||||
arg.expr().and_then(|expr| {
|
for arg in &call.arguments {
|
||||||
find_matching_block_end_in_expr(
|
let opt_expr = match arg {
|
||||||
line,
|
Argument::Named((_, _, opt_expr)) => opt_expr.as_ref(),
|
||||||
working_set,
|
Argument::Positional(inner_expr) => Some(inner_expr),
|
||||||
expr,
|
Argument::Unknown(inner_expr) => Some(inner_expr),
|
||||||
global_span_offset,
|
};
|
||||||
global_cursor_offset,
|
|
||||||
)
|
if let Some(inner_expr) = opt_expr {
|
||||||
})
|
find_in_expr_or_continue!(inner_expr);
|
||||||
}),
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
Expr::FullCellPath(b) => find_matching_block_end_in_expr(
|
Expr::FullCellPath(b) => find_matching_block_end_in_expr(
|
||||||
line,
|
line,
|
||||||
@ -417,23 +377,12 @@ fn find_matching_block_end_in_expr(
|
|||||||
global_cursor_offset,
|
global_cursor_offset,
|
||||||
),
|
),
|
||||||
|
|
||||||
Expr::BinaryOp(lhs, op, rhs) => [lhs, op, rhs].into_iter().find_map(|expr| {
|
Expr::BinaryOp(lhs, op, rhs) => {
|
||||||
find_matching_block_end_in_expr(
|
find_in_expr_or_continue!(lhs);
|
||||||
line,
|
find_in_expr_or_continue!(op);
|
||||||
working_set,
|
find_in_expr_or_continue!(rhs);
|
||||||
expr,
|
None
|
||||||
global_span_offset,
|
}
|
||||||
global_cursor_offset,
|
|
||||||
)
|
|
||||||
}),
|
|
||||||
|
|
||||||
Expr::Collect(_, expr) => find_matching_block_end_in_expr(
|
|
||||||
line,
|
|
||||||
working_set,
|
|
||||||
expr,
|
|
||||||
global_span_offset,
|
|
||||||
global_cursor_offset,
|
|
||||||
),
|
|
||||||
|
|
||||||
Expr::Block(block_id)
|
Expr::Block(block_id)
|
||||||
| Expr::Closure(block_id)
|
| Expr::Closure(block_id)
|
||||||
@ -458,19 +407,14 @@ fn find_matching_block_end_in_expr(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Expr::StringInterpolation(exprs) | Expr::GlobInterpolation(exprs, _) => {
|
Expr::StringInterpolation(inner_expr) => {
|
||||||
exprs.iter().find_map(|expr| {
|
for inner_expr in inner_expr {
|
||||||
find_matching_block_end_in_expr(
|
find_in_expr_or_continue!(inner_expr);
|
||||||
line,
|
}
|
||||||
working_set,
|
None
|
||||||
expr,
|
|
||||||
global_span_offset,
|
|
||||||
global_cursor_offset,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Expr::List(list) => {
|
Expr::List(inner_expr) => {
|
||||||
if expr_last == global_cursor_offset {
|
if expr_last == global_cursor_offset {
|
||||||
// cursor is at list end
|
// cursor is at list end
|
||||||
Some(expr_first)
|
Some(expr_first)
|
||||||
@ -478,15 +422,11 @@ fn find_matching_block_end_in_expr(
|
|||||||
// cursor is at list start
|
// cursor is at list start
|
||||||
Some(expr_last)
|
Some(expr_last)
|
||||||
} else {
|
} else {
|
||||||
list.iter().find_map(|item| {
|
// cursor is inside list
|
||||||
find_matching_block_end_in_expr(
|
for inner_expr in inner_expr {
|
||||||
line,
|
find_in_expr_or_continue!(inner_expr);
|
||||||
working_set,
|
}
|
||||||
item.expr(),
|
None
|
||||||
global_span_offset,
|
|
||||||
global_cursor_offset,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -1,18 +1,15 @@
-#![allow(clippy::byte_char_slices)]
-
 use nu_cmd_base::hook::eval_hook;
 use nu_engine::{eval_block, eval_block_with_early_return};
-use nu_parser::{lex, parse, unescape_unquote_string, Token, TokenContents};
+use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
+use nu_protocol::engine::StateWorkingSet;
 use nu_protocol::{
-    cli_error::report_compile_error,
-    debugger::WithoutDebug,
-    engine::{EngineState, Stack, StateWorkingSet},
-    report_parse_error, report_parse_warning, report_shell_error, PipelineData, ShellError, Span,
-    Value,
+    engine::{EngineState, Stack},
+    print_if_stream, PipelineData, ShellError, Span, Value,
 };
+use nu_protocol::{report_error, report_error_new};
 #[cfg(windows)]
 use nu_utils::enable_vt_processing;
-use nu_utils::{escape_quote_string, perf};
+use nu_utils::utils::perf;
 use std::path::Path;

 // This will collect environment variables from std::env and adds them to a stack.
@ -43,15 +40,16 @@ fn gather_env_vars(
|
|||||||
init_cwd: &Path,
|
init_cwd: &Path,
|
||||||
) {
|
) {
|
||||||
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
||||||
report_shell_error(
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
engine_state,
|
report_error(
|
||||||
&ShellError::GenericError {
|
&working_set,
|
||||||
error: format!("Environment variable was not captured: {env_str}"),
|
&ShellError::GenericError(
|
||||||
msg: "".into(),
|
format!("Environment variable was not captured: {env_str}"),
|
||||||
span: None,
|
"".to_string(),
|
||||||
help: Some(msg.into()),
|
None,
|
||||||
inner: vec![],
|
Some(msg.into()),
|
||||||
},
|
Vec::new(),
|
||||||
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -74,17 +72,18 @@ fn gather_env_vars(
|
|||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
// Could not capture current working directory
|
// Could not capture current working directory
|
||||||
report_shell_error(
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
engine_state,
|
report_error(
|
||||||
&ShellError::GenericError {
|
&working_set,
|
||||||
error: "Current directory is not a valid utf-8 path".into(),
|
&ShellError::GenericError(
|
||||||
msg: "".into(),
|
"Current directory is not a valid utf-8 path".to_string(),
|
||||||
span: None,
|
"".to_string(),
|
||||||
help: Some(format!(
|
None,
|
||||||
|
Some(format!(
|
||||||
"Retrieving current directory failed: {init_cwd:?} not a valid utf-8 path"
|
"Retrieving current directory failed: {init_cwd:?} not a valid utf-8 path"
|
||||||
)),
|
)),
|
||||||
inner: vec![],
|
Vec::new(),
|
||||||
},
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -94,8 +93,8 @@ fn gather_env_vars(
|
|||||||
let span_offset = engine_state.next_span_start();
|
let span_offset = engine_state.next_span_start();
|
||||||
|
|
||||||
engine_state.add_file(
|
engine_state.add_file(
|
||||||
"Host Environment Variables".into(),
|
"Host Environment Variables".to_string(),
|
||||||
fake_env_file.as_bytes().into(),
|
fake_env_file.as_bytes().to_vec(),
|
||||||
);
|
);
|
||||||
|
|
||||||
let (tokens, _) = lex(fake_env_file.as_bytes(), span_offset, &[], &[], true);
|
let (tokens, _) = lex(fake_env_file.as_bytes(), span_offset, &[], &[], true);
|
||||||
@ -112,7 +111,7 @@ fn gather_env_vars(
|
|||||||
let name = if let Some(Token {
|
let name = if let Some(Token {
|
||||||
contents: TokenContents::Item,
|
contents: TokenContents::Item,
|
||||||
span,
|
span,
|
||||||
}) = parts.first()
|
}) = parts.get(0)
|
||||||
{
|
{
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
let bytes = working_set.get_span_contents(*span);
|
let bytes = working_set.get_span_contents(*span);
|
||||||
@ -203,35 +202,6 @@ fn gather_env_vars(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
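gather_env_vars writes the host environment into a synthetic "Host Environment Variables" buffer and
lexes it, so every variable gets a real Span that later error messages can point at. A loose,
standalone sketch of that idea (the function name and the exact formatting are mine; the real code
quotes values and runs the buffer through the nu lexer):

// Sketch: join KEY=VALUE pairs into one buffer and remember the byte range of
// each entry so it can be referenced by span later.
fn fake_env_file(vars: &[(String, String)]) -> (String, Vec<(String, std::ops::Range<usize>)>) {
    let mut buf = String::new();
    let mut spans = Vec::new();
    for (name, val) in vars {
        let start = buf.len();
        buf.push_str(&format!("{name}={val}\n"));
        spans.push((name.clone(), start..buf.len()));
    }
    (buf, spans)
}

fn main() {
    let vars: Vec<_> = std::env::vars().collect();
    let (buf, spans) = fake_env_file(&vars);
    println!("{} bytes across {} entries", buf.len(), spans.len());
}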
-/// Print a pipeline with formatting applied based on display_output hook.
-///
-/// This function should be preferred when printing values resulting from a completed evaluation.
-/// For values printed as part of a command's execution, such as values printed by the `print` command,
-/// the `PipelineData::print_table` function should be preferred instead as it is not config-dependent.
-///
-/// `no_newline` controls if we need to attach newline character to output.
-pub fn print_pipeline(
-    engine_state: &mut EngineState,
-    stack: &mut Stack,
-    pipeline: PipelineData,
-    no_newline: bool,
-) -> Result<(), ShellError> {
-    if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
-        let pipeline = eval_hook(
-            engine_state,
-            stack,
-            Some(pipeline),
-            vec![],
-            &hook,
-            "display_output",
-        )?;
-        pipeline.print_raw(engine_state, no_newline, false)
-    } else {
-        // if display_output isn't set, we should still prefer to print with some formatting
-        pipeline.print_table(engine_state, stack, no_newline, false)
-    }
-}
-
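The removed print_pipeline helper shows the dispatch at play: if a display_output hook is configured
it formats the finished pipeline, otherwise the default table renderer is used. A stand-in sketch of
that shape with plain Rust types (everything here is a stand-in, not the nushell API):

// Sketch: if a user-configured display hook exists, let it transform the
// output first; otherwise fall back to the default formatter.
type Rendered = String;

fn render_output(value: String, display_output_hook: Option<&dyn Fn(String) -> Rendered>) -> Rendered {
    match display_output_hook {
        Some(hook) => hook(value),           // user-configured formatting
        None => format!("[table]\n{value}"), // default formatting
    }
}

fn main() {
    let upper = |v: String| v.to_uppercase();
    println!("{}", render_output("ok".into(), Some(&upper)));
    println!("{}", render_output("ok".into(), None));
}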
pub fn eval_source(
|
pub fn eval_source(
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
@ -239,46 +209,9 @@ pub fn eval_source(
|
|||||||
fname: &str,
|
fname: &str,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
allow_return: bool,
|
allow_return: bool,
|
||||||
) -> i32 {
|
) -> bool {
|
||||||
let start_time = std::time::Instant::now();
|
let start_time = std::time::Instant::now();
|
||||||
|
|
||||||
let exit_code = match evaluate_source(engine_state, stack, source, fname, input, allow_return) {
|
|
||||||
Ok(failed) => {
|
|
||||||
let code = failed.into();
|
|
||||||
stack.set_last_exit_code(code, Span::unknown());
|
|
||||||
code
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
report_shell_error(engine_state, &err);
|
|
||||||
let code = err.exit_code();
|
|
||||||
stack.set_last_error(&err);
|
|
||||||
code.unwrap_or(0)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// reset vt processing, aka ansi because illbehaved externals can break it
|
|
||||||
#[cfg(windows)]
|
|
||||||
{
|
|
||||||
let _ = enable_vt_processing();
|
|
||||||
}
|
|
||||||
|
|
||||||
perf!(
|
|
||||||
&format!("eval_source {}", &fname),
|
|
||||||
start_time,
|
|
||||||
engine_state.get_config().use_ansi_coloring
|
|
||||||
);
|
|
||||||
|
|
||||||
exit_code
|
|
||||||
}
|
|
||||||
|
|
||||||
fn evaluate_source(
|
|
||||||
engine_state: &mut EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
source: &[u8],
|
|
||||||
fname: &str,
|
|
||||||
input: PipelineData,
|
|
||||||
allow_return: bool,
|
|
||||||
) -> Result<bool, ShellError> {
|
|
||||||
let (block, delta) = {
|
let (block, delta) = {
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
let output = parse(
|
let output = parse(
|
||||||
@ -287,35 +220,105 @@ fn evaluate_source(
|
|||||||
source,
|
source,
|
||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
if let Some(warning) = working_set.parse_warnings.first() {
|
|
||||||
report_parse_warning(&working_set, warning);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_parse_error(&working_set, err);
|
set_last_exit_code(stack, 1);
|
||||||
return Ok(true);
|
report_error(&working_set, err);
|
||||||
}
|
return false;
|
||||||
|
|
||||||
if let Some(err) = working_set.compile_errors.first() {
|
|
||||||
report_compile_error(&working_set, err);
|
|
||||||
return Ok(true);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
(output, working_set.render())
|
(output, working_set.render())
|
||||||
};
|
};
|
||||||
|
|
||||||
engine_state.merge_delta(delta)?;
|
if let Err(err) = engine_state.merge_delta(delta) {
|
||||||
|
set_last_exit_code(stack, 1);
|
||||||
|
report_error_new(engine_state, &err);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
let pipeline = if allow_return {
|
let b = if allow_return {
|
||||||
eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input)
|
eval_block_with_early_return(engine_state, stack, &block, input, false, false)
|
||||||
} else {
|
} else {
|
||||||
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
|
eval_block(engine_state, stack, &block, input, false, false)
|
||||||
}?;
|
};
|
||||||
|
|
||||||
let no_newline = matches!(&pipeline, &PipelineData::ByteStream(..));
|
match b {
|
||||||
print_pipeline(engine_state, stack, pipeline, no_newline)?;
|
Ok(pipeline_data) => {
|
||||||
|
let config = engine_state.get_config();
|
||||||
|
let result;
|
||||||
|
if let PipelineData::ExternalStream {
|
||||||
|
stdout: stream,
|
||||||
|
stderr: stderr_stream,
|
||||||
|
exit_code,
|
||||||
|
..
|
||||||
|
} = pipeline_data
|
||||||
|
{
|
||||||
|
result = print_if_stream(stream, stderr_stream, false, exit_code);
|
||||||
|
} else if let Some(hook) = config.hooks.display_output.clone() {
|
||||||
|
match eval_hook(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
Some(pipeline_data),
|
||||||
|
vec![],
|
||||||
|
&hook,
|
||||||
|
"display_output",
|
||||||
|
) {
|
||||||
|
Err(err) => {
|
||||||
|
result = Err(err);
|
||||||
|
}
|
||||||
|
Ok(val) => {
|
||||||
|
result = val.print(engine_state, stack, false, false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
result = pipeline_data.print(engine_state, stack, true, false);
|
||||||
|
}
|
||||||
|
|
||||||
Ok(false)
|
match result {
|
||||||
|
Err(err) => {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
|
report_error(&working_set, &err);
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
Ok(exit_code) => {
|
||||||
|
set_last_exit_code(stack, exit_code);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// reset vt processing, aka ansi because illbehaved externals can break it
|
||||||
|
#[cfg(windows)]
|
||||||
|
{
|
||||||
|
let _ = enable_vt_processing();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
set_last_exit_code(stack, 1);
|
||||||
|
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
|
report_error(&working_set, &err);
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
perf(
|
||||||
|
&format!("eval_source {}", &fname),
|
||||||
|
start_time,
|
||||||
|
file!(),
|
||||||
|
line!(),
|
||||||
|
column!(),
|
||||||
|
engine_state.get_config().use_ansi_coloring,
|
||||||
|
);
|
||||||
|
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_last_exit_code(stack: &mut Stack, exit_code: i64) {
|
||||||
|
stack.add_env_var(
|
||||||
|
"LAST_EXIT_CODE".to_string(),
|
||||||
|
Value::int(exit_code, Span::unknown()),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
@ -340,10 +343,16 @@ mod test {
|
|||||||
|
|
||||||
let env = engine_state.render_env_vars();
|
let env = engine_state.render_env_vars();
|
||||||
|
|
||||||
assert!(matches!(env.get("FOO"), Some(&Value::String { val, .. }) if val == "foo"));
|
assert!(
|
||||||
assert!(matches!(env.get("SYMBOLS"), Some(&Value::String { val, .. }) if val == symbols));
|
matches!(env.get(&"FOO".to_string()), Some(&Value::String { val, .. }) if val == "foo")
|
||||||
assert!(matches!(env.get(symbols), Some(&Value::String { val, .. }) if val == "symbols"));
|
);
|
||||||
assert!(env.contains_key("PWD"));
|
assert!(
|
||||||
|
matches!(env.get(&"SYMBOLS".to_string()), Some(&Value::String { val, .. }) if val == symbols)
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
matches!(env.get(&symbols.to_string()), Some(&Value::String { val, .. }) if val == "symbols")
|
||||||
|
);
|
||||||
|
assert!(env.get(&"PWD".to_string()).is_some());
|
||||||
assert_eq!(env.len(), 4);
|
assert_eq!(env.len(), 4);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,296 +0,0 @@
|
|||||||
use nu_protocol::HistoryFileFormat;
|
|
||||||
use nu_test_support::{nu, Outcome};
|
|
||||||
use reedline::{
|
|
||||||
FileBackedHistory, History, HistoryItem, HistoryItemId, ReedlineError, SearchQuery,
|
|
||||||
SqliteBackedHistory,
|
|
||||||
};
|
|
||||||
use rstest::rstest;
|
|
||||||
use tempfile::TempDir;
|
|
||||||
|
|
||||||
struct Test {
|
|
||||||
cfg_dir: TempDir,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Test {
|
|
||||||
fn new(history_format: &'static str) -> Self {
|
|
||||||
let cfg_dir = tempfile::Builder::new()
|
|
||||||
.prefix("history_import_test")
|
|
||||||
.tempdir()
|
|
||||||
.unwrap();
|
|
||||||
// Assigning to $env.config.history.file_format seems to work only in startup
|
|
||||||
// configuration.
|
|
||||||
std::fs::write(
|
|
||||||
cfg_dir.path().join("env.nu"),
|
|
||||||
format!("$env.config.history.file_format = {history_format:?}"),
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
Self { cfg_dir }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn nu(&self, cmd: impl AsRef<str>) -> Outcome {
|
|
||||||
let env = [(
|
|
||||||
"XDG_CONFIG_HOME".to_string(),
|
|
||||||
self.cfg_dir.path().to_str().unwrap().to_string(),
|
|
||||||
)];
|
|
||||||
let env_config = self.cfg_dir.path().join("env.nu");
|
|
||||||
nu!(envs: env, env_config: env_config, cmd.as_ref())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn open_plaintext(&self) -> Result<FileBackedHistory, ReedlineError> {
|
|
||||||
FileBackedHistory::with_file(
|
|
||||||
100,
|
|
||||||
self.cfg_dir
|
|
||||||
.path()
|
|
||||||
.join("nushell")
|
|
||||||
.join(HistoryFileFormat::Plaintext.default_file_name()),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn open_sqlite(&self) -> Result<SqliteBackedHistory, ReedlineError> {
|
|
||||||
SqliteBackedHistory::with_file(
|
|
||||||
self.cfg_dir
|
|
||||||
.path()
|
|
||||||
.join("nushell")
|
|
||||||
.join(HistoryFileFormat::Sqlite.default_file_name()),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn open_backend(&self, format: HistoryFileFormat) -> Result<Box<dyn History>, ReedlineError> {
|
|
||||||
fn boxed(be: impl History + 'static) -> Box<dyn History> {
|
|
||||||
Box::new(be)
|
|
||||||
}
|
|
||||||
use HistoryFileFormat::*;
|
|
||||||
match format {
|
|
||||||
Plaintext => self.open_plaintext().map(boxed),
|
|
||||||
Sqlite => self.open_sqlite().map(boxed),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
enum HistorySource {
|
|
||||||
Vec(Vec<HistoryItem>),
|
|
||||||
Command(&'static str),
|
|
||||||
}
|
|
||||||
|
|
||||||
struct TestCase {
|
|
||||||
dst_format: HistoryFileFormat,
|
|
||||||
dst_history: Vec<HistoryItem>,
|
|
||||||
src_history: HistorySource,
|
|
||||||
want_history: Vec<HistoryItem>,
|
|
||||||
}
|
|
||||||
|
|
||||||
const EMPTY_TEST_CASE: TestCase = TestCase {
|
|
||||||
dst_format: HistoryFileFormat::Plaintext,
|
|
||||||
dst_history: Vec::new(),
|
|
||||||
src_history: HistorySource::Vec(Vec::new()),
|
|
||||||
want_history: Vec::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
impl TestCase {
|
|
||||||
fn run(self) {
|
|
||||||
use HistoryFileFormat::*;
|
|
||||||
let test = Test::new(match self.dst_format {
|
|
||||||
Plaintext => "plaintext",
|
|
||||||
Sqlite => "sqlite",
|
|
||||||
});
|
|
||||||
save_all(
|
|
||||||
&mut *test.open_backend(self.dst_format).unwrap(),
|
|
||||||
self.dst_history,
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let outcome = match self.src_history {
|
|
||||||
HistorySource::Vec(src_history) => {
|
|
||||||
let src_format = match self.dst_format {
|
|
||||||
Plaintext => Sqlite,
|
|
||||||
Sqlite => Plaintext,
|
|
||||||
};
|
|
||||||
save_all(&mut *test.open_backend(src_format).unwrap(), src_history).unwrap();
|
|
||||||
test.nu("history import")
|
|
||||||
}
|
|
||||||
HistorySource::Command(cmd) => {
|
|
||||||
let mut cmd = cmd.to_string();
|
|
||||||
cmd.push_str(" | history import");
|
|
||||||
test.nu(cmd)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
assert!(outcome.status.success());
|
|
||||||
let got = query_all(&*test.open_backend(self.dst_format).unwrap()).unwrap();
|
|
||||||
|
|
||||||
// Compare just the commands first, for readability.
|
|
||||||
fn commands_only(items: &[HistoryItem]) -> Vec<&str> {
|
|
||||||
items
|
|
||||||
.iter()
|
|
||||||
.map(|item| item.command_line.as_str())
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
assert_eq!(commands_only(&got), commands_only(&self.want_history));
|
|
||||||
// If commands match, compare full items.
|
|
||||||
assert_eq!(got, self.want_history);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn query_all(history: &dyn History) -> Result<Vec<HistoryItem>, ReedlineError> {
|
|
||||||
history.search(SearchQuery::everything(
|
|
||||||
reedline::SearchDirection::Forward,
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn save_all(history: &mut dyn History, items: Vec<HistoryItem>) -> Result<(), ReedlineError> {
|
|
||||||
for item in items {
|
|
||||||
history.save(item)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
const EMPTY_ITEM: HistoryItem = HistoryItem {
|
|
||||||
command_line: String::new(),
|
|
||||||
id: None,
|
|
||||||
start_timestamp: None,
|
|
||||||
session_id: None,
|
|
||||||
hostname: None,
|
|
||||||
cwd: None,
|
|
||||||
duration: None,
|
|
||||||
exit_status: None,
|
|
||||||
more_info: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn history_import_pipe_string() {
|
|
||||||
TestCase {
|
|
||||||
dst_format: HistoryFileFormat::Plaintext,
|
|
||||||
src_history: HistorySource::Command("echo bar"),
|
|
||||||
want_history: vec![HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(0)),
|
|
||||||
command_line: "bar".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
}],
|
|
||||||
..EMPTY_TEST_CASE
|
|
||||||
}
|
|
||||||
.run();
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn history_import_pipe_record() {
|
|
||||||
TestCase {
|
|
||||||
dst_format: HistoryFileFormat::Sqlite,
|
|
||||||
src_history: HistorySource::Command("[[cwd command]; [/tmp some_command]]"),
|
|
||||||
want_history: vec![HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(1)),
|
|
||||||
command_line: "some_command".to_string(),
|
|
||||||
cwd: Some("/tmp".to_string()),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
}],
|
|
||||||
..EMPTY_TEST_CASE
|
|
||||||
}
|
|
||||||
.run();
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn to_empty_plaintext() {
|
|
||||||
TestCase {
|
|
||||||
dst_format: HistoryFileFormat::Plaintext,
|
|
||||||
src_history: HistorySource::Vec(vec![
|
|
||||||
HistoryItem {
|
|
||||||
command_line: "foo".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
HistoryItem {
|
|
||||||
command_line: "bar".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
]),
|
|
||||||
want_history: vec![
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(0)),
|
|
||||||
command_line: "foo".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(1)),
|
|
||||||
command_line: "bar".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
],
|
|
||||||
..EMPTY_TEST_CASE
|
|
||||||
}
|
|
||||||
.run()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn to_empty_sqlite() {
|
|
||||||
TestCase {
|
|
||||||
dst_format: HistoryFileFormat::Sqlite,
|
|
||||||
src_history: HistorySource::Vec(vec![
|
|
||||||
HistoryItem {
|
|
||||||
command_line: "foo".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
HistoryItem {
|
|
||||||
command_line: "bar".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
]),
|
|
||||||
want_history: vec![
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(1)),
|
|
||||||
command_line: "foo".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(2)),
|
|
||||||
command_line: "bar".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
],
|
|
||||||
..EMPTY_TEST_CASE
|
|
||||||
}
|
|
||||||
.run()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[case::plaintext(HistoryFileFormat::Plaintext)]
|
|
||||||
#[case::sqlite(HistoryFileFormat::Sqlite)]
|
|
||||||
fn to_existing(#[case] dst_format: HistoryFileFormat) {
|
|
||||||
TestCase {
|
|
||||||
dst_format,
|
|
||||||
dst_history: vec![
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(0)),
|
|
||||||
command_line: "original-1".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(1)),
|
|
||||||
command_line: "original-2".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
],
|
|
||||||
src_history: HistorySource::Vec(vec![HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(1)),
|
|
||||||
command_line: "new".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
}]),
|
|
||||||
want_history: vec![
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(0)),
|
|
||||||
command_line: "original-1".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(1)),
|
|
||||||
command_line: "original-2".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
HistoryItem {
|
|
||||||
id: Some(HistoryItemId::new(2)),
|
|
||||||
command_line: "new".to_string(),
|
|
||||||
..EMPTY_ITEM
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
.run()
|
|
||||||
}
|
|
@@ -1,7 +0,0 @@
-use nu_test_support::nu;
-
-#[test]
-fn not_empty() {
-    let result = nu!("keybindings list | is-not-empty");
-    assert_eq!(result.out, "true");
-}
@@ -1,3 +0,0 @@
-mod history_import;
-mod keybindings_list;
-mod nu_highlight;
@@ -1,7 +0,0 @@
-use nu_test_support::nu;
-
-#[test]
-fn nu_highlight_not_expr() {
-    let actual = nu!("'not false' | nu-highlight | ansi strip");
-    assert_eq!(actual.out, "not false");
-}
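These deleted integration tests drive the nushell binary through the nu! macro from nu_test_support
and assert on its captured stdout. A minimal sketch of a test in the same style (the pipeline inside
is only an illustration, not one of the removed tests):

use nu_test_support::nu;

// Sketch: run a pipeline through the `nu!` macro and assert on captured stdout.
#[test]
fn version_record_is_not_empty() {
    let actual = nu!("version | is-not-empty");
    assert_eq!(actual.out, "true");
}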
986
crates/nu-cli/tests/completions.rs
Normal file
@ -0,0 +1,986 @@
|
|||||||
|
pub mod support;
|
||||||
|
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use nu_cli::NuCompleter;
|
||||||
|
use nu_parser::parse;
|
||||||
|
use nu_protocol::engine::StateWorkingSet;
|
||||||
|
use reedline::{Completer, Suggestion};
|
||||||
|
use rstest::{fixture, rstest};
|
||||||
|
use support::{
|
||||||
|
completions_helpers::{new_partial_engine, new_quote_engine},
|
||||||
|
file, folder, match_suggestions, new_engine,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[fixture]
|
||||||
|
fn completer() -> NuCompleter {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Add record value as example
|
||||||
|
let record = "def tst [--mod -s] {}";
|
||||||
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[fixture]
|
||||||
|
fn completer_strings() -> NuCompleter {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Add record value as example
|
||||||
|
let record = r#"def animals [] { ["cat", "dog", "eel" ] }
|
||||||
|
def my-command [animal: string@animals] { print $animal }"#;
|
||||||
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[fixture]
|
||||||
|
fn extern_completer() -> NuCompleter {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Add record value as example
|
||||||
|
let record = r#"
|
||||||
|
def animals [] { [ "cat", "dog", "eel" ] }
|
||||||
|
extern spam [
|
||||||
|
animal: string@animals
|
||||||
|
--foo (-f): string@animals
|
||||||
|
-b: string@animals
|
||||||
|
]
|
||||||
|
"#;
|
||||||
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn variables_dollar_sign_with_varialblecompletion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "$ ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
assert_eq!(7, suggestions.len());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn variables_double_dash_argument_with_flagcompletion(mut completer: NuCompleter) {
|
||||||
|
let suggestions = completer.complete("tst --", 6);
|
||||||
|
let expected: Vec<String> = vec!["--help".into(), "--mod".into()];
|
||||||
|
// dbg!(&expected, &suggestions);
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn variables_single_dash_argument_with_flagcompletion(mut completer: NuCompleter) {
|
||||||
|
let suggestions = completer.complete("tst -", 5);
|
||||||
|
let expected: Vec<String> = vec!["--help".into(), "--mod".into(), "-h".into(), "-s".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn variables_command_with_commandcompletion(mut completer_strings: NuCompleter) {
|
||||||
|
let suggestions = completer_strings.complete("my-c ", 4);
|
||||||
|
let expected: Vec<String> = vec!["my-command".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn variables_subcommands_with_customcompletion(mut completer_strings: NuCompleter) {
|
||||||
|
let suggestions = completer_strings.complete("my-command ", 11);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn variables_customcompletion_subcommands_with_customcompletion_2(
|
||||||
|
mut completer_strings: NuCompleter,
|
||||||
|
) {
|
||||||
|
let suggestions = completer_strings.complete("my-command ", 11);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn dotnu_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test source completion
|
||||||
|
let completion_str = "source-env ".to_string();
|
||||||
|
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||||
|
|
||||||
|
assert_eq!(1, suggestions.len());
|
||||||
|
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
|
||||||
|
|
||||||
|
// Test use completion
|
||||||
|
let completion_str = "use ".to_string();
|
||||||
|
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||||
|
|
||||||
|
assert_eq!(1, suggestions.len());
|
||||||
|
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[ignore]
|
||||||
|
fn external_completer_trailing_space() {
|
||||||
|
// https://github.com/nushell/nushell/issues/6378
|
||||||
|
let block = "let external_completer = {|spans| $spans}";
|
||||||
|
let input = "gh alias ".to_string();
|
||||||
|
|
||||||
|
let suggestions = run_external_completion(block, &input);
|
||||||
|
assert_eq!(3, suggestions.len());
|
||||||
|
assert_eq!("gh", suggestions.get(0).unwrap().value);
|
||||||
|
assert_eq!("alias", suggestions.get(1).unwrap().value);
|
||||||
|
assert_eq!("", suggestions.get(2).unwrap().value);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn external_completer_no_trailing_space() {
|
||||||
|
let block = "{|spans| $spans}";
|
||||||
|
let input = "gh alias".to_string();
|
||||||
|
|
||||||
|
let suggestions = run_external_completion(block, &input);
|
||||||
|
assert_eq!(2, suggestions.len());
|
||||||
|
assert_eq!("gh", suggestions.get(0).unwrap().value);
|
||||||
|
assert_eq!("alias", suggestions.get(1).unwrap().value);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn external_completer_pass_flags() {
|
||||||
|
let block = "{|spans| $spans}";
|
||||||
|
let input = "gh api --".to_string();
|
||||||
|
|
||||||
|
let suggestions = run_external_completion(block, &input);
|
||||||
|
assert_eq!(3, suggestions.len());
|
||||||
|
assert_eq!("gh", suggestions.get(0).unwrap().value);
|
||||||
|
assert_eq!("api", suggestions.get(1).unwrap().value);
|
||||||
|
assert_eq!("--", suggestions.get(2).unwrap().value);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn file_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, dir_str, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test completions for the current folder
|
||||||
|
let target_dir = format!("cp {dir_str}");
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
folder(dir.join("another")),
|
||||||
|
file(dir.join("custom_completion.nu")),
|
||||||
|
file(dir.join("nushell")),
|
||||||
|
folder(dir.join("test_a")),
|
||||||
|
folder(dir.join("test_b")),
|
||||||
|
file(dir.join(".hidden_file")),
|
||||||
|
folder(dir.join(".hidden_folder")),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
|
||||||
|
// Test completions for a file
|
||||||
|
let target_dir = format!("cp {}", folder(dir.join("another")));
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn partial_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, _, engine, stack) = new_partial_engine();
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test completions for a folder's name
|
||||||
|
let target_dir = format!("cd {}", file(dir.join("pa")));
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
folder(dir.join("partial_a")),
|
||||||
|
folder(dir.join("partial_b")),
|
||||||
|
folder(dir.join("partial_c")),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
|
||||||
|
// Test completions for the files whose name begin with "h"
|
||||||
|
// and are present under directories whose names begin with "pa"
|
||||||
|
let dir_str = file(dir.join("pa").join("h"));
|
||||||
|
let target_dir = format!("cp {dir_str}");
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
file(dir.join("partial_a").join("hello")),
|
||||||
|
file(dir.join("partial_a").join("hola")),
|
||||||
|
file(dir.join("partial_b").join("hello_b")),
|
||||||
|
file(dir.join("partial_b").join("hi_b")),
|
||||||
|
file(dir.join("partial_c").join("hello_c")),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
|
||||||
|
// Test completion for all files under directories whose names begin with "pa"
|
||||||
|
let dir_str = folder(dir.join("pa"));
|
||||||
|
let target_dir = format!("ls {dir_str}");
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
file(dir.join("partial_a").join("anotherfile")),
|
||||||
|
file(dir.join("partial_a").join("hello")),
|
||||||
|
file(dir.join("partial_a").join("hola")),
|
||||||
|
file(dir.join("partial_b").join("hello_b")),
|
||||||
|
file(dir.join("partial_b").join("hi_b")),
|
||||||
|
file(dir.join("partial_c").join("hello_c")),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
|
||||||
|
// Test completion for a single file
|
||||||
|
let dir_str = file(dir.join("fi").join("so"));
|
||||||
|
let target_dir = format!("rm {dir_str}");
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![file(dir.join("final_partial").join("somefile"))];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
|
||||||
|
// Test completion where there is a sneaky `..` in the path
|
||||||
|
let dir_str = file(dir.join("par").join("..").join("fi").join("so"));
|
||||||
|
let target_dir = format!("rm {dir_str}");
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![file(dir.join("final_partial").join("somefile"))];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn command_ls_with_filecompletion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "ls ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
#[test]
|
||||||
|
fn command_open_with_filecompletion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "open ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn command_rm_with_globcompletion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "rm ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn command_cp_with_globcompletion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "cp ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn command_save_with_filecompletion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "save ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn command_touch_with_filecompletion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "touch ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn command_watch_with_filecompletion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "watch ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn file_completion_quoted() {
|
||||||
|
let (_, _, engine, stack) = new_quote_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "open ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"`--help`".to_string(),
|
||||||
|
"`-42`".to_string(),
|
||||||
|
"`-inf`".to_string(),
|
||||||
|
"`4.2`".to_string(),
|
||||||
|
"`te st.txt`".to_string(),
|
||||||
|
"`te#st.txt`".to_string(),
|
||||||
|
"`te'st.txt`".to_string(),
|
||||||
|
"`te(st).txt`".to_string(),
|
||||||
|
format!("`{}`", folder("test dir".into())),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
|
||||||
|
let dir: PathBuf = "test dir".into();
|
||||||
|
let target_dir = format!("open '{}'", folder(dir.clone()));
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
format!("`{}`", file(dir.join("double quote"))),
|
||||||
|
format!("`{}`", file(dir.join("single quote"))),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn flag_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
// Test completions for the 'ls' flags
|
||||||
|
let suggestions = completer.complete("ls -", 4);
|
||||||
|
|
||||||
|
assert_eq!(16, suggestions.len());
|
||||||
|
|
||||||
|
let expected: Vec<String> = vec![
|
||||||
|
"--all".into(),
|
||||||
|
"--directory".into(),
|
||||||
|
"--du".into(),
|
||||||
|
"--full-paths".into(),
|
||||||
|
"--help".into(),
|
||||||
|
"--long".into(),
|
||||||
|
"--mime-type".into(),
|
||||||
|
"--short-names".into(),
|
||||||
|
"-D".into(),
|
||||||
|
"-a".into(),
|
||||||
|
"-d".into(),
|
||||||
|
"-f".into(),
|
||||||
|
"-h".into(),
|
||||||
|
"-l".into(),
|
||||||
|
"-m".into(),
|
||||||
|
"-s".into(),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn folder_with_directorycompletions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, dir_str, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test completions for the current folder
|
||||||
|
let target_dir = format!("cd {dir_str}");
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
folder(dir.join("another")),
|
||||||
|
folder(dir.join("test_a")),
|
||||||
|
folder(dir.join("test_b")),
|
||||||
|
folder(dir.join(".hidden_folder")),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn variables_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Add record value as example
|
||||||
|
let record = "let actor = { name: 'Tom Hardy', age: 44 }";
|
||||||
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test completions for $nu
|
||||||
|
let suggestions = completer.complete("$nu.", 4);
|
||||||
|
|
||||||
|
assert_eq!(14, suggestions.len());
|
||||||
|
|
||||||
|
let expected: Vec<String> = vec![
|
||||||
|
"config-path".into(),
|
||||||
|
"current-exe".into(),
|
||||||
|
"default-config-dir".into(),
|
||||||
|
"env-path".into(),
|
||||||
|
"history-path".into(),
|
||||||
|
"home-path".into(),
|
||||||
|
"is-interactive".into(),
|
||||||
|
"is-login".into(),
|
||||||
|
"loginshell-path".into(),
|
||||||
|
"os-info".into(),
|
||||||
|
"pid".into(),
|
||||||
|
"plugin-path".into(),
|
||||||
|
"startup-time".into(),
|
||||||
|
"temp-path".into(),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
|
||||||
|
// Test completions for $nu.h (filter)
|
||||||
|
let suggestions = completer.complete("$nu.h", 5);
|
||||||
|
|
||||||
|
assert_eq!(2, suggestions.len());
|
||||||
|
|
||||||
|
let expected: Vec<String> = vec!["history-path".into(), "home-path".into()];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
|
||||||
|
// Test completions for $nu.os-info
|
||||||
|
let suggestions = completer.complete("$nu.os-info.", 12);
|
||||||
|
assert_eq!(4, suggestions.len());
|
||||||
|
let expected: Vec<String> = vec![
|
||||||
|
"arch".into(),
|
||||||
|
"family".into(),
|
||||||
|
"kernel_version".into(),
|
||||||
|
"name".into(),
|
||||||
|
];
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
|
||||||
|
// Test completions for custom var
|
||||||
|
let suggestions = completer.complete("$actor.", 7);
|
||||||
|
|
||||||
|
assert_eq!(2, suggestions.len());
|
||||||
|
|
||||||
|
let expected: Vec<String> = vec!["age".into(), "name".into()];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
|
||||||
|
// Test completions for custom var (filtering)
|
||||||
|
let suggestions = completer.complete("$actor.n", 8);
|
||||||
|
|
||||||
|
assert_eq!(1, suggestions.len());
|
||||||
|
|
||||||
|
let expected: Vec<String> = vec!["name".into()];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
|
||||||
|
// Test completions for $env
|
||||||
|
let suggestions = completer.complete("$env.", 5);
|
||||||
|
|
||||||
|
assert_eq!(3, suggestions.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected: Vec<String> = vec!["PWD".into(), "Path".into(), "TEST".into()];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected: Vec<String> = vec!["PATH".into(), "PWD".into(), "TEST".into()];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
|
||||||
|
// Test completions for $env
|
||||||
|
let suggestions = completer.complete("$env.T", 6);
|
||||||
|
|
||||||
|
assert_eq!(1, suggestions.len());
|
||||||
|
|
||||||
|
let expected: Vec<String> = vec!["TEST".into()];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn alias_of_command_and_flags() {
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Create an alias
|
||||||
|
let alias = r#"alias ll = ls -l"#;
|
||||||
|
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let suggestions = completer.complete("ll t", 4);
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec!["test_a\\".to_string(), "test_b\\".to_string()];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec!["test_a/".to_string(), "test_b/".to_string()];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn alias_of_basic_command() {
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Create an alias
|
||||||
|
let alias = r#"alias ll = ls "#;
|
||||||
|
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let suggestions = completer.complete("ll t", 4);
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec!["test_a\\".to_string(), "test_b\\".to_string()];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec!["test_a/".to_string(), "test_b/".to_string()];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn alias_of_another_alias() {
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Create an alias
|
||||||
|
let alias = r#"alias ll = ls -la"#;
|
||||||
|
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir.clone()).is_ok());
|
||||||
|
// Create the second alias
|
||||||
|
let alias = r#"alias lf = ll -f"#;
|
||||||
|
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let suggestions = completer.complete("lf t", 4);
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec!["test_a\\".to_string(), "test_b\\".to_string()];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec!["test_a/".to_string(), "test_b/".to_string()];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run_external_completion(block: &str, input: &str) -> Vec<Suggestion> {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, _, mut engine_state, mut stack) = new_engine();
|
||||||
|
let (_, delta) = {
|
||||||
|
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||||
|
let block = parse(&mut working_set, None, block.as_bytes(), false);
|
||||||
|
assert!(working_set.parse_errors.is_empty());
|
||||||
|
|
||||||
|
(block, working_set.render())
|
||||||
|
};
|
||||||
|
|
||||||
|
assert!(engine_state.merge_delta(delta).is_ok());
|
||||||
|
|
||||||
|
// Merge environment into the permanent state
|
||||||
|
assert!(engine_state.merge_env(&mut stack, &dir).is_ok());
|
||||||
|
|
||||||
|
let latest_block_id = engine_state.num_blocks() - 1;
|
||||||
|
|
||||||
|
// Change config adding the external completer
|
||||||
|
let mut config = engine_state.get_config().clone();
|
||||||
|
config.external_completer = Some(latest_block_id);
|
||||||
|
engine_state.set_config(config);
|
||||||
|
|
||||||
|
// Instantiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine_state), stack);
|
||||||
|
|
||||||
|
completer.complete(input, input.len())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unknown_command_completion() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let target_dir = "thiscommanddoesnotexist ";
|
||||||
|
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn flagcompletion_triggers_after_cursor(mut completer: NuCompleter) {
|
||||||
|
let suggestions = completer.complete("tst -h", 5);
|
||||||
|
let expected: Vec<String> = vec!["--help".into(), "--mod".into(), "-h".into(), "-s".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn customcompletion_triggers_after_cursor(mut completer_strings: NuCompleter) {
|
||||||
|
let suggestions = completer_strings.complete("my-command c", 11);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn customcompletion_triggers_after_cursor_piped(mut completer_strings: NuCompleter) {
|
||||||
|
let suggestions = completer_strings.complete("my-command c | ls", 11);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn flagcompletion_triggers_after_cursor_piped(mut completer: NuCompleter) {
|
||||||
|
let suggestions = completer.complete("tst -h | ls", 5);
|
||||||
|
let expected: Vec<String> = vec!["--help".into(), "--mod".into(), "-h".into(), "-s".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn filecompletions_triggers_after_cursor() {
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
let suggestions = completer.complete("cp test_c", 3);
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another\\".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a\\".to_string(),
|
||||||
|
"test_b\\".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder\\".to_string(),
|
||||||
|
];
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
"another/".to_string(),
|
||||||
|
"custom_completion.nu".to_string(),
|
||||||
|
"nushell".to_string(),
|
||||||
|
"test_a/".to_string(),
|
||||||
|
"test_b/".to_string(),
|
||||||
|
".hidden_file".to_string(),
|
||||||
|
".hidden_folder/".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn extern_custom_completion_positional(mut extern_completer: NuCompleter) {
|
||||||
|
let suggestions = extern_completer.complete("spam ", 5);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn extern_custom_completion_long_flag_1(mut extern_completer: NuCompleter) {
|
||||||
|
let suggestions = extern_completer.complete("spam --foo=", 11);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn extern_custom_completion_long_flag_2(mut extern_completer: NuCompleter) {
|
||||||
|
let suggestions = extern_completer.complete("spam --foo ", 11);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn extern_custom_completion_long_flag_short(mut extern_completer: NuCompleter) {
|
||||||
|
let suggestions = extern_completer.complete("spam -f ", 8);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn extern_custom_completion_short_flag(mut extern_completer: NuCompleter) {
|
||||||
|
let suggestions = extern_completer.complete("spam -b ", 8);
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
fn extern_complete_flags(mut extern_completer: NuCompleter) {
|
||||||
|
let suggestions = extern_completer.complete("spam -", 6);
|
||||||
|
let expected: Vec<String> = vec!["--foo".into(), "-b".into(), "-f".into()];
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[ignore = "was reverted, still needs fixing"]
|
||||||
|
#[rstest]
|
||||||
|
fn alias_offset_bug_7648() {
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Create an alias
|
||||||
|
let alias = r#"alias ea = ^$env.EDITOR /tmp/test.s"#;
|
||||||
|
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Issue #7648
|
||||||
|
// Nushell crashes when an alias name is shorter than the alias command
|
||||||
|
// and the alias command is a external command
|
||||||
|
// This happens because of offset is not correct.
|
||||||
|
// This crashes before PR #7779
|
||||||
|
let _suggestions = completer.complete("e", 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[ignore = "was reverted, still needs fixing"]
|
||||||
|
#[rstest]
|
||||||
|
fn alias_offset_bug_7754() {
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Create an alias
|
||||||
|
let alias = r#"alias ll = ls -l"#;
|
||||||
|
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Issue #7754
|
||||||
|
// Nushell crashes when an alias name is shorter than the alias command
|
||||||
|
// and the alias command contains pipes.
|
||||||
|
// This crashes before PR #7756
|
||||||
|
let _suggestions = completer.complete("ll -a | c", 9);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn get_path_env_var_8003() {
|
||||||
|
// Create a new engine
|
||||||
|
let (_, _, engine, _) = new_engine();
|
||||||
|
// Get the path env var in a platform agnostic way
|
||||||
|
let the_path = engine.get_path_env_var();
|
||||||
|
// Make sure it's not empty
|
||||||
|
assert!(the_path.is_some());
|
||||||
|
}
|
File diff suppressed because it is too large
Load Diff
@ -1,2 +0,0 @@
|
|||||||
mod commands;
|
|
||||||
mod completions;
|
|
@ -1,34 +1,38 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use nu_engine::eval_block;
|
use nu_engine::eval_block;
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
use nu_path::{AbsolutePathBuf, PathBuf};
|
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
debugger::WithoutDebug,
|
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
PipelineData, ShellError, Span, Value,
|
eval_const::create_nu_constant,
|
||||||
|
PipelineData, ShellError, Span, Value, NU_VARIABLE_ID,
|
||||||
};
|
};
|
||||||
use nu_test_support::fs;
|
use nu_test_support::fs;
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
use std::path::MAIN_SEPARATOR;
|
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||||
|
|
||||||
fn create_default_context() -> EngineState {
|
fn create_default_context() -> EngineState {
|
||||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||||
}
|
}
|
||||||
|
|
||||||
// creates a new engine with the current path into the completions fixtures folder
|
// creates a new engine with the current path into the completions fixtures folder
|
||||||
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||||
// Target folder inside assets
|
// Target folder inside assets
|
||||||
let dir = fs::fixtures().join("completions");
|
let dir = fs::fixtures().join("completions");
|
||||||
let dir_str = dir
|
let mut dir_str = dir
|
||||||
.clone()
|
.clone()
|
||||||
.into_os_string()
|
.into_os_string()
|
||||||
.into_string()
|
.into_string()
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
|
dir_str.push(SEP);
|
||||||
|
|
||||||
// Create a new engine with default context
|
// Create a new engine with default context
|
||||||
let mut engine_state = create_default_context();
|
let mut engine_state = create_default_context();
|
||||||
|
|
||||||
// Add $nu
|
// Add $nu
|
||||||
engine_state.generate_nu_constant();
|
let nu_const =
|
||||||
|
create_nu_constant(&engine_state, Span::test_data()).expect("Failed creating $nu");
|
||||||
|
engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
|
||||||
|
|
||||||
// New stack
|
// New stack
|
||||||
let mut stack = Stack::new();
|
let mut stack = Stack::new();
|
||||||
@ -63,66 +67,21 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
// Merge environment into the permanent state
|
||||||
let merge_result = engine_state.merge_env(&mut stack);
|
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
||||||
assert!(merge_result.is_ok());
|
assert!(merge_result.is_ok());
|
||||||
|
|
||||||
(dir, dir_str, engine_state, stack)
|
(dir, dir_str, engine_state, stack)
|
||||||
}
|
}
|
||||||
|
|
||||||
// creates a new engine with the current path into the completions fixtures folder
|
pub fn new_quote_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||||
pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
|
||||||
// Target folder inside assets
|
|
||||||
let dir = fs::fixtures().join("dotnu_completions");
|
|
||||||
let dir_str = dir
|
|
||||||
.clone()
|
|
||||||
.into_os_string()
|
|
||||||
.into_string()
|
|
||||||
.unwrap_or_default();
|
|
||||||
let dir_span = nu_protocol::Span::new(0, dir_str.len());
|
|
||||||
|
|
||||||
// Create a new engine with default context
|
|
||||||
let mut engine_state = create_default_context();
|
|
||||||
|
|
||||||
// Add $nu
|
|
||||||
engine_state.generate_nu_constant();
|
|
||||||
|
|
||||||
// New stack
|
|
||||||
let mut stack = Stack::new();
|
|
||||||
|
|
||||||
// Add pwd as env var
|
|
||||||
stack.add_env_var("PWD".to_string(), Value::string(dir_str.clone(), dir_span));
|
|
||||||
stack.add_env_var(
|
|
||||||
"TEST".to_string(),
|
|
||||||
Value::string("NUSHELL".to_string(), dir_span),
|
|
||||||
);
|
|
||||||
|
|
||||||
stack.add_env_var(
|
|
||||||
"NU_LIB_DIRS".to_string(),
|
|
||||||
Value::List {
|
|
||||||
vals: vec![
|
|
||||||
Value::string(file(dir.join("lib-dir1")), dir_span),
|
|
||||||
Value::string(file(dir.join("lib-dir2")), dir_span),
|
|
||||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
|
||||||
],
|
|
||||||
internal_span: dir_span,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
|
||||||
let merge_result = engine_state.merge_env(&mut stack);
|
|
||||||
assert!(merge_result.is_ok());
|
|
||||||
|
|
||||||
(dir, dir_str, engine_state, stack)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn new_quote_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
|
||||||
// Target folder inside assets
|
// Target folder inside assets
|
||||||
let dir = fs::fixtures().join("quoted_completions");
|
let dir = fs::fixtures().join("quoted_completions");
|
||||||
let dir_str = dir
|
let mut dir_str = dir
|
||||||
.clone()
|
.clone()
|
||||||
.into_os_string()
|
.into_os_string()
|
||||||
.into_string()
|
.into_string()
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
|
dir_str.push(SEP);
|
||||||
|
|
||||||
// Create a new engine with default context
|
// Create a new engine with default context
|
||||||
let mut engine_state = create_default_context();
|
let mut engine_state = create_default_context();
|
||||||
@ -144,20 +103,21 @@ pub fn new_quote_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
// Merge environment into the permanent state
|
||||||
let merge_result = engine_state.merge_env(&mut stack);
|
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
||||||
assert!(merge_result.is_ok());
|
assert!(merge_result.is_ok());
|
||||||
|
|
||||||
(dir, dir_str, engine_state, stack)
|
(dir, dir_str, engine_state, stack)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
pub fn new_partial_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||||
// Target folder inside assets
|
// Target folder inside assets
|
||||||
let dir = fs::fixtures().join("partial_completions");
|
let dir = fs::fixtures().join("partial_completions");
|
||||||
let dir_str = dir
|
let mut dir_str = dir
|
||||||
.clone()
|
.clone()
|
||||||
.into_os_string()
|
.into_os_string()
|
||||||
.into_string()
|
.into_string()
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
|
dir_str.push(SEP);
|
||||||
|
|
||||||
// Create a new engine with default context
|
// Create a new engine with default context
|
||||||
let mut engine_state = create_default_context();
|
let mut engine_state = create_default_context();
|
||||||
@ -179,14 +139,14 @@ pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
// Merge environment into the permanent state
|
||||||
let merge_result = engine_state.merge_env(&mut stack);
|
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
||||||
assert!(merge_result.is_ok());
|
assert!(merge_result.is_ok());
|
||||||
|
|
||||||
(dir, dir_str, engine_state, stack)
|
(dir, dir_str, engine_state, stack)
|
||||||
}
|
}
|
||||||
|
|
||||||
// match a list of suggestions with the expected values
|
// match a list of suggestions with the expected values
|
||||||
pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>) {
|
pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
|
||||||
let expected_len = expected.len();
|
let expected_len = expected.len();
|
||||||
let suggestions_len = suggestions.len();
|
let suggestions_len = suggestions.len();
|
||||||
if expected_len != suggestions_len {
|
if expected_len != suggestions_len {
|
||||||
@ -196,25 +156,22 @@ pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>)
|
|||||||
Expected: {expected:#?}\n"
|
Expected: {expected:#?}\n"
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
expected.iter().zip(suggestions).for_each(|it| {
|
||||||
let suggestoins_str = suggestions
|
assert_eq!(it.0, &it.1.value);
|
||||||
.iter()
|
});
|
||||||
.map(|it| it.value.clone())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
assert_eq!(expected, &suggestoins_str);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// append the separator to the converted path
|
// append the separator to the converted path
|
||||||
pub fn folder(path: impl Into<PathBuf>) -> String {
|
pub fn folder(path: PathBuf) -> String {
|
||||||
let mut converted_path = file(path);
|
let mut converted_path = file(path);
|
||||||
converted_path.push(MAIN_SEPARATOR);
|
converted_path.push(SEP);
|
||||||
|
|
||||||
converted_path
|
converted_path
|
||||||
}
|
}
|
||||||
|
|
||||||
// convert a given path to string
|
// convert a given path to string
|
||||||
pub fn file(path: impl Into<PathBuf>) -> String {
|
pub fn file(path: PathBuf) -> String {
|
||||||
path.into().into_os_string().into_string().unwrap()
|
path.into_os_string().into_string().unwrap_or_default()
|
||||||
}
|
}
|
||||||
|
|
||||||
// merge_input executes the given input into the engine
|
// merge_input executes the given input into the engine
|
||||||
@ -223,6 +180,7 @@ pub fn merge_input(
|
|||||||
input: &[u8],
|
input: &[u8],
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
|
dir: PathBuf,
|
||||||
) -> Result<(), ShellError> {
|
) -> Result<(), ShellError> {
|
||||||
let (block, delta) = {
|
let (block, delta) = {
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
@ -236,14 +194,16 @@ pub fn merge_input(
|
|||||||
|
|
||||||
engine_state.merge_delta(delta)?;
|
engine_state.merge_delta(delta)?;
|
||||||
|
|
||||||
assert!(eval_block::<WithoutDebug>(
|
assert!(eval_block(
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
&block,
|
&block,
|
||||||
PipelineData::Value(Value::nothing(Span::unknown()), None),
|
PipelineData::Value(Value::nothing(Span::unknown(),), None),
|
||||||
|
false,
|
||||||
|
false
|
||||||
)
|
)
|
||||||
.is_ok());
|
.is_ok());
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
// Merge environment into the permanent state
|
||||||
engine_state.merge_env(stack)
|
engine_state.merge_env(stack, &dir)
|
||||||
}
|
}
|
@ -5,20 +5,21 @@ edition = "2021"
|
|||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-cmd-base"
|
name = "nu-cmd-base"
|
||||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
|
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
|
||||||
version = "0.101.0"
|
version = "0.87.1"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-engine = { path = "../nu-engine", version = "0.101.0", default-features = false }
|
nu-engine = { path = "../nu-engine", version = "0.87.1" }
|
||||||
nu-parser = { path = "../nu-parser", version = "0.101.0" }
|
nu-glob = { path = "../nu-glob", version = "0.87.1" }
|
||||||
nu-path = { path = "../nu-path", version = "0.101.0" }
|
nu-parser = { path = "../nu-parser", version = "0.87.1" }
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.101.0", default-features = false }
|
nu-path = { path = "../nu-path", version = "0.87.1" }
|
||||||
|
nu-protocol = { path = "../nu-protocol", version = "0.87.1" }
|
||||||
|
nu-utils = { path = "../nu-utils", version = "0.87.1" }
|
||||||
|
|
||||||
indexmap = { workspace = true }
|
indexmap = "2.1"
|
||||||
miette = { workspace = true }
|
miette = "5.10.0"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
|
nu-test-support = { path = "../nu-test-support", version = "0.87.1" }
|
||||||
|
rstest = "0.18.2"
|
||||||
|
@ -1,5 +0,0 @@
|
|||||||
Utilities used by the different `nu-command`/`nu-cmd-*` crates, should not contain any full `Command` implementations.
|
|
||||||
|
|
||||||
## Internal Nushell crate
|
|
||||||
|
|
||||||
This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
|
|
207
crates/nu-cmd-base/src/arg_glob.rs
Normal file
207
crates/nu-cmd-base/src/arg_glob.rs
Normal file
@ -0,0 +1,207 @@
|
|||||||
|
// utilities for expanding globs in command arguments
|
||||||
|
|
||||||
|
use nu_glob::{glob_with_parent, MatchOptions, Paths};
|
||||||
|
use nu_protocol::{ShellError, Spanned};
|
||||||
|
use std::fs;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
// standard glob options to use for filesystem command arguments
|
||||||
|
|
||||||
|
const GLOB_PARAMS: MatchOptions = MatchOptions {
|
||||||
|
case_sensitive: true,
|
||||||
|
require_literal_separator: false,
|
||||||
|
require_literal_leading_dot: false,
|
||||||
|
recursive_match_hidden_dir: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
// handle an argument that could be a literal path or a glob.
|
||||||
|
// if literal path, return just that (whether user can access it or not).
|
||||||
|
// if glob, expand into matching paths, using GLOB_PARAMS options.
|
||||||
|
pub fn arg_glob(
|
||||||
|
pattern: &Spanned<String>, // alleged path or glob
|
||||||
|
cwd: &Path, // current working directory
|
||||||
|
) -> Result<Paths, ShellError> {
|
||||||
|
arg_glob_opt(pattern, cwd, GLOB_PARAMS)
|
||||||
|
}
|
||||||
|
|
||||||
|
// variant of [arg_glob] that requires literal dot prefix in pattern to match dot-prefixed path.
|
||||||
|
pub fn arg_glob_leading_dot(pattern: &Spanned<String>, cwd: &Path) -> Result<Paths, ShellError> {
|
||||||
|
arg_glob_opt(
|
||||||
|
pattern,
|
||||||
|
cwd,
|
||||||
|
MatchOptions {
|
||||||
|
require_literal_leading_dot: true,
|
||||||
|
..GLOB_PARAMS
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn arg_glob_opt(
|
||||||
|
pattern: &Spanned<String>,
|
||||||
|
cwd: &Path,
|
||||||
|
options: MatchOptions,
|
||||||
|
) -> Result<Paths, ShellError> {
|
||||||
|
// remove ansi coloring (?)
|
||||||
|
let pattern = {
|
||||||
|
Spanned {
|
||||||
|
item: nu_utils::strip_ansi_string_unlikely(pattern.item.clone()),
|
||||||
|
span: pattern.span,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// if there's a file with same path as the pattern, just return that.
|
||||||
|
let pp = cwd.join(&pattern.item);
|
||||||
|
let md = fs::metadata(pp);
|
||||||
|
#[allow(clippy::single_match)]
|
||||||
|
match md {
|
||||||
|
Ok(_metadata) => {
|
||||||
|
return Ok(Paths::single(&PathBuf::from(pattern.item), cwd));
|
||||||
|
}
|
||||||
|
// file not found, but also "invalid chars in file" (e.g * on Windows). Fall through and glob
|
||||||
|
Err(_) => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// user wasn't referring to a specific thing in filesystem, try to glob it.
|
||||||
|
match glob_with_parent(&pattern.item, options, cwd) {
|
||||||
|
Ok(p) => Ok(p),
|
||||||
|
Err(pat_err) => {
|
||||||
|
Err(ShellError::InvalidGlobPattern(
|
||||||
|
pat_err.msg.into(),
|
||||||
|
pattern.span, // improve specificity
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use nu_glob::GlobResult;
|
||||||
|
use nu_protocol::{Span, Spanned};
|
||||||
|
use nu_test_support::fs::Stub::EmptyFile;
|
||||||
|
use nu_test_support::playground::Playground;
|
||||||
|
use rstest::rstest;
|
||||||
|
|
||||||
|
fn spanned_string(str: &str) -> Spanned<String> {
|
||||||
|
Spanned {
|
||||||
|
item: str.to_string(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
    #[test]
    fn does_something() {
        let act = arg_glob(&spanned_string("*"), &PathBuf::from("."));
        assert!(act.is_ok());
        for f in act.expect("checked ok") {
            match f {
                Ok(p) => {
                    assert!(!p.to_str().unwrap().is_empty());
                }
                Err(e) => panic!("unexpected error {:?}", e),
            };
        }
    }

    #[test]
    fn glob_format_error() {
        let act = arg_glob(&spanned_string(r#"ab]c[def"#), &PathBuf::from("."));
        assert!(act.is_err());
    }

    #[rstest]
    #[case("*", 4, "no dirs")]
    #[case("**/*", 7, "incl dirs")]
    fn glob_subdirs(#[case] pat: &str, #[case] exp_count: usize, #[case] case: &str) {
        Playground::setup("glob_subdirs", |dirs, sandbox| {
            sandbox.with_files(vec![
                EmptyFile("yehuda.txt"),
                EmptyFile("jttxt"),
                EmptyFile("andres.txt"),
            ]);
            sandbox.mkdir(".children");
            sandbox.within(".children").with_files(vec![
                EmptyFile("timothy.txt"),
                EmptyFile("tiffany.txt"),
                EmptyFile("trish.txt"),
            ]);

            let p: Vec<GlobResult> = arg_glob(&spanned_string(pat), &dirs.test)
                .expect("no error")
                .collect();
            assert_eq!(
                exp_count,
                p.iter().filter(|i| i.is_ok()).count(),
                " case: {case} ",
            );

            // expected behavior -- that directories are included in results (if name matches pattern)
            let t = p
                .iter()
                .any(|i| i.as_ref().unwrap().to_string_lossy().contains(".children"));
            assert!(t, "check for dir, case {case}");
        })
    }

    #[rstest]
    #[case("yehuda.txt", true, 1, "matches literal path")]
    #[case("*", false, 3, "matches glob")]
    #[case(r#"bad[glob.foo"#, true, 1, "matches literal, would be bad glob pat")]
    fn exact_vs_glob(
        #[case] pat: &str,
        #[case] exp_matches_input: bool,
        #[case] exp_count: usize,
        #[case] case: &str,
    ) {
        Playground::setup("exact_vs_glob", |dirs, sandbox| {
            sandbox.with_files(vec![
                EmptyFile("yehuda.txt"),
                EmptyFile("jttxt"),
                EmptyFile("bad[glob.foo"),
            ]);

            let res = arg_glob(&spanned_string(pat), &dirs.test)
                .expect("no error")
                .collect::<Vec<GlobResult>>();

            eprintln!("res: {:?}", res);
            if exp_matches_input {
                assert_eq!(
                    exp_count,
                    res.len(),
                    " case {case}: matches input, but count not 1? "
                );
                assert_eq!(
                    &res[0].as_ref().unwrap().to_string_lossy(),
                    pat, // todo: is it OK for glob to return relative paths (not to current cwd, but to arg cwd of arg_glob)?
                );
            } else {
                assert_eq!(exp_count, res.len(), " case: {}: matched glob", case);
            }
        })
    }

    #[rstest]
    #[case(r#"realbad[glob.foo"#, true, 1, "error, bad glob")]
    fn exact_vs_bad_glob(
        // if path doesn't exist but pattern is not valid glob, should get error.
        #[case] pat: &str,
        #[case] _exp_matches_input: bool,
        #[case] _exp_count: usize,
        #[case] _tag: &str,
    ) {
        Playground::setup("exact_vs_bad_glob", |dirs, sandbox| {
            sandbox.with_files(vec![
                EmptyFile("yehuda.txt"),
                EmptyFile("jttxt"),
                EmptyFile("bad[glob.foo"),
            ]);

            let res = arg_glob(&spanned_string(pat), &dirs.test);
            assert!(res
                .expect_err("expected error")
                .to_string()
                .contains("Invalid glob pattern"));
        })
    }
}
@ -1,61 +1,58 @@
use crate::util::get_guaranteed_cwd;
use miette::Result;
use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::parse;
use nu_protocol::{
use nu_protocol::cli_error::{report_error, report_error_new};
cli_error::{report_parse_error, report_shell_error},
use nu_protocol::engine::{EngineState, Stack, StateWorkingSet};
debugger::WithoutDebug,
use nu_protocol::{BlockId, PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId};
engine::{Closure, EngineState, Stack, StateWorkingSet},
PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
};
use std::{collections::HashMap, sync::Arc};

pub fn eval_env_change_hook(
env_change_hook: &HashMap<String, Vec<Value>>,
env_change_hook: Option<Value>,
engine_state: &mut EngineState,
stack: &mut Stack,
) -> Result<(), ShellError> {
for (env, hooks) in env_change_hook {
if let Some(hook) = env_change_hook {
let before = engine_state.previous_env_vars.get(env);
match hook {
let after = stack.get_env_var(engine_state, env);
Value::Record { val, .. } => {
if before != after {
for (env_name, hook_value) in &val {
let before = before.cloned().unwrap_or_default();
let before = engine_state
let after = after.cloned().unwrap_or_default();
.previous_env_vars
.get(env_name)
.cloned()
.unwrap_or_default();

eval_hooks(
let after = stack
engine_state,
.get_env_var(engine_state, env_name)
stack,
.unwrap_or_default();
vec![("$before".into(), before), ("$after".into(), after.clone())],
hooks,
"env_change",
)?;

Arc::make_mut(&mut engine_state.previous_env_vars).insert(env.clone(), after);
if before != after {
eval_hook(
engine_state,
stack,
None,
vec![("$before".into(), before), ("$after".into(), after.clone())],
hook_value,
"env_change",
)?;

engine_state
.previous_env_vars
.insert(env_name.to_string(), after);
}
}
}
x => {
return Err(ShellError::TypeMismatch {
err_message: "record for the 'env_change' hook".to_string(),
span: x.span(),
});
}
}
}

Ok(())
}

pub fn eval_hooks(
engine_state: &mut EngineState,
stack: &mut Stack,
arguments: Vec<(String, Value)>,
hooks: &[Value],
hook_name: &str,
) -> Result<(), ShellError> {
for hook in hooks {
eval_hook(
engine_state,
stack,
None,
arguments.clone(),
hook,
&format!("{hook_name} list, recursive"),
)?;
}
Ok(())
}

pub fn eval_hook(
engine_state: &mut EngineState,
stack: &mut Stack,
@ -91,14 +88,13 @@ pub fn eval_hook(
false,
);
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);
report_error(&working_set, err);
return Err(ShellError::GenericError {
error: format!("Failed to run {hook_name} hook"),
return Err(ShellError::UnsupportedConfigValue(
msg: "source code has errors".into(),
"valid source code".into(),
span: Some(span),
"source code with syntax errors".into(),
help: None,
span,
inner: Vec::new(),
));
});
}

(output, working_set.render(), vars)
@ -119,12 +115,12 @@ pub fn eval_hook(
})
.collect();

match eval_block::<WithoutDebug>(engine_state, stack, &block, input) {
match eval_block(engine_state, stack, &block, input, false, false) {
Ok(pipeline_data) => {
output = pipeline_data;
}
Err(err) => {
report_shell_error(engine_state, &err);
report_error_new(engine_state, &err);
}
}

@ -133,7 +129,16 @@ pub fn eval_hook(
}
}
Value::List { vals, .. } => {
eval_hooks(engine_state, stack, arguments, vals, hook_name)?;
for val in vals {
eval_hook(
engine_state,
stack,
None,
arguments.clone(),
val,
&format!("{hook_name} list, recursive"),
)?;
}
}
Value::Record { val, .. } => {
// Hooks can optionally be a record in this form:
@ -145,11 +150,11 @@ pub fn eval_hook(
// If it returns true (the default if a condition block is not specified), the hook should be run.
let do_run_hook = if let Some(condition) = val.get("condition") {
let other_span = condition.span();
if let Ok(closure) = condition.as_closure() {
if let Ok(block_id) = condition.as_block() {
match run_hook(
match run_hook_block(
engine_state,
stack,
closure,
block_id,
None,
arguments.clone(),
other_span,
@ -159,11 +164,11 @@ pub fn eval_hook(
{
val
} else {
return Err(ShellError::RuntimeTypeMismatch {
return Err(ShellError::UnsupportedConfigValue(
expected: Type::Bool,
"boolean output".to_string(),
actual: pipeline_data.get_type(),
"other PipelineData variant".to_string(),
span: pipeline_data.span().unwrap_or(other_span),
other_span,
});
));
}
}
Err(err) => {
@ -171,11 +176,11 @@ pub fn eval_hook(
}
}
} else {
return Err(ShellError::RuntimeTypeMismatch {
return Err(ShellError::UnsupportedConfigValue(
expected: Type::Closure,
"block".to_string(),
actual: condition.get_type(),
format!("{}", condition.get_type()),
span: other_span,
other_span,
});
));
}
} else {
// always run the hook
@ -186,7 +191,7 @@ pub fn eval_hook(
let Some(follow) = val.get("code") else {
return Err(ShellError::CantFindColumn {
col_name: "code".into(),
span: Some(span),
span,
src_span: span,
});
};
@ -215,14 +220,13 @@ pub fn eval_hook(
false,
);
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);
report_error(&working_set, err);
return Err(ShellError::GenericError {
error: format!("Failed to run {hook_name} hook"),
return Err(ShellError::UnsupportedConfigValue(
msg: "source code has errors".into(),
"valid source code".into(),
span: Some(span),
"source code with syntax errors".into(),
help: None,
source_span,
inner: Vec::new(),
));
});
}

(output, working_set.render(), vars)
@ -239,12 +243,12 @@ pub fn eval_hook(
})
.collect();

match eval_block::<WithoutDebug>(engine_state, stack, &block, input) {
match eval_block(engine_state, stack, &block, input, false, false) {
Ok(pipeline_data) => {
output = pipeline_data;
}
Err(err) => {
report_shell_error(engine_state, &err);
report_error_new(engine_state, &err);
}
}

@ -252,51 +256,70 @@ pub fn eval_hook(
stack.remove_var(*var_id);
}
}
Value::Block { val: block_id, .. } => {
run_hook_block(
engine_state,
stack,
*block_id,
input,
arguments,
source_span,
)?;
}
Value::Closure { val, .. } => {
run_hook(engine_state, stack, val, input, arguments, source_span)?;
run_hook_block(
engine_state,
stack,
val.block_id,
input,
arguments,
source_span,
)?;
}
other => {
return Err(ShellError::RuntimeTypeMismatch {
return Err(ShellError::UnsupportedConfigValue(
expected: Type::custom("string or closure"),
"block or string".to_string(),
actual: other.get_type(),
format!("{}", other.get_type()),
span: source_span,
source_span,
});
));
}
}
}
}
Value::Block { val: block_id, .. } => {
output = run_hook_block(engine_state, stack, *block_id, input, arguments, span)?;
}
Value::Closure { val, .. } => {
output = run_hook(engine_state, stack, val, input, arguments, span)?;
output = run_hook_block(engine_state, stack, val.block_id, input, arguments, span)?;
}
other => {
return Err(ShellError::RuntimeTypeMismatch {
return Err(ShellError::UnsupportedConfigValue(
expected: Type::custom("string, closure, record, or list"),
"string, block, record, or list of commands".into(),
actual: other.get_type(),
format!("{}", other.get_type()),
span: other.span(),
other.span(),
});
));
}
}

engine_state.merge_env(stack)?;
let cwd = get_guaranteed_cwd(engine_state, stack);
engine_state.merge_env(stack, cwd)?;

Ok(output)
}

fn run_hook(
fn run_hook_block(
engine_state: &EngineState,
stack: &mut Stack,
closure: &Closure,
block_id: BlockId,
optional_input: Option<PipelineData>,
arguments: Vec<(String, Value)>,
span: Span,
) -> Result<PipelineData, ShellError> {
let block = engine_state.get_block(closure.block_id);
let block = engine_state.get_block(block_id);

let input = optional_input.unwrap_or_else(PipelineData::empty);

let mut callee_stack = stack
let mut callee_stack = stack.gather_captures(engine_state, &block.captures);
.captures_to_stack_preserve_out_dest(closure.captures.clone())
.reset_pipes();

for (idx, PositionalArg { var_id, .. }) in
block.signature.required_positional.iter().enumerate()
@ -313,12 +336,8 @@ fn run_hook(
}
}

let pipeline_data = eval_block_with_early_return::<WithoutDebug>(
let pipeline_data =
engine_state,
eval_block_with_early_return(engine_state, &mut callee_stack, block, input, false, false)?;
&mut callee_stack,
block,
input,
)?;

if let PipelineData::Value(Value::Error { error, .. }, _) = pipeline_data {
return Err(*error);
|
||||||
|
@ -1,4 +1,6 @@
use nu_protocol::{ast::CellPath, PipelineData, ShellError, Signals, Span, Value};
use nu_protocol::ast::CellPath;
use nu_protocol::{PipelineData, ShellError, Span, Value};
use std::sync::atomic::AtomicBool;
use std::sync::Arc;

pub trait CmdArgument {
@ -40,7 +42,7 @@ pub fn operate<C, A>(
mut arg: A,
input: PipelineData,
span: Span,
signals: &Signals,
ctrlc: Option<Arc<AtomicBool>>,
) -> Result<PipelineData, ShellError>
where
A: CmdArgument + Send + Sync + 'static,
@ -55,7 +57,7 @@ where
_ => cmd(&v, &arg, span),
}
},
signals,
ctrlc,
),
Some(column_paths) => {
let arg = Arc::new(arg);
@ -79,7 +81,7 @@ where
}
v
},
signals,
ctrlc,
)
}
}
|
||||||
|
@ -1,5 +1,7 @@
#![doc = include_str!("../README.md")]
mod arg_glob;
pub mod formats;
pub mod hook;
pub mod input_handler;
pub mod util;
pub use arg_glob::arg_glob;
pub use arg_glob::arg_glob_leading_dot;
|
||||||
|
@ -1,28 +1,59 @@
use nu_protocol::report_error;
use nu_protocol::{
engine::{EngineState, Stack},
ast::RangeInclusion,
engine::{EngineState, Stack, StateWorkingSet},
Range, ShellError, Span, Value,
};
use std::ops::Bound;
use std::path::PathBuf;

pub fn get_init_cwd() -> PathBuf {
std::env::current_dir().unwrap_or_else(|_| {
std::env::var("PWD")
.map(Into::into)
.unwrap_or_else(|_| nu_path::home_dir().unwrap_or_default())
})
}

pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf {
nu_engine::env::current_dir(engine_state, stack).unwrap_or_else(|e| {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &e);
crate::util::get_init_cwd()
})
}

type MakeRangeError = fn(&str, Span) -> ShellError;

/// Returns a inclusive pair of boundary in given `range`.
pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> {
match range {
let start = match &range.from {
Range::IntRange(range) => {
Value::Int { val, .. } => isize::try_from(*val).unwrap_or_default(),
let start = range.start().try_into().unwrap_or(0);
Value::Nothing { .. } => 0,
let end = match range.end() {
_ => {
Bound::Included(v) => v as isize,
return Err(|msg, span| ShellError::TypeMismatch {
Bound::Excluded(v) => (v - 1) as isize,
err_message: msg.to_string(),
Bound::Unbounded => isize::MAX,
span,
};
})
Ok((start, end))
}
Range::FloatRange(_) => Err(|msg, span| ShellError::TypeMismatch {
};
err_message: msg.to_string(),
span,
let end = match &range.to {
}),
Value::Int { val, .. } => {
}
if matches!(range.inclusion, RangeInclusion::Inclusive) {
isize::try_from(*val).unwrap_or(isize::max_value())
} else {
isize::try_from(*val).unwrap_or(isize::max_value()) - 1
}
}
Value::Nothing { .. } => isize::max_value(),
_ => {
return Err(|msg, span| ShellError::TypeMismatch {
err_message: msg.to_string(),
span,
})
}
};

Ok((start, end))
}

const HELP_MSG: &str = "Nushell's config file can be found with the command: $nu.config-path. \
@ -35,28 +66,28 @@ fn get_editor_commandline(
match value {
Value::String { val, .. } if !val.is_empty() => Ok((val.to_string(), Vec::new())),
Value::List { vals, .. } if !vals.is_empty() => {
let mut editor_cmd = vals.iter().map(|l| l.coerce_string());
let mut editor_cmd = vals.iter().map(|l| l.as_string());
match editor_cmd.next().transpose()? {
Some(editor) if !editor.is_empty() => {
let params = editor_cmd.collect::<Result<_, ShellError>>()?;
Ok((editor, params))
}
_ => Err(ShellError::GenericError {
_ => Err(ShellError::GenericError(
error: "Editor executable is missing".into(),
"Editor executable is missing".into(),
msg: "Set the first element to an executable".into(),
"Set the first element to an executable".into(),
span: Some(value.span()),
Some(value.span()),
help: Some(HELP_MSG.into()),
Some(HELP_MSG.into()),
inner: vec![],
vec![],
}),
)),
}
}
Value::String { .. } | Value::List { .. } => Err(ShellError::GenericError {
Value::String { .. } | Value::List { .. } => Err(ShellError::GenericError(
error: format!("{var_name} should be a non-empty string or list<String>"),
format!("{var_name} should be a non-empty string or list<String>"),
msg: "Specify an executable here".into(),
"Specify an executable here".into(),
span: Some(value.span()),
Some(value.span()),
help: Some(HELP_MSG.into()),
Some(HELP_MSG.into()),
inner: vec![],
vec![],
}),
)),
x => Err(ShellError::CantConvert {
to_type: "string or list<string>".into(),
from_type: x.get_type().to_string(),
@ -68,7 +99,7 @@ fn get_editor_commandline(

pub fn get_editor(
engine_state: &EngineState,
stack: &Stack,
stack: &mut Stack,
span: Span,
) -> Result<(String, Vec<String>), ShellError> {
let config = engine_state.get_config();
@ -78,19 +109,18 @@ pub fn get_editor(
get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor")
{
Ok(buff_editor)
} else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL")
} else if let Some(value) = env_vars.get("EDITOR") {
get_editor_commandline(value, "$env.EDITOR")
} else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL")
} else {
Err(ShellError::GenericError {
Err(ShellError::GenericError(
error: "No editor configured".into(),
"No editor configured".into(),
msg:
"Please specify one via `$env.config.buffer_editor` or `$env.EDITOR`/`$env.VISUAL`"
"Please specify one via `$env.config.buffer_editor` or `$env.EDITOR`/`$env.VISUAL`"
.into(),
.into(),
Some(span),
span: Some(span),
Some(HELP_MSG.into()),
help: Some(HELP_MSG.into()),
vec![],
inner: vec![],
))
})
}
}
|
||||||
|
70 crates/nu-cmd-dataframe/Cargo.toml Normal file
@ -0,0 +1,70 @@
[package]
authors = ["The Nushell Project Developers"]
description = "Nushell's dataframe commands based on polars."
edition = "2021"
license = "MIT"
name = "nu-cmd-dataframe"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe"
version = "0.87.1"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
bench = false

[dependencies]
nu-engine = { path = "../nu-engine", version = "0.87.1" }
nu-parser = { path = "../nu-parser", version = "0.87.1" }
nu-protocol = { path = "../nu-protocol", version = "0.87.1" }

# Potential dependencies for extras
chrono = { version = "0.4", features = ["std", "unstable-locales"], default-features = false }
chrono-tz = "0.8"
fancy-regex = "0.11"
indexmap = { version = "2.1" }
num = { version = "0.4", optional = true }
serde = { version = "1.0", features = ["derive"] }
sqlparser = { version = "0.36.1", optional = true }
polars-io = { version = "0.33", features = ["avro"], optional = true }

[dependencies.polars]
features = [
    "arg_where",
    "checked_arithmetic",
    "concat_str",
    "cross_join",
    "csv",
    "cum_agg",
    "default",
    "dtype-categorical",
    "dtype-datetime",
    "dtype-struct",
    "dtype-i8",
    "dtype-i16",
    "dtype-u8",
    "dtype-u16",
    "dynamic_group_by",
    "ipc",
    "is_in",
    "json",
    "lazy",
    "object",
    "parquet",
    "random",
    "rolling_window",
    "rows",
    "serde",
    "serde-lazy",
    "strings",
    "to_dummies",
]
optional = true
version = "0.33"

[features]
dataframe = ["num", "polars", "polars-io", "sqlparser"]
default = []

[dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.87.1" }
nu-test-support = { path = "../nu-test-support", version = "0.87.1" }
|
134 crates/nu-cmd-dataframe/src/dataframe/eager/append.rs Normal file
@ -0,0 +1,134 @@
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};

use super::super::values::{Axis, Column, NuDataFrame};

#[derive(Clone)]
pub struct AppendDF;

impl Command for AppendDF {
    fn name(&self) -> &str {
        "dfr append"
    }

    fn usage(&self) -> &str {
        "Appends a new dataframe."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .required("other", SyntaxShape::Any, "dataframe to be appended")
            .switch("col", "appends in col orientation", Some('c'))
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Appends a dataframe as new columns",
                example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
    $a | dfr append $a"#,
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new(
                            "a".to_string(),
                            vec![Value::test_int(1), Value::test_int(3)],
                        ),
                        Column::new(
                            "b".to_string(),
                            vec![Value::test_int(2), Value::test_int(4)],
                        ),
                        Column::new(
                            "a_x".to_string(),
                            vec![Value::test_int(1), Value::test_int(3)],
                        ),
                        Column::new(
                            "b_x".to_string(),
                            vec![Value::test_int(2), Value::test_int(4)],
                        ),
                    ])
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
            Example {
                description: "Appends a dataframe merging at the end of columns",
                example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
    $a | dfr append $a --col"#,
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new(
                            "a".to_string(),
                            vec![
                                Value::test_int(1),
                                Value::test_int(3),
                                Value::test_int(1),
                                Value::test_int(3),
                            ],
                        ),
                        Column::new(
                            "b".to_string(),
                            vec![
                                Value::test_int(2),
                                Value::test_int(4),
                                Value::test_int(2),
                                Value::test_int(4),
                            ],
                        ),
                    ])
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        command(engine_state, stack, call, input)
    }
}

fn command(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    let other: Value = call.req(engine_state, stack, 0)?;

    let axis = if call.has_flag("col") {
        Axis::Column
    } else {
        Axis::Row
    };
    let df_other = NuDataFrame::try_from_value(other)?;
    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    df.append_df(&df_other, axis, call.head)
        .map(|df| PipelineData::Value(NuDataFrame::into_value(df, call.head), None))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;

    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(AppendDF {})])
    }
}
|
@ -1,22 +1,19 @@
use crate::values::NuDataFrame;
use super::super::values::NuDataFrame;
use crate::PolarsPlugin;

use nu_plugin::{EngineInterface, EvaluatedCall, PluginCommand};
use nu_protocol::{
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, Type, Value,
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};

#[derive(Clone)]
pub struct ColumnsDF;

impl PluginCommand for ColumnsDF {
impl Command for ColumnsDF {
type Plugin = PolarsPlugin;

fn name(&self) -> &str {
"polars columns"
"dfr columns"
}

fn description(&self) -> &str {
fn usage(&self) -> &str {
"Show dataframe columns."
}

@ -29,7 +26,7 @@ impl PluginCommand for ColumnsDF {
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Dataframe columns",
example: "[[a b]; [1 2] [3 4]] | polars into-df | polars columns",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr columns",
result: Some(Value::list(
vec![Value::test_string("a"), Value::test_string("b")],
Span::test_data(),
@ -39,27 +36,28 @@ impl PluginCommand for ColumnsDF {

fn run(
&self,
plugin: &Self::Plugin,
engine_state: &EngineState,
_engine: &EngineInterface,
stack: &mut Stack,
call: &EvaluatedCall,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, LabeledError> {
) -> Result<PipelineData, ShellError> {
command(plugin, call, input).map_err(|e| e.into())
command(engine_state, stack, call, input)
}
}

fn command(
plugin: &PolarsPlugin,
_engine_state: &EngineState,
call: &EvaluatedCall,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let df = NuDataFrame::try_from_pipeline_coerce(plugin, input, call.head)?;
let df = NuDataFrame::try_from_pipeline(input, call.head)?;

let names: Vec<Value> = df
.as_ref()
.get_column_names()
.iter()
.map(|v| Value::string(v.as_str(), call.head))
.map(|v| Value::string(*v, call.head))
.collect();

let names = Value::list(names, call.head);
@ -69,11 +67,11 @@ fn command(

#[cfg(test)]
mod test {
use super::super::super::test_dataframe::test_dataframe;
use super::*;
use crate::test::test_polars_plugin_command;

#[test]
fn test_examples() -> Result<(), ShellError> {
fn test_examples() {
test_polars_plugin_command(&ColumnsDF)
test_dataframe(vec![Box::new(ColumnsDF {})])
}
}
|
121 crates/nu-cmd-dataframe/src/dataframe/eager/drop.rs Normal file
@ -0,0 +1,121 @@
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};

use super::super::values::utils::convert_columns;
use super::super::values::{Column, NuDataFrame};

#[derive(Clone)]
pub struct DropDF;

impl Command for DropDF {
    fn name(&self) -> &str {
        "dfr drop"
    }

    fn usage(&self) -> &str {
        "Creates a new dataframe by dropping the selected columns."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .rest("rest", SyntaxShape::Any, "column names to be dropped")
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "drop column a",
            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr drop a",
            result: Some(
                NuDataFrame::try_from_columns(vec![Column::new(
                    "b".to_string(),
                    vec![Value::test_int(2), Value::test_int(4)],
                )])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        }]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        command(engine_state, stack, call, input)
    }
}

fn command(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    let columns: Vec<Value> = call.rest(engine_state, stack, 0)?;
    let (col_string, col_span) = convert_columns(columns, call.head)?;

    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    let new_df = col_string
        .get(0)
        .ok_or_else(|| {
            ShellError::GenericError(
                "Empty names list".into(),
                "No column names were found".into(),
                Some(col_span),
                None,
                Vec::new(),
            )
        })
        .and_then(|col| {
            df.as_ref().drop(&col.item).map_err(|e| {
                ShellError::GenericError(
                    "Error dropping column".into(),
                    e.to_string(),
                    Some(col.span),
                    None,
                    Vec::new(),
                )
            })
        })?;

    // If there are more columns in the drop selection list, these
    // are added from the resulting dataframe
    col_string
        .iter()
        .skip(1)
        .try_fold(new_df, |new_df, col| {
            new_df.drop(&col.item).map_err(|e| {
                ShellError::GenericError(
                    "Error dropping column".into(),
                    e.to_string(),
                    Some(col.span),
                    None,
                    Vec::new(),
                )
            })
        })
        .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;

    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(DropDF {})])
    }
}
|
124 crates/nu-cmd-dataframe/src/dataframe/eager/drop_duplicates.rs Normal file
@ -0,0 +1,124 @@
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::UniqueKeepStrategy;

use super::super::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};

#[derive(Clone)]
pub struct DropDuplicates;

impl Command for DropDuplicates {
    fn name(&self) -> &str {
        "dfr drop-duplicates"
    }

    fn usage(&self) -> &str {
        "Drops duplicate values in dataframe."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .optional(
                "subset",
                SyntaxShape::Table(vec![]),
                "subset of columns to drop duplicates",
            )
            .switch("maintain", "maintain order", Some('m'))
            .switch(
                "last",
                "keeps last duplicate value (by default keeps first)",
                Some('l'),
            )
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "drop duplicates",
            example: "[[a b]; [1 2] [3 4] [1 2]] | dfr into-df | dfr drop-duplicates",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "a".to_string(),
                        vec![Value::test_int(3), Value::test_int(1)],
                    ),
                    Column::new(
                        "b".to_string(),
                        vec![Value::test_int(4), Value::test_int(2)],
                    ),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        }]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        command(engine_state, stack, call, input)
    }
}

fn command(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    let columns: Option<Vec<Value>> = call.opt(engine_state, stack, 0)?;
    let (subset, col_span) = match columns {
        Some(cols) => {
            let (agg_string, col_span) = convert_columns_string(cols, call.head)?;
            (Some(agg_string), col_span)
        }
        None => (None, call.head),
    };

    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    let subset_slice = subset.as_ref().map(|cols| &cols[..]);

    let keep_strategy = if call.has_flag("last") {
        UniqueKeepStrategy::Last
    } else {
        UniqueKeepStrategy::First
    };

    df.as_ref()
        .unique(subset_slice, keep_strategy, None)
        .map_err(|e| {
            ShellError::GenericError(
                "Error dropping duplicates".into(),
                e.to_string(),
                Some(col_span),
                None,
                Vec::new(),
            )
        })
        .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;

    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(DropDuplicates {})])
    }
}
|
140 crates/nu-cmd-dataframe/src/dataframe/eager/drop_nulls.rs Normal file
@ -0,0 +1,140 @@
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};

use super::super::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};

#[derive(Clone)]
pub struct DropNulls;

impl Command for DropNulls {
    fn name(&self) -> &str {
        "dfr drop-nulls"
    }

    fn usage(&self) -> &str {
        "Drops null values in dataframe."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .optional(
                "subset",
                SyntaxShape::Table(vec![]),
                "subset of columns to drop nulls",
            )
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "drop null values in dataframe",
                example: r#"let df = ([[a b]; [1 2] [3 0] [1 2]] | dfr into-df);
    let res = ($df.b / $df.b);
    let a = ($df | dfr with-column $res --name res);
    $a | dfr drop-nulls"#,
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new(
                            "a".to_string(),
                            vec![Value::test_int(1), Value::test_int(1)],
                        ),
                        Column::new(
                            "b".to_string(),
                            vec![Value::test_int(2), Value::test_int(2)],
                        ),
                        Column::new(
                            "res".to_string(),
                            vec![Value::test_int(1), Value::test_int(1)],
                        ),
                    ])
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
            Example {
                description: "drop null values in dataframe",
                example: r#"let s = ([1 2 0 0 3 4] | dfr into-df);
    ($s / $s) | dfr drop-nulls"#,
                result: Some(
                    NuDataFrame::try_from_columns(vec![Column::new(
                        "div_0_0".to_string(),
                        vec![
                            Value::test_int(1),
                            Value::test_int(1),
                            Value::test_int(1),
                            Value::test_int(1),
                        ],
                    )])
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        command(engine_state, stack, call, input)
    }
}

fn command(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    let columns: Option<Vec<Value>> = call.opt(engine_state, stack, 0)?;

    let (subset, col_span) = match columns {
        Some(cols) => {
            let (agg_string, col_span) = convert_columns_string(cols, call.head)?;
            (Some(agg_string), col_span)
        }
        None => (None, call.head),
    };

    let subset_slice = subset.as_ref().map(|cols| &cols[..]);

    df.as_ref()
        .drop_nulls(subset_slice)
        .map_err(|e| {
            ShellError::GenericError(
                "Error dropping nulls".into(),
                e.to_string(),
                Some(col_span),
                None,
                Vec::new(),
            )
        })
        .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::super::WithColumn;
    use super::*;

    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(DropNulls {}), Box::new(WithColumn {})])
    }
}
|
104 crates/nu-cmd-dataframe/src/dataframe/eager/dtypes.rs Normal file
@ -0,0 +1,104 @@
use super::super::values::{Column, NuDataFrame};
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};

#[derive(Clone)]
pub struct DataTypes;

impl Command for DataTypes {
    fn name(&self) -> &str {
        "dfr dtypes"
    }

    fn usage(&self) -> &str {
        "Show dataframe data types."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Dataframe dtypes",
            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dtypes",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "column".to_string(),
                        vec![Value::test_string("a"), Value::test_string("b")],
                    ),
                    Column::new(
                        "dtype".to_string(),
                        vec![Value::test_string("i64"), Value::test_string("i64")],
                    ),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        }]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        command(engine_state, stack, call, input)
    }
}

fn command(
    _engine_state: &EngineState,
    _stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    let mut dtypes: Vec<Value> = Vec::new();
    let names: Vec<Value> = df
        .as_ref()
        .get_column_names()
        .iter()
        .map(|v| {
            let dtype = df
                .as_ref()
                .column(v)
                .expect("using name from list of names from dataframe")
                .dtype();

            let dtype_str = dtype.to_string();
            dtypes.push(Value::string(dtype_str, call.head));

            Value::string(*v, call.head)
        })
        .collect();

    let names_col = Column::new("column".to_string(), names);
    let dtypes_col = Column::new("dtype".to_string(), dtypes);

    NuDataFrame::try_from_columns(vec![names_col, dtypes_col])
        .map(|df| PipelineData::Value(df.into_value(call.head), None))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;

    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(DataTypes {})])
    }
}
|
112 crates/nu-cmd-dataframe/src/dataframe/eager/dummies.rs Normal file
@ -0,0 +1,112 @@
use super::super::values::NuDataFrame;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, Type,
};
use polars::{prelude::*, series::Series};

#[derive(Clone)]
pub struct Dummies;

impl Command for Dummies {
    fn name(&self) -> &str {
        "dfr dummies"
    }

    fn usage(&self) -> &str {
        "Creates a new dataframe with dummy variables."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .switch("drop-first", "Drop first row", Some('d'))
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Create new dataframe with dummy variables from a dataframe",
                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dummies",
                result: Some(
                    NuDataFrame::try_from_series(
                        vec![
                            Series::new("a_1", &[1_u8, 0]),
                            Series::new("a_3", &[0_u8, 1]),
                            Series::new("b_2", &[1_u8, 0]),
                            Series::new("b_4", &[0_u8, 1]),
                        ],
                        Span::test_data(),
                    )
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
            Example {
                description: "Create new dataframe with dummy variables from a series",
                example: "[1 2 2 3 3] | dfr into-df | dfr dummies",
                result: Some(
                    NuDataFrame::try_from_series(
                        vec![
                            Series::new("0_1", &[1_u8, 0, 0, 0, 0]),
                            Series::new("0_2", &[0_u8, 1, 1, 0, 0]),
                            Series::new("0_3", &[0_u8, 0, 0, 1, 1]),
                        ],
                        Span::test_data(),
                    )
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        command(engine_state, stack, call, input)
    }
}

fn command(
    _engine_state: &EngineState,
    _stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    let drop_first: bool = call.has_flag("drop-first");
    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    df.as_ref()
        .to_dummies(None, drop_first)
        .map_err(|e| {
            ShellError::GenericError(
                "Error calculating dummies".into(),
                e.to_string(),
                Some(call.head),
                Some("The only allowed column types for dummies are String or Int".into()),
                Vec::new(),
            )
        })
        .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;

    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(Dummies {})])
    }
}
|
160 crates/nu-cmd-dataframe/src/dataframe/eager/filter_with.rs Normal file
@ -0,0 +1,160 @@
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::LazyFrame;

use crate::dataframe::values::{NuExpression, NuLazyFrame};

use super::super::values::{Column, NuDataFrame};

#[derive(Clone)]
pub struct FilterWith;

impl Command for FilterWith {
    fn name(&self) -> &str {
        "dfr filter-with"
    }

    fn usage(&self) -> &str {
        "Filters dataframe using a mask or expression as reference."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .required(
                "mask or expression",
                SyntaxShape::Any,
                "boolean mask used to filter data",
            )
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe or lazyframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Filter dataframe using a bool mask",
                example: r#"let mask = ([true false] | dfr into-df);
    [[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with $mask"#,
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new("a".to_string(), vec![Value::test_int(1)]),
                        Column::new("b".to_string(), vec![Value::test_int(2)]),
                    ])
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
            Example {
                description: "Filter dataframe using an expression",
                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with ((dfr col a) > 1)",
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new("a".to_string(), vec![Value::test_int(3)]),
                        Column::new("b".to_string(), vec![Value::test_int(4)]),
                    ])
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let value = input.into_value(call.head);

        if NuLazyFrame::can_downcast(&value) {
            let df = NuLazyFrame::try_from_value(value)?;
            command_lazy(engine_state, stack, call, df)
        } else {
            let df = NuDataFrame::try_from_value(value)?;
            command_eager(engine_state, stack, call, df)
        }
    }
}

fn command_eager(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    df: NuDataFrame,
) -> Result<PipelineData, ShellError> {
    let mask_value: Value = call.req(engine_state, stack, 0)?;
    let mask_span = mask_value.span();

    if NuExpression::can_downcast(&mask_value) {
        let expression = NuExpression::try_from_value(mask_value)?;
        let lazy = NuLazyFrame::new(true, df.lazy());
        let lazy = lazy.apply_with_expr(expression, LazyFrame::filter);

        Ok(PipelineData::Value(
            NuLazyFrame::into_value(lazy, call.head)?,
            None,
        ))
    } else {
        let mask = NuDataFrame::try_from_value(mask_value)?.as_series(mask_span)?;
        let mask = mask.bool().map_err(|e| {
            ShellError::GenericError(
                "Error casting to bool".into(),
                e.to_string(),
                Some(mask_span),
                Some("Perhaps you want to use a series with booleans as mask".into()),
                Vec::new(),
            )
        })?;

        df.as_ref()
            .filter(mask)
            .map_err(|e| {
                ShellError::GenericError(
                    "Error filtering dataframe".into(),
                    e.to_string(),
                    Some(call.head),
                    Some("The only allowed column types for dummies are String or Int".into()),
                    Vec::new(),
                )
            })
            .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
    }
}

fn command_lazy(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    lazy: NuLazyFrame,
) -> Result<PipelineData, ShellError> {
    let expr: Value = call.req(engine_state, stack, 0)?;
    let expr = NuExpression::try_from_value(expr)?;

    let lazy = lazy.apply_with_expr(expr, LazyFrame::filter);

    Ok(PipelineData::Value(
        NuLazyFrame::into_value(lazy, call.head)?,
        None,
    ))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;
    use crate::dataframe::expressions::ExprCol;

    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(FilterWith {}), Box::new(ExprCol {})])
    }
}
|
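Below is a small usage sketch of `dfr filter-with`, pulled from the examples embedded above; the input table and expected outputs are illustrative only and mirror the test data in those examples:

    # filter with a boolean mask (eager path)
    let mask = ([true false] | dfr into-df)
    [[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with $mask
    # only the first row (a = 1, b = 2) survives the mask

    # filter with an expression (routed through the lazy path)
    [[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with ((dfr col a) > 1)
    # only the row where a > 1 (a = 3, b = 4) is kept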
143
crates/nu-cmd-dataframe/src/dataframe/eager/first.rs
Normal file
@ -0,0 +1,143 @@
use super::super::values::{Column, NuDataFrame, NuExpression};
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};

#[derive(Clone)]
pub struct FirstDF;

impl Command for FirstDF {
    fn name(&self) -> &str {
        "dfr first"
    }

    fn usage(&self) -> &str {
        "Show only the first number of rows or create a first expression."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .optional(
                "rows",
                SyntaxShape::Int,
                "starting from the front, the number of rows to return",
            )
            .input_output_types(vec![
                (
                    Type::Custom("expression".into()),
                    Type::Custom("expression".into()),
                ),
                (
                    Type::Custom("dataframe".into()),
                    Type::Custom("dataframe".into()),
                ),
            ])
            .category(Category::Custom("dataframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Return the first row of a dataframe",
                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first",
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new("a".to_string(), vec![Value::test_int(1)]),
                        Column::new("b".to_string(), vec![Value::test_int(2)]),
                    ])
                    .expect("should not fail")
                    .into_value(Span::test_data()),
                ),
            },
            Example {
                description: "Return the first two rows of a dataframe",
                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first 2",
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new(
                            "a".to_string(),
                            vec![Value::test_int(1), Value::test_int(3)],
                        ),
                        Column::new(
                            "b".to_string(),
                            vec![Value::test_int(2), Value::test_int(4)],
                        ),
                    ])
                    .expect("should not fail")
                    .into_value(Span::test_data()),
                ),
            },
            Example {
                description: "Creates a first expression from a column",
                example: "dfr col a | dfr first",
                result: None,
            },
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let value = input.into_value(call.head);
        // A dataframe input returns rows; any other input is treated as an expression.
        if NuDataFrame::can_downcast(&value) {
            let df = NuDataFrame::try_from_value(value)?;
            command(engine_state, stack, call, df)
        } else {
            let expr = NuExpression::try_from_value(value)?;
            let expr: NuExpression = expr.into_polars().first().into();

            Ok(PipelineData::Value(
                NuExpression::into_value(expr, call.head),
                None,
            ))
        }
    }
}

fn command(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    df: NuDataFrame,
) -> Result<PipelineData, ShellError> {
    // Default to a single row when no count is given.
    let rows: Option<usize> = call.opt(engine_state, stack, 0)?;
    let rows = rows.unwrap_or(1);

    let res = df.as_ref().head(Some(rows));
    Ok(PipelineData::Value(
        NuDataFrame::dataframe_into_value(res, call.head),
        None,
    ))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
    use super::*;
    use crate::dataframe::lazy::aggregate::LazyAggregate;
    use crate::dataframe::lazy::groupby::ToLazyGroupBy;

    #[test]
    fn test_examples_dataframe() {
        let mut engine_state = build_test_engine_state(vec![Box::new(FirstDF {})]);
        test_dataframe_example(&mut engine_state, &FirstDF.examples()[0]);
        test_dataframe_example(&mut engine_state, &FirstDF.examples()[1]);
    }

    #[test]
    fn test_examples_expression() {
        let mut engine_state = build_test_engine_state(vec![
            Box::new(FirstDF {}),
            Box::new(LazyAggregate {}),
            Box::new(ToLazyGroupBy {}),
        ]);
        test_dataframe_example(&mut engine_state, &FirstDF.examples()[2]);
    }
}
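As a quick illustration of `dfr first`, here is a sketch built from the examples above; the table contents are illustrative:

    # default: return the first row
    [[a b]; [1 2] [3 4]] | dfr into-df | dfr first
    # return the first two rows
    [[a b]; [1 2] [3 4]] | dfr into-df | dfr first 2
    # used on an expression instead of a dataframe, it builds a first expression
    dfr col a | dfr first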
94
crates/nu-cmd-dataframe/src/dataframe/eager/get.rs
Normal file
@ -0,0 +1,94 @@
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};

use crate::dataframe::values::utils::convert_columns_string;

use super::super::values::{Column, NuDataFrame};

#[derive(Clone)]
pub struct GetDF;

impl Command for GetDF {
    fn name(&self) -> &str {
        "dfr get"
    }

    fn usage(&self) -> &str {
        "Creates dataframe with the selected columns."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .rest("rest", SyntaxShape::Any, "column names to select from the dataframe")
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe".into()))
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Returns the selected column",
            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr get a",
            result: Some(
                NuDataFrame::try_from_columns(vec![Column::new(
                    "a".to_string(),
                    vec![Value::test_int(1), Value::test_int(3)],
                )])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        }]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        command(engine_state, stack, call, input)
    }
}

fn command(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    // Convert the rest arguments to column names and select them from the input dataframe.
    let columns: Vec<Value> = call.rest(engine_state, stack, 0)?;
    let (col_string, col_span) = convert_columns_string(columns, call.head)?;

    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    df.as_ref()
        .select(col_string)
        .map_err(|e| {
            ShellError::GenericError(
                "Error selecting columns".into(),
                e.to_string(),
                Some(col_span),
                None,
                Vec::new(),
            )
        })
        .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;

    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(GetDF {})])
    }
}
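A brief usage sketch for `dfr get`, based on the example above; the second pipeline (selecting several columns at once) is an assumption inferred from the rest-argument signature rather than an example taken from the file:

    # create a dataframe containing only the selected column
    [[a b]; [1 2] [3 4]] | dfr into-df | dfr get a
    # multiple column names can be passed as rest arguments (assumed from the signature)
    [[a b]; [1 2] [3 4]] | dfr into-df | dfr get a b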
Some files were not shown because too many files have changed in this diff.