mirror of
https://github.com/nushell/nushell.git
synced 2025-04-21 11:48:28 +02:00
Merge branch 'main' into ecow-record
This commit is contained in:
commit
5556b5416c
16
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
16
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@ -13,7 +13,7 @@ body:
|
|||||||
id: repro
|
id: repro
|
||||||
attributes:
|
attributes:
|
||||||
label: How to reproduce
|
label: How to reproduce
|
||||||
description: Steps to reproduce the behavior
|
description: Steps to reproduce the behavior (including succinct code examples or screenshots of the observed behavior)
|
||||||
placeholder: |
|
placeholder: |
|
||||||
1.
|
1.
|
||||||
2.
|
2.
|
||||||
@ -28,13 +28,6 @@ body:
|
|||||||
placeholder: I expected nu to...
|
placeholder: I expected nu to...
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
|
||||||
id: screenshots
|
|
||||||
attributes:
|
|
||||||
label: Screenshots
|
|
||||||
description: Please add any relevant screenshots here, if any
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: config
|
id: config
|
||||||
attributes:
|
attributes:
|
||||||
@ -55,10 +48,3 @@ body:
|
|||||||
| installed_plugins | binaryview, chart bar, chart line, fetch, from bson, from sqlite, inc, match, post, ps, query json, s3, selector, start, sys, textview, to bson, to sqlite, tree, xpath |
|
| installed_plugins | binaryview, chart bar, chart line, fetch, from bson, from sqlite, inc, match, post, ps, query json, s3, selector, start, sys, textview, to bson, to sqlite, tree, xpath |
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
|
||||||
id: context
|
|
||||||
attributes:
|
|
||||||
label: Additional context
|
|
||||||
description: Add any other context about the problem here.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
15
.github/dependabot.yml
vendored
15
.github/dependabot.yml
vendored
@ -18,6 +18,21 @@ updates:
|
|||||||
ignore:
|
ignore:
|
||||||
- dependency-name: "*"
|
- dependency-name: "*"
|
||||||
update-types: ["version-update:semver-patch"]
|
update-types: ["version-update:semver-patch"]
|
||||||
|
groups:
|
||||||
|
# Only update polars as a whole as there are many subcrates that need to
|
||||||
|
# be updated at once. We explicitly depend on some of them, so batch their
|
||||||
|
# updates to not take up dependabot PR slots with dysfunctional PRs
|
||||||
|
polars:
|
||||||
|
patterns:
|
||||||
|
- "polars"
|
||||||
|
- "polars-*"
|
||||||
|
# uutils/coreutils also versions all their workspace crates the same at the moment
|
||||||
|
# Most of them have bleeding edge version requirements (some not)
|
||||||
|
# see: https://github.com/uutils/coreutils/blob/main/Cargo.toml
|
||||||
|
uutils:
|
||||||
|
patterns:
|
||||||
|
- "uucore"
|
||||||
|
- "uu_*"
|
||||||
- package-ecosystem: "github-actions"
|
- package-ecosystem: "github-actions"
|
||||||
directory: "/"
|
directory: "/"
|
||||||
schedule:
|
schedule:
|
||||||
|
4
.github/workflows/audit.yml
vendored
4
.github/workflows/audit.yml
vendored
@ -19,7 +19,7 @@ jobs:
|
|||||||
# Prevent sudden announcement of a new advisory from failing ci:
|
# Prevent sudden announcement of a new advisory from failing ci:
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4.1.5
|
- uses: actions/checkout@v4.1.7
|
||||||
- uses: rustsec/audit-check@v1.4.1
|
- uses: rustsec/audit-check@v2.0.0
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
121
.github/workflows/ci.yml
vendored
121
.github/workflows/ci.yml
vendored
@ -3,6 +3,7 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
|
- 'patch-release-*'
|
||||||
|
|
||||||
name: continuous-integration
|
name: continuous-integration
|
||||||
|
|
||||||
@ -21,84 +22,53 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: true
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
# Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu
|
# Pinning to Ubuntu 22.04 because building on newer Ubuntu versions causes linux-gnu
|
||||||
# builds to link against a too-new-for-many-Linux-installs glibc version. Consider
|
# builds to link against a too-new-for-many-Linux-installs glibc version. Consider
|
||||||
# revisiting this when 20.04 is closer to EOL (April 2025)
|
# revisiting this when 22.04 is closer to EOL (June 2027)
|
||||||
#
|
#
|
||||||
# Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
|
# Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
|
||||||
# instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
|
# instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
|
||||||
# removed and we're only building the `polars` plugin instead
|
# removed and we're only building the `polars` plugin instead
|
||||||
platform: [windows-latest, macos-13, ubuntu-20.04]
|
platform: [windows-latest, macos-13, ubuntu-22.04]
|
||||||
feature: [default, dataframe]
|
|
||||||
include:
|
|
||||||
- feature: default
|
|
||||||
flags: ""
|
|
||||||
- feature: dataframe
|
|
||||||
flags: "--features=dataframe"
|
|
||||||
exclude:
|
|
||||||
- platform: windows-latest
|
|
||||||
feature: dataframe
|
|
||||||
- platform: macos-13
|
|
||||||
feature: dataframe
|
|
||||||
|
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4.1.5
|
- uses: actions/checkout@v4.1.7
|
||||||
|
|
||||||
- name: Setup Rust toolchain and cache
|
- name: Setup Rust toolchain and cache
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
|
||||||
rustflags: ""
|
|
||||||
|
|
||||||
- name: cargo fmt
|
- name: cargo fmt
|
||||||
run: cargo fmt --all -- --check
|
run: cargo fmt --all -- --check
|
||||||
|
|
||||||
# If changing these settings also change toolkit.nu
|
# If changing these settings also change toolkit.nu
|
||||||
- name: Clippy
|
- name: Clippy
|
||||||
run: cargo clippy --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- $CLIPPY_OPTIONS
|
run: cargo clippy --workspace --exclude nu_plugin_* -- $CLIPPY_OPTIONS
|
||||||
|
|
||||||
# In tests we don't have to deny unwrap
|
# In tests we don't have to deny unwrap
|
||||||
- name: Clippy of tests
|
- name: Clippy of tests
|
||||||
run: cargo clippy --tests --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings
|
run: cargo clippy --tests --workspace --exclude nu_plugin_* -- -D warnings
|
||||||
|
|
||||||
- name: Clippy of benchmarks
|
- name: Clippy of benchmarks
|
||||||
run: cargo clippy --benches --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings
|
run: cargo clippy --benches --workspace --exclude nu_plugin_* -- -D warnings
|
||||||
|
|
||||||
tests:
|
tests:
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: true
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
platform: [windows-latest, macos-latest, ubuntu-20.04]
|
platform: [windows-latest, macos-latest, ubuntu-22.04]
|
||||||
feature: [default, dataframe]
|
|
||||||
include:
|
|
||||||
# linux CI cannot handle clipboard feature
|
|
||||||
- default-flags: ""
|
|
||||||
- platform: ubuntu-20.04
|
|
||||||
default-flags: "--no-default-features --features=default-no-clipboard"
|
|
||||||
- feature: default
|
|
||||||
flags: ""
|
|
||||||
- feature: dataframe
|
|
||||||
flags: "--features=dataframe"
|
|
||||||
exclude:
|
|
||||||
- platform: windows-latest
|
|
||||||
feature: dataframe
|
|
||||||
- platform: macos-latest
|
|
||||||
feature: dataframe
|
|
||||||
|
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4.1.5
|
- uses: actions/checkout@v4.1.7
|
||||||
|
|
||||||
- name: Setup Rust toolchain and cache
|
- name: Setup Rust toolchain and cache
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
|
||||||
rustflags: ""
|
|
||||||
|
|
||||||
- name: Tests
|
- name: Tests
|
||||||
run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.default-flags }} ${{ matrix.flags }}
|
run: cargo test --workspace --profile ci --exclude nu_plugin_*
|
||||||
|
|
||||||
- name: Check for clean repo
|
- name: Check for clean repo
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
@ -114,22 +84,20 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: true
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
platform: [ubuntu-20.04, macos-latest, windows-latest]
|
platform: [ubuntu-22.04, macos-latest, windows-latest]
|
||||||
py:
|
py:
|
||||||
- py
|
- py
|
||||||
|
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4.1.5
|
- uses: actions/checkout@v4.1.7
|
||||||
|
|
||||||
- name: Setup Rust toolchain and cache
|
- name: Setup Rust toolchain and cache
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
|
||||||
rustflags: ""
|
|
||||||
|
|
||||||
- name: Install Nushell
|
- name: Install Nushell
|
||||||
run: cargo install --path . --locked --no-default-features
|
run: cargo install --path . --locked --force
|
||||||
|
|
||||||
- name: Standard library tests
|
- name: Standard library tests
|
||||||
run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
|
run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
|
||||||
@ -169,17 +137,15 @@ jobs:
|
|||||||
# instead of 14 GB) which is too little for us right now.
|
# instead of 14 GB) which is too little for us right now.
|
||||||
#
|
#
|
||||||
# Failure occurring with clippy for rust 1.77.2
|
# Failure occurring with clippy for rust 1.77.2
|
||||||
platform: [windows-latest, macos-13, ubuntu-20.04]
|
platform: [windows-latest, macos-13, ubuntu-22.04]
|
||||||
|
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4.1.5
|
- uses: actions/checkout@v4.1.7
|
||||||
|
|
||||||
- name: Setup Rust toolchain and cache
|
- name: Setup Rust toolchain and cache
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
with:
|
|
||||||
rustflags: ""
|
|
||||||
|
|
||||||
- name: Clippy
|
- name: Clippy
|
||||||
run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
|
run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
|
||||||
@ -197,3 +163,50 @@ jobs:
|
|||||||
else
|
else
|
||||||
echo "no changes in working directory";
|
echo "no changes in working directory";
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
wasm:
|
||||||
|
env:
|
||||||
|
WASM_OPTIONS: --no-default-features --target wasm32-unknown-unknown
|
||||||
|
CLIPPY_CONF_DIR: ${{ github.workspace }}/clippy/wasm/
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
job:
|
||||||
|
- name: Build WASM
|
||||||
|
command: cargo build
|
||||||
|
args:
|
||||||
|
- name: Clippy WASM
|
||||||
|
command: cargo clippy
|
||||||
|
args: -- $CLIPPY_OPTIONS
|
||||||
|
|
||||||
|
name: ${{ matrix.job.name }}
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4.1.7
|
||||||
|
|
||||||
|
- name: Setup Rust toolchain and cache
|
||||||
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
|
|
||||||
|
- name: Add wasm32-unknown-unknown target
|
||||||
|
run: rustup target add wasm32-unknown-unknown
|
||||||
|
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-cmd-base $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-cmd-extra $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-cmd-lang $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-color-config $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-command $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-derive-value $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-engine $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-glob $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-json $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-parser $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-path $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-pretty-hex $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-protocol $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-std $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-system $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-table $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-term-grid $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nu-utils $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
- run: ${{ matrix.job.command }} -p nuon $WASM_OPTIONS ${{ matrix.job.args }}
|
||||||
|
30
.github/workflows/milestone.yml
vendored
Normal file
30
.github/workflows/milestone.yml
vendored
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
# Description:
|
||||||
|
# - Add milestone to a merged PR automatically
|
||||||
|
# - Add milestone to a closed issue that has a merged PR fix (if any)
|
||||||
|
|
||||||
|
name: Milestone Action
|
||||||
|
on:
|
||||||
|
issues:
|
||||||
|
types: [closed]
|
||||||
|
pull_request_target:
|
||||||
|
types: [closed]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
update-milestone:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: Milestone Update
|
||||||
|
steps:
|
||||||
|
- name: Set Milestone for PR
|
||||||
|
uses: hustcer/milestone-action@main
|
||||||
|
if: github.event.pull_request.merged == true
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
# Bind milestone to closed issue that has a merged PR fix
|
||||||
|
- name: Set Milestone for Issue
|
||||||
|
uses: hustcer/milestone-action@v2
|
||||||
|
if: github.event.issue.state == 'closed'
|
||||||
|
with:
|
||||||
|
action: bind-issue
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
172
.github/workflows/nightly-build.yml
vendored
172
.github/workflows/nightly-build.yml
vendored
@ -27,7 +27,7 @@ jobs:
|
|||||||
# if: github.repository == 'nushell/nightly'
|
# if: github.repository == 'nushell/nightly'
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4.1.5
|
uses: actions/checkout@v4
|
||||||
if: github.repository == 'nushell/nightly'
|
if: github.repository == 'nushell/nightly'
|
||||||
with:
|
with:
|
||||||
ref: main
|
ref: main
|
||||||
@ -36,10 +36,10 @@ jobs:
|
|||||||
token: ${{ secrets.WORKFLOW_TOKEN }}
|
token: ${{ secrets.WORKFLOW_TOKEN }}
|
||||||
|
|
||||||
- name: Setup Nushell
|
- name: Setup Nushell
|
||||||
uses: hustcer/setup-nu@v3.10
|
uses: hustcer/setup-nu@v3
|
||||||
if: github.repository == 'nushell/nightly'
|
if: github.repository == 'nushell/nightly'
|
||||||
with:
|
with:
|
||||||
version: 0.93.0
|
version: 0.101.0
|
||||||
|
|
||||||
# Synchronize the main branch of nightly repo with the main branch of Nushell official repo
|
# Synchronize the main branch of nightly repo with the main branch of Nushell official repo
|
||||||
- name: Prepare for Nightly Release
|
- name: Prepare for Nightly Release
|
||||||
@ -65,7 +65,7 @@ jobs:
|
|||||||
}
|
}
|
||||||
|
|
||||||
standard:
|
standard:
|
||||||
name: Std
|
name: Nu
|
||||||
needs: prepare
|
needs: prepare
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
@ -78,52 +78,50 @@ jobs:
|
|||||||
- x86_64-unknown-linux-gnu
|
- x86_64-unknown-linux-gnu
|
||||||
- x86_64-unknown-linux-musl
|
- x86_64-unknown-linux-musl
|
||||||
- aarch64-unknown-linux-gnu
|
- aarch64-unknown-linux-gnu
|
||||||
|
- aarch64-unknown-linux-musl
|
||||||
- armv7-unknown-linux-gnueabihf
|
- armv7-unknown-linux-gnueabihf
|
||||||
|
- armv7-unknown-linux-musleabihf
|
||||||
- riscv64gc-unknown-linux-gnu
|
- riscv64gc-unknown-linux-gnu
|
||||||
|
- loongarch64-unknown-linux-gnu
|
||||||
extra: ['bin']
|
extra: ['bin']
|
||||||
include:
|
include:
|
||||||
- target: aarch64-apple-darwin
|
- target: aarch64-apple-darwin
|
||||||
os: macos-latest
|
os: macos-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-apple-darwin
|
- target: x86_64-apple-darwin
|
||||||
os: macos-latest
|
os: macos-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-pc-windows-msvc
|
- target: x86_64-pc-windows-msvc
|
||||||
extra: 'bin'
|
extra: 'bin'
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-pc-windows-msvc
|
- target: x86_64-pc-windows-msvc
|
||||||
extra: msi
|
extra: msi
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: aarch64-pc-windows-msvc
|
- target: aarch64-pc-windows-msvc
|
||||||
extra: 'bin'
|
extra: 'bin'
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: aarch64-pc-windows-msvc
|
- target: aarch64-pc-windows-msvc
|
||||||
extra: msi
|
extra: msi
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-unknown-linux-gnu
|
- target: x86_64-unknown-linux-gnu
|
||||||
os: ubuntu-20.04
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-unknown-linux-musl
|
- target: x86_64-unknown-linux-musl
|
||||||
os: ubuntu-20.04
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
|
||||||
- target: aarch64-unknown-linux-gnu
|
- target: aarch64-unknown-linux-gnu
|
||||||
os: ubuntu-20.04
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
- target: aarch64-unknown-linux-musl
|
||||||
|
os: ubuntu-22.04
|
||||||
- target: armv7-unknown-linux-gnueabihf
|
- target: armv7-unknown-linux-gnueabihf
|
||||||
os: ubuntu-20.04
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
- target: armv7-unknown-linux-musleabihf
|
||||||
|
os: ubuntu-22.04
|
||||||
- target: riscv64gc-unknown-linux-gnu
|
- target: riscv64gc-unknown-linux-gnu
|
||||||
os: ubuntu-latest
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
- target: loongarch64-unknown-linux-gnu
|
||||||
|
os: ubuntu-22.04
|
||||||
|
|
||||||
runs-on: ${{matrix.os}}
|
runs-on: ${{matrix.os}}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4.1.5
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: main
|
ref: main
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@ -133,26 +131,24 @@ jobs:
|
|||||||
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
||||||
|
|
||||||
- name: Setup Rust toolchain and cache
|
- name: Setup Rust toolchain and cache
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
||||||
with:
|
with:
|
||||||
rustflags: ''
|
rustflags: ''
|
||||||
|
|
||||||
- name: Setup Nushell
|
- name: Setup Nushell
|
||||||
uses: hustcer/setup-nu@v3.10
|
uses: hustcer/setup-nu@v3
|
||||||
with:
|
with:
|
||||||
version: 0.93.0
|
version: 0.101.0
|
||||||
|
|
||||||
- name: Release Nu Binary
|
- name: Release Nu Binary
|
||||||
id: nu
|
id: nu
|
||||||
run: nu .github/workflows/release-pkg.nu
|
run: nu .github/workflows/release-pkg.nu
|
||||||
env:
|
env:
|
||||||
RELEASE_TYPE: standard
|
|
||||||
OS: ${{ matrix.os }}
|
OS: ${{ matrix.os }}
|
||||||
REF: ${{ github.ref }}
|
REF: ${{ github.ref }}
|
||||||
TARGET: ${{ matrix.target }}
|
TARGET: ${{ matrix.target }}
|
||||||
_EXTRA_: ${{ matrix.extra }}
|
_EXTRA_: ${{ matrix.extra }}
|
||||||
TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}
|
|
||||||
|
|
||||||
- name: Create an Issue for Release Failure
|
- name: Create an Issue for Release Failure
|
||||||
if: ${{ failure() }}
|
if: ${{ failure() }}
|
||||||
@ -174,7 +170,7 @@ jobs:
|
|||||||
# REF: https://github.com/marketplace/actions/gh-release
|
# REF: https://github.com/marketplace/actions/gh-release
|
||||||
# Create a release only in nushell/nightly repo
|
# Create a release only in nushell/nightly repo
|
||||||
- name: Publish Archive
|
- name: Publish Archive
|
||||||
uses: softprops/action-gh-release@v2.0.5
|
uses: softprops/action-gh-release@v2.0.9
|
||||||
if: ${{ startsWith(github.repository, 'nushell/nightly') }}
|
if: ${{ startsWith(github.repository, 'nushell/nightly') }}
|
||||||
with:
|
with:
|
||||||
prerelease: true
|
prerelease: true
|
||||||
@ -184,122 +180,6 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
full:
|
|
||||||
name: Full
|
|
||||||
needs: prepare
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
target:
|
|
||||||
- aarch64-apple-darwin
|
|
||||||
- x86_64-apple-darwin
|
|
||||||
- x86_64-pc-windows-msvc
|
|
||||||
- aarch64-pc-windows-msvc
|
|
||||||
- x86_64-unknown-linux-gnu
|
|
||||||
- x86_64-unknown-linux-musl
|
|
||||||
- aarch64-unknown-linux-gnu
|
|
||||||
extra: ['bin']
|
|
||||||
include:
|
|
||||||
- target: aarch64-apple-darwin
|
|
||||||
os: macos-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-apple-darwin
|
|
||||||
os: macos-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-pc-windows-msvc
|
|
||||||
extra: 'bin'
|
|
||||||
os: windows-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-pc-windows-msvc
|
|
||||||
extra: msi
|
|
||||||
os: windows-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: aarch64-pc-windows-msvc
|
|
||||||
extra: 'bin'
|
|
||||||
os: windows-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: aarch64-pc-windows-msvc
|
|
||||||
extra: msi
|
|
||||||
os: windows-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-unknown-linux-gnu
|
|
||||||
os: ubuntu-20.04
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-unknown-linux-musl
|
|
||||||
os: ubuntu-20.04
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: aarch64-unknown-linux-gnu
|
|
||||||
os: ubuntu-20.04
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
|
|
||||||
runs-on: ${{matrix.os}}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4.1.5
|
|
||||||
with:
|
|
||||||
ref: main
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Update Rust Toolchain Target
|
|
||||||
run: |
|
|
||||||
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
|
||||||
|
|
||||||
- name: Setup Rust toolchain and cache
|
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
|
||||||
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
|
||||||
with:
|
|
||||||
rustflags: ''
|
|
||||||
|
|
||||||
- name: Setup Nushell
|
|
||||||
uses: hustcer/setup-nu@v3.10
|
|
||||||
with:
|
|
||||||
version: 0.93.0
|
|
||||||
|
|
||||||
- name: Release Nu Binary
|
|
||||||
id: nu
|
|
||||||
run: nu .github/workflows/release-pkg.nu
|
|
||||||
env:
|
|
||||||
RELEASE_TYPE: full
|
|
||||||
OS: ${{ matrix.os }}
|
|
||||||
REF: ${{ github.ref }}
|
|
||||||
TARGET: ${{ matrix.target }}
|
|
||||||
_EXTRA_: ${{ matrix.extra }}
|
|
||||||
TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}
|
|
||||||
|
|
||||||
- name: Create an Issue for Release Failure
|
|
||||||
if: ${{ failure() }}
|
|
||||||
uses: JasonEtco/create-an-issue@v2.9.2
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
update_existing: true
|
|
||||||
search_existing: open
|
|
||||||
filename: .github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md
|
|
||||||
|
|
||||||
- name: Set Outputs of Short SHA
|
|
||||||
id: vars
|
|
||||||
run: |
|
|
||||||
echo "date=$(date -u +'%Y-%m-%d')" >> $GITHUB_OUTPUT
|
|
||||||
sha_short=$(git rev-parse --short HEAD)
|
|
||||||
echo "sha_short=${sha_short:0:7}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
# REF: https://github.com/marketplace/actions/gh-release
|
|
||||||
# Create a release only in nushell/nightly repo
|
|
||||||
- name: Publish Archive
|
|
||||||
uses: softprops/action-gh-release@v2.0.5
|
|
||||||
if: ${{ startsWith(github.repository, 'nushell/nightly') }}
|
|
||||||
with:
|
|
||||||
draft: false
|
|
||||||
prerelease: true
|
|
||||||
name: Nu-nightly-${{ steps.vars.outputs.date }}-${{ steps.vars.outputs.sha_short }}
|
|
||||||
tag_name: nightly-${{ steps.vars.outputs.sha_short }}
|
|
||||||
body: |
|
|
||||||
This is a NIGHTLY build of Nushell.
|
|
||||||
It is NOT recommended for production use.
|
|
||||||
files: ${{ steps.nu.outputs.archive }}
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
cleanup:
|
cleanup:
|
||||||
name: Cleanup
|
name: Cleanup
|
||||||
# Should only run in nushell/nightly repo
|
# Should only run in nushell/nightly repo
|
||||||
@ -310,14 +190,14 @@ jobs:
|
|||||||
- name: Waiting for Release
|
- name: Waiting for Release
|
||||||
run: sleep 1800
|
run: sleep 1800
|
||||||
|
|
||||||
- uses: actions/checkout@v4.1.5
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: main
|
ref: main
|
||||||
|
|
||||||
- name: Setup Nushell
|
- name: Setup Nushell
|
||||||
uses: hustcer/setup-nu@v3.10
|
uses: hustcer/setup-nu@v3
|
||||||
with:
|
with:
|
||||||
version: 0.93.0
|
version: 0.101.0
|
||||||
|
|
||||||
# Keep the last a few releases
|
# Keep the last a few releases
|
||||||
- name: Delete Older Releases
|
- name: Delete Older Releases
|
||||||
|
96
.github/workflows/release-pkg.nu
vendored
96
.github/workflows/release-pkg.nu
vendored
@ -9,7 +9,6 @@
|
|||||||
# Instructions for manually creating an MSI for Winget Releases when they fail
|
# Instructions for manually creating an MSI for Winget Releases when they fail
|
||||||
# Added 2022-11-29 when Windows packaging wouldn't work
|
# Added 2022-11-29 when Windows packaging wouldn't work
|
||||||
# Updated again on 2023-02-23 because msis are still failing validation
|
# Updated again on 2023-02-23 because msis are still failing validation
|
||||||
# Update on 2023-10-18 to use RELEASE_TYPE env var to determine if full or not
|
|
||||||
# To run this manual for windows here are the steps I take
|
# To run this manual for windows here are the steps I take
|
||||||
# checkout the release you want to publish
|
# checkout the release you want to publish
|
||||||
# 1. git checkout 0.86.0
|
# 1. git checkout 0.86.0
|
||||||
@ -17,28 +16,26 @@
|
|||||||
# 2. $env:CARGO_TARGET_DIR = ""
|
# 2. $env:CARGO_TARGET_DIR = ""
|
||||||
# 2. hide-env CARGO_TARGET_DIR
|
# 2. hide-env CARGO_TARGET_DIR
|
||||||
# 3. $env.TARGET = 'x86_64-pc-windows-msvc'
|
# 3. $env.TARGET = 'x86_64-pc-windows-msvc'
|
||||||
# 4. $env.TARGET_RUSTFLAGS = ''
|
# 4. $env.GITHUB_WORKSPACE = 'D:\nushell'
|
||||||
# 5. $env.GITHUB_WORKSPACE = 'D:\nushell'
|
# 5. $env.GITHUB_OUTPUT = 'D:\nushell\output\out.txt'
|
||||||
# 6. $env.GITHUB_OUTPUT = 'D:\nushell\output\out.txt'
|
# 6. $env.OS = 'windows-latest'
|
||||||
# 7. $env.OS = 'windows-latest'
|
|
||||||
# 8. $env.RELEASE_TYPE = '' # There is full and '' for normal releases
|
|
||||||
# make sure 7z.exe is in your path https://www.7-zip.org/download.html
|
# make sure 7z.exe is in your path https://www.7-zip.org/download.html
|
||||||
# 9. $env.Path = ($env.Path | append 'c:\apps\7-zip')
|
# 7. $env.Path = ($env.Path | append 'c:\apps\7-zip')
|
||||||
# make sure aria2c.exe is in your path https://github.com/aria2/aria2
|
# make sure aria2c.exe is in your path https://github.com/aria2/aria2
|
||||||
# 10. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
|
# 8. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
|
||||||
# make sure you have the wixtools installed https://wixtoolset.org/
|
# make sure you have the wixtools installed https://wixtoolset.org/
|
||||||
# 11. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
|
# 9. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
|
||||||
# You need to run the release-pkg twice. The first pass, with _EXTRA_ as 'bin', makes the output
|
# You need to run the release-pkg twice. The first pass, with _EXTRA_ as 'bin', makes the output
|
||||||
# folder and builds everything. The second pass, that generates the msi file, with _EXTRA_ as 'msi'
|
# folder and builds everything. The second pass, that generates the msi file, with _EXTRA_ as 'msi'
|
||||||
# 12. $env._EXTRA_ = 'bin'
|
# 10. $env._EXTRA_ = 'bin'
|
||||||
# 13. source .github\workflows\release-pkg.nu
|
# 11. source .github\workflows\release-pkg.nu
|
||||||
# 14. cd ..
|
# 12. cd ..
|
||||||
# 15. $env._EXTRA_ = 'msi'
|
# 13. $env._EXTRA_ = 'msi'
|
||||||
# 16. source .github\workflows\release-pkg.nu
|
# 14. source .github\workflows\release-pkg.nu
|
||||||
# After msi is generated, you have to update winget-pkgs repo, you'll need to patch the release
|
# After msi is generated, you have to update winget-pkgs repo, you'll need to patch the release
|
||||||
# by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
|
# by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
|
||||||
# on the winget-pkgs PR. To generate the hash, run this command
|
# on the winget-pkgs PR. To generate the hash, run this command
|
||||||
# 17. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
|
# 15. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
|
||||||
# Then, just take the output and put it in the winget-pkgs PR for the hash on the msi
|
# Then, just take the output and put it in the winget-pkgs PR for the hash on the msi
|
||||||
|
|
||||||
|
|
||||||
@ -48,31 +45,15 @@ let os = $env.OS
|
|||||||
let target = $env.TARGET
|
let target = $env.TARGET
|
||||||
# Repo source dir like `/home/runner/work/nushell/nushell`
|
# Repo source dir like `/home/runner/work/nushell/nushell`
|
||||||
let src = $env.GITHUB_WORKSPACE
|
let src = $env.GITHUB_WORKSPACE
|
||||||
let flags = $env.TARGET_RUSTFLAGS
|
|
||||||
let dist = $'($env.GITHUB_WORKSPACE)/output'
|
let dist = $'($env.GITHUB_WORKSPACE)/output'
|
||||||
let version = (open Cargo.toml | get package.version)
|
let version = (open Cargo.toml | get package.version)
|
||||||
|
|
||||||
print $'Debugging info:'
|
print $'Debugging info:'
|
||||||
print { version: $version, bin: $bin, os: $os, releaseType: $env.RELEASE_TYPE, target: $target, src: $src, flags: $flags, dist: $dist }; hr-line -b
|
print { version: $version, bin: $bin, os: $os, target: $target, src: $src, dist: $dist }; hr-line -b
|
||||||
|
|
||||||
# Rename the full release name so that we won't break the existing scripts for standard release downloading, such as:
|
|
||||||
# curl -s https://api.github.com/repos/chmln/sd/releases/latest | grep browser_download_url | cut -d '"' -f 4 | grep x86_64-unknown-linux-musl
|
|
||||||
const FULL_RLS_NAMING = {
|
|
||||||
x86_64-apple-darwin: 'x86_64-darwin-full',
|
|
||||||
aarch64-apple-darwin: 'aarch64-darwin-full',
|
|
||||||
x86_64-unknown-linux-gnu: 'x86_64-linux-gnu-full',
|
|
||||||
x86_64-pc-windows-msvc: 'x86_64-windows-msvc-full',
|
|
||||||
x86_64-unknown-linux-musl: 'x86_64-linux-musl-full',
|
|
||||||
aarch64-unknown-linux-gnu: 'aarch64-linux-gnu-full',
|
|
||||||
aarch64-pc-windows-msvc: 'aarch64-windows-msvc-full',
|
|
||||||
riscv64gc-unknown-linux-gnu: 'riscv64-linux-gnu-full',
|
|
||||||
armv7-unknown-linux-gnueabihf: 'armv7-linux-gnueabihf-full',
|
|
||||||
}
|
|
||||||
|
|
||||||
# $env
|
# $env
|
||||||
|
|
||||||
let USE_UBUNTU = $os starts-with ubuntu
|
let USE_UBUNTU = $os starts-with ubuntu
|
||||||
let FULL_NAME = $FULL_RLS_NAMING | get -i $target | default 'unknown-target-full'
|
|
||||||
|
|
||||||
print $'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
|
print $'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
|
||||||
if not ('Cargo.lock' | path exists) { cargo generate-lockfile }
|
if not ('Cargo.lock' | path exists) { cargo generate-lockfile }
|
||||||
@ -91,23 +72,44 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
|
|||||||
'aarch64-unknown-linux-gnu' => {
|
'aarch64-unknown-linux-gnu' => {
|
||||||
sudo apt-get install gcc-aarch64-linux-gnu -y
|
sudo apt-get install gcc-aarch64-linux-gnu -y
|
||||||
$env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
|
$env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
|
||||||
cargo-build-nu $flags
|
cargo-build-nu
|
||||||
}
|
}
|
||||||
'riscv64gc-unknown-linux-gnu' => {
|
'riscv64gc-unknown-linux-gnu' => {
|
||||||
sudo apt-get install gcc-riscv64-linux-gnu -y
|
sudo apt-get install gcc-riscv64-linux-gnu -y
|
||||||
$env.CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_LINKER = 'riscv64-linux-gnu-gcc'
|
$env.CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_LINKER = 'riscv64-linux-gnu-gcc'
|
||||||
cargo-build-nu $flags
|
cargo-build-nu
|
||||||
}
|
}
|
||||||
'armv7-unknown-linux-gnueabihf' => {
|
'armv7-unknown-linux-gnueabihf' => {
|
||||||
sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
|
sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
|
||||||
$env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
|
$env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
|
||||||
cargo-build-nu $flags
|
cargo-build-nu
|
||||||
|
}
|
||||||
|
'aarch64-unknown-linux-musl' => {
|
||||||
|
aria2c https://musl.cc/aarch64-linux-musl-cross.tgz
|
||||||
|
tar -xf aarch64-linux-musl-cross.tgz -C $env.HOME
|
||||||
|
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/aarch64-linux-musl-cross/bin')
|
||||||
|
$env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER = 'aarch64-linux-musl-gcc'
|
||||||
|
cargo-build-nu
|
||||||
|
}
|
||||||
|
'armv7-unknown-linux-musleabihf' => {
|
||||||
|
aria2c https://musl.cc/armv7r-linux-musleabihf-cross.tgz
|
||||||
|
tar -xf armv7r-linux-musleabihf-cross.tgz -C $env.HOME
|
||||||
|
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/armv7r-linux-musleabihf-cross/bin')
|
||||||
|
$env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER = 'armv7r-linux-musleabihf-gcc'
|
||||||
|
cargo-build-nu
|
||||||
|
}
|
||||||
|
'loongarch64-unknown-linux-gnu' => {
|
||||||
|
aria2c https://github.com/loongson/build-tools/releases/download/2024.08.08/x86_64-cross-tools-loongarch64-binutils_2.43-gcc_14.2.0-glibc_2.40.tar.xz
|
||||||
|
tar xf x86_64-cross-tools-loongarch64-*.tar.xz
|
||||||
|
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.PWD)/cross-tools/bin')
|
||||||
|
$env.CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_LINKER = 'loongarch64-unknown-linux-gnu-gcc'
|
||||||
|
cargo-build-nu
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
|
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
|
||||||
# Actually just for x86_64-unknown-linux-musl target
|
# Actually just for x86_64-unknown-linux-musl target
|
||||||
if $USE_UBUNTU { sudo apt install musl-tools -y }
|
if $USE_UBUNTU { sudo apt install musl-tools -y }
|
||||||
cargo-build-nu $flags
|
cargo-build-nu
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -116,7 +118,7 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
|
|||||||
# Build for Windows without static-link-openssl feature
|
# Build for Windows without static-link-openssl feature
|
||||||
# ----------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------
|
||||||
if $os in ['windows-latest'] {
|
if $os in ['windows-latest'] {
|
||||||
cargo-build-nu $flags
|
cargo-build-nu
|
||||||
}
|
}
|
||||||
|
|
||||||
# ----------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------
|
||||||
@ -162,7 +164,7 @@ cd $dist; print $'(char nl)Creating release archive...'; hr-line
|
|||||||
if $os in ['macos-latest'] or $USE_UBUNTU {
|
if $os in ['macos-latest'] or $USE_UBUNTU {
|
||||||
|
|
||||||
let files = (ls | get name)
|
let files = (ls | get name)
|
||||||
let dest = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' }
|
let dest = $'($bin)-($version)-($target)'
|
||||||
let archive = $'($dist)/($dest).tar.gz'
|
let archive = $'($dist)/($dest).tar.gz'
|
||||||
|
|
||||||
mkdir $dest
|
mkdir $dest
|
||||||
@ -177,11 +179,15 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
|
|||||||
|
|
||||||
} else if $os == 'windows-latest' {
|
} else if $os == 'windows-latest' {
|
||||||
|
|
||||||
let releaseStem = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' }
|
let releaseStem = $'($bin)-($version)-($target)'
|
||||||
|
|
||||||
print $'(char nl)Download less related stuffs...'; hr-line
|
print $'(char nl)Download less related stuffs...'; hr-line
|
||||||
|
# todo: less-v661 is out but is released as a zip file. maybe we should switch to that and extract it?
|
||||||
aria2c https://github.com/jftuga/less-Windows/releases/download/less-v608/less.exe -o less.exe
|
aria2c https://github.com/jftuga/less-Windows/releases/download/less-v608/less.exe -o less.exe
|
||||||
aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt
|
# the below was renamed because it was failing to download for darren. it should work but it wasn't
|
||||||
|
# todo: maybe we should get rid of this aria2c dependency and just use http get?
|
||||||
|
#aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt
|
||||||
|
aria2c https://github.com/jftuga/less-Windows/blob/master/LICENSE -o LICENSE-for-less.txt
|
||||||
|
|
||||||
# Create Windows msi release package
|
# Create Windows msi release package
|
||||||
if (get-env _EXTRA_) == 'msi' {
|
if (get-env _EXTRA_) == 'msi' {
|
||||||
@ -214,20 +220,12 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
def 'cargo-build-nu' [ options: string ] {
|
def 'cargo-build-nu' [] {
|
||||||
if ($options | str trim | is-empty) {
|
|
||||||
if $os == 'windows-latest' {
|
if $os == 'windows-latest' {
|
||||||
cargo build --release --all --target $target
|
cargo build --release --all --target $target
|
||||||
} else {
|
} else {
|
||||||
cargo build --release --all --target $target --features=static-link-openssl
|
cargo build --release --all --target $target --features=static-link-openssl
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
if $os == 'windows-latest' {
|
|
||||||
cargo build --release --all --target $target $options
|
|
||||||
} else {
|
|
||||||
cargo build --release --all --target $target --features=static-link-openssl $options
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Print a horizontal line marker
|
# Print a horizontal line marker
|
||||||
|
148
.github/workflows/release.yml
vendored
148
.github/workflows/release.yml
vendored
@ -7,15 +7,17 @@ name: Create Release Draft
|
|||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
push:
|
push:
|
||||||
tags: ["[0-9]+.[0-9]+.[0-9]+*"]
|
tags:
|
||||||
|
- '[0-9]+.[0-9]+.[0-9]+*'
|
||||||
|
- '!*nightly*' # Don't trigger release for nightly tags
|
||||||
|
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
standard:
|
release:
|
||||||
name: Std
|
name: Nu
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
@ -28,81 +30,78 @@ jobs:
|
|||||||
- x86_64-unknown-linux-gnu
|
- x86_64-unknown-linux-gnu
|
||||||
- x86_64-unknown-linux-musl
|
- x86_64-unknown-linux-musl
|
||||||
- aarch64-unknown-linux-gnu
|
- aarch64-unknown-linux-gnu
|
||||||
|
- aarch64-unknown-linux-musl
|
||||||
- armv7-unknown-linux-gnueabihf
|
- armv7-unknown-linux-gnueabihf
|
||||||
|
- armv7-unknown-linux-musleabihf
|
||||||
- riscv64gc-unknown-linux-gnu
|
- riscv64gc-unknown-linux-gnu
|
||||||
|
- loongarch64-unknown-linux-gnu
|
||||||
extra: ['bin']
|
extra: ['bin']
|
||||||
include:
|
include:
|
||||||
- target: aarch64-apple-darwin
|
- target: aarch64-apple-darwin
|
||||||
os: macos-latest
|
os: macos-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-apple-darwin
|
- target: x86_64-apple-darwin
|
||||||
os: macos-latest
|
os: macos-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-pc-windows-msvc
|
- target: x86_64-pc-windows-msvc
|
||||||
extra: 'bin'
|
extra: 'bin'
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-pc-windows-msvc
|
- target: x86_64-pc-windows-msvc
|
||||||
extra: msi
|
extra: msi
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: aarch64-pc-windows-msvc
|
- target: aarch64-pc-windows-msvc
|
||||||
extra: 'bin'
|
extra: 'bin'
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: aarch64-pc-windows-msvc
|
- target: aarch64-pc-windows-msvc
|
||||||
extra: msi
|
extra: msi
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-unknown-linux-gnu
|
- target: x86_64-unknown-linux-gnu
|
||||||
os: ubuntu-20.04
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
|
||||||
- target: x86_64-unknown-linux-musl
|
- target: x86_64-unknown-linux-musl
|
||||||
os: ubuntu-20.04
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
|
||||||
- target: aarch64-unknown-linux-gnu
|
- target: aarch64-unknown-linux-gnu
|
||||||
os: ubuntu-20.04
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
- target: aarch64-unknown-linux-musl
|
||||||
|
os: ubuntu-22.04
|
||||||
- target: armv7-unknown-linux-gnueabihf
|
- target: armv7-unknown-linux-gnueabihf
|
||||||
os: ubuntu-20.04
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
- target: armv7-unknown-linux-musleabihf
|
||||||
|
os: ubuntu-22.04
|
||||||
- target: riscv64gc-unknown-linux-gnu
|
- target: riscv64gc-unknown-linux-gnu
|
||||||
os: ubuntu-latest
|
os: ubuntu-22.04
|
||||||
target_rustflags: ''
|
- target: loongarch64-unknown-linux-gnu
|
||||||
|
os: ubuntu-22.04
|
||||||
|
|
||||||
runs-on: ${{matrix.os}}
|
runs-on: ${{matrix.os}}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4.1.5
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Update Rust Toolchain Target
|
- name: Update Rust Toolchain Target
|
||||||
run: |
|
run: |
|
||||||
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
||||||
|
|
||||||
- name: Setup Rust toolchain
|
- name: Setup Rust toolchain
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||||
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
||||||
with:
|
with:
|
||||||
cache: false
|
cache: false
|
||||||
rustflags: ''
|
rustflags: ''
|
||||||
|
|
||||||
- name: Setup Nushell
|
- name: Setup Nushell
|
||||||
uses: hustcer/setup-nu@v3.10
|
uses: hustcer/setup-nu@v3
|
||||||
with:
|
with:
|
||||||
version: 0.93.0
|
version: 0.101.0
|
||||||
|
|
||||||
- name: Release Nu Binary
|
- name: Release Nu Binary
|
||||||
id: nu
|
id: nu
|
||||||
run: nu .github/workflows/release-pkg.nu
|
run: nu .github/workflows/release-pkg.nu
|
||||||
env:
|
env:
|
||||||
RELEASE_TYPE: standard
|
|
||||||
OS: ${{ matrix.os }}
|
OS: ${{ matrix.os }}
|
||||||
REF: ${{ github.ref }}
|
REF: ${{ github.ref }}
|
||||||
TARGET: ${{ matrix.target }}
|
TARGET: ${{ matrix.target }}
|
||||||
_EXTRA_: ${{ matrix.extra }}
|
_EXTRA_: ${{ matrix.extra }}
|
||||||
TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}
|
|
||||||
|
|
||||||
# REF: https://github.com/marketplace/actions/gh-release
|
# WARN: Don't upgrade this action due to the release per asset issue.
|
||||||
|
# See: https://github.com/softprops/action-gh-release/issues/445
|
||||||
- name: Publish Archive
|
- name: Publish Archive
|
||||||
uses: softprops/action-gh-release@v2.0.5
|
uses: softprops/action-gh-release@v2.0.5
|
||||||
if: ${{ startsWith(github.ref, 'refs/tags/') }}
|
if: ${{ startsWith(github.ref, 'refs/tags/') }}
|
||||||
@ -112,92 +111,25 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
full:
|
sha256sum:
|
||||||
name: Full
|
needs: release
|
||||||
|
name: Create Sha256sum
|
||||||
strategy:
|
runs-on: ubuntu-latest
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
target:
|
|
||||||
- aarch64-apple-darwin
|
|
||||||
- x86_64-apple-darwin
|
|
||||||
- x86_64-pc-windows-msvc
|
|
||||||
- aarch64-pc-windows-msvc
|
|
||||||
- x86_64-unknown-linux-gnu
|
|
||||||
- x86_64-unknown-linux-musl
|
|
||||||
- aarch64-unknown-linux-gnu
|
|
||||||
extra: ['bin']
|
|
||||||
include:
|
|
||||||
- target: aarch64-apple-darwin
|
|
||||||
os: macos-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-apple-darwin
|
|
||||||
os: macos-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-pc-windows-msvc
|
|
||||||
extra: 'bin'
|
|
||||||
os: windows-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-pc-windows-msvc
|
|
||||||
extra: msi
|
|
||||||
os: windows-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: aarch64-pc-windows-msvc
|
|
||||||
extra: 'bin'
|
|
||||||
os: windows-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: aarch64-pc-windows-msvc
|
|
||||||
extra: msi
|
|
||||||
os: windows-latest
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-unknown-linux-gnu
|
|
||||||
os: ubuntu-20.04
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: x86_64-unknown-linux-musl
|
|
||||||
os: ubuntu-20.04
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
- target: aarch64-unknown-linux-gnu
|
|
||||||
os: ubuntu-20.04
|
|
||||||
target_rustflags: '--features=dataframe'
|
|
||||||
|
|
||||||
runs-on: ${{matrix.os}}
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4.1.5
|
- name: Download Release Archives
|
||||||
|
|
||||||
- name: Update Rust Toolchain Target
|
|
||||||
run: |
|
|
||||||
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
|
||||||
|
|
||||||
- name: Setup Rust toolchain
|
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
|
||||||
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
|
||||||
with:
|
|
||||||
cache: false
|
|
||||||
rustflags: ''
|
|
||||||
|
|
||||||
- name: Setup Nushell
|
|
||||||
uses: hustcer/setup-nu@v3.10
|
|
||||||
with:
|
|
||||||
version: 0.93.0
|
|
||||||
|
|
||||||
- name: Release Nu Binary
|
|
||||||
id: nu
|
|
||||||
run: nu .github/workflows/release-pkg.nu
|
|
||||||
env:
|
env:
|
||||||
RELEASE_TYPE: full
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
OS: ${{ matrix.os }}
|
run: >-
|
||||||
REF: ${{ github.ref }}
|
gh release download ${{ github.ref_name }}
|
||||||
TARGET: ${{ matrix.target }}
|
--repo ${{ github.repository }}
|
||||||
_EXTRA_: ${{ matrix.extra }}
|
--pattern '*'
|
||||||
TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}
|
--dir release
|
||||||
|
- name: Create Checksums
|
||||||
# REF: https://github.com/marketplace/actions/gh-release
|
run: cd release && shasum -a 256 * > ../SHA256SUMS
|
||||||
- name: Publish Archive
|
- name: Publish Checksums
|
||||||
uses: softprops/action-gh-release@v2.0.5
|
uses: softprops/action-gh-release@v2.0.5
|
||||||
if: ${{ startsWith(github.ref, 'refs/tags/') }}
|
|
||||||
with:
|
with:
|
||||||
draft: true
|
draft: true
|
||||||
files: ${{ steps.nu.outputs.archive }}
|
files: SHA256SUMS
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
4
.github/workflows/typos.yml
vendored
4
.github/workflows/typos.yml
vendored
@ -7,7 +7,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Actions Repository
|
- name: Checkout Actions Repository
|
||||||
uses: actions/checkout@v4.1.5
|
uses: actions/checkout@v4.1.7
|
||||||
|
|
||||||
- name: Check spelling
|
- name: Check spelling
|
||||||
uses: crate-ci/typos@v1.21.0
|
uses: crate-ci/typos@v1.29.5
|
||||||
|
26
CITATION.cff
Normal file
26
CITATION.cff
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
cff-version: 1.2.0
|
||||||
|
title: 'Nushell'
|
||||||
|
message: >-
|
||||||
|
If you use this software and wish to cite it,
|
||||||
|
you can use the metadata from this file.
|
||||||
|
type: software
|
||||||
|
authors:
|
||||||
|
- name: "The Nushell Project Team"
|
||||||
|
identifiers:
|
||||||
|
- type: url
|
||||||
|
value: 'https://github.com/nushell/nushell'
|
||||||
|
description: Repository
|
||||||
|
repository-code: 'https://github.com/nushell/nushell'
|
||||||
|
url: 'https://www.nushell.sh/'
|
||||||
|
abstract: >-
|
||||||
|
The goal of the Nushell project is to take the Unix
|
||||||
|
philosophy of shells, where pipes connect simple commands
|
||||||
|
together, and bring it to the modern style of development.
|
||||||
|
Thus, rather than being either a shell, or a programming
|
||||||
|
language, Nushell connects both by bringing a rich
|
||||||
|
programming language and a full-featured shell together
|
||||||
|
into one package.
|
||||||
|
keywords:
|
||||||
|
- nushell
|
||||||
|
- shell
|
||||||
|
license: MIT
|
@ -71,11 +71,6 @@ Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/ref
|
|||||||
cargo run
|
cargo run
|
||||||
```
|
```
|
||||||
|
|
||||||
- Build and run with dataframe support.
|
|
||||||
```nushell
|
|
||||||
cargo run --features=dataframe
|
|
||||||
```
|
|
||||||
|
|
||||||
- Run Clippy on Nushell:
|
- Run Clippy on Nushell:
|
||||||
|
|
||||||
```nushell
|
```nushell
|
||||||
@ -93,11 +88,6 @@ Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/ref
|
|||||||
cargo test --workspace
|
cargo test --workspace
|
||||||
```
|
```
|
||||||
|
|
||||||
along with dataframe tests
|
|
||||||
|
|
||||||
```nushell
|
|
||||||
cargo test --workspace --features=dataframe
|
|
||||||
```
|
|
||||||
or via the `toolkit.nu` command:
|
or via the `toolkit.nu` command:
|
||||||
```nushell
|
```nushell
|
||||||
use toolkit.nu test
|
use toolkit.nu test
|
||||||
|
4829
Cargo.lock
generated
4829
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
215
Cargo.toml
215
Cargo.toml
@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
|
|||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu"
|
name = "nu"
|
||||||
repository = "https://github.com/nushell/nushell"
|
repository = "https://github.com/nushell/nushell"
|
||||||
rust-version = "1.77.2"
|
rust-version = "1.83.0"
|
||||||
version = "0.93.1"
|
version = "0.102.1"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
@ -32,7 +32,6 @@ members = [
|
|||||||
"crates/nu-cmd-extra",
|
"crates/nu-cmd-extra",
|
||||||
"crates/nu-cmd-lang",
|
"crates/nu-cmd-lang",
|
||||||
"crates/nu-cmd-plugin",
|
"crates/nu-cmd-plugin",
|
||||||
"crates/nu-cmd-dataframe",
|
|
||||||
"crates/nu-command",
|
"crates/nu-command",
|
||||||
"crates/nu-color-config",
|
"crates/nu-color-config",
|
||||||
"crates/nu-explore",
|
"crates/nu-explore",
|
||||||
@ -40,6 +39,7 @@ members = [
|
|||||||
"crates/nu-lsp",
|
"crates/nu-lsp",
|
||||||
"crates/nu-pretty-hex",
|
"crates/nu-pretty-hex",
|
||||||
"crates/nu-protocol",
|
"crates/nu-protocol",
|
||||||
|
"crates/nu-derive-value",
|
||||||
"crates/nu-plugin",
|
"crates/nu-plugin",
|
||||||
"crates/nu-plugin-core",
|
"crates/nu-plugin-core",
|
||||||
"crates/nu-plugin-engine",
|
"crates/nu-plugin-engine",
|
||||||
@ -66,142 +66,160 @@ alphanumeric-sort = "1.5"
|
|||||||
ansi-str = "0.8"
|
ansi-str = "0.8"
|
||||||
anyhow = "1.0.82"
|
anyhow = "1.0.82"
|
||||||
base64 = "0.22.1"
|
base64 = "0.22.1"
|
||||||
bracoxide = "0.1.2"
|
bracoxide = "0.1.5"
|
||||||
brotli = "5.0"
|
brotli = "7.0"
|
||||||
byteorder = "1.5"
|
byteorder = "1.5"
|
||||||
bytesize = "1.3"
|
bytes = "1"
|
||||||
calamine = "0.24.0"
|
bytesize = "1.3.1"
|
||||||
|
calamine = "0.26.1"
|
||||||
chardetng = "0.1.17"
|
chardetng = "0.1.17"
|
||||||
chrono = { default-features = false, version = "0.4.34" }
|
chrono = { default-features = false, version = "0.4.34" }
|
||||||
chrono-humanize = "0.2.3"
|
chrono-humanize = "0.2.3"
|
||||||
chrono-tz = "0.8"
|
chrono-tz = "0.10"
|
||||||
crossbeam-channel = "0.5.8"
|
crossbeam-channel = "0.5.8"
|
||||||
crossterm = "0.27"
|
crossterm = "0.28.1"
|
||||||
csv = "1.3"
|
csv = "1.3"
|
||||||
ctrlc = "3.4"
|
ctrlc = "3.4"
|
||||||
|
devicons = "0.6.12"
|
||||||
dialoguer = { default-features = false, version = "0.11" }
|
dialoguer = { default-features = false, version = "0.11" }
|
||||||
digest = { default-features = false, version = "0.10" }
|
digest = { default-features = false, version = "0.10" }
|
||||||
dirs-next = "2.0"
|
dirs = "5.0"
|
||||||
|
dirs-sys = "0.4"
|
||||||
dtparse = "2.0"
|
dtparse = "2.0"
|
||||||
encoding_rs = "0.8"
|
encoding_rs = "0.8"
|
||||||
fancy-regex = "0.13"
|
fancy-regex = "0.14"
|
||||||
filesize = "0.2"
|
filesize = "0.2"
|
||||||
filetime = "0.2"
|
filetime = "0.2"
|
||||||
fs_extra = "1.3"
|
|
||||||
fuzzy-matcher = "0.3"
|
|
||||||
hamcrest2 = "0.3"
|
|
||||||
heck = "0.5.0"
|
heck = "0.5.0"
|
||||||
human-date-parser = "0.1.1"
|
human-date-parser = "0.2.0"
|
||||||
indexmap = "2.2"
|
indexmap = "2.7"
|
||||||
indicatif = "0.17"
|
indicatif = "0.17"
|
||||||
interprocess = "2.1.0"
|
interprocess = "2.2.0"
|
||||||
is_executable = "1.0"
|
is_executable = "1.0"
|
||||||
itertools = "0.12"
|
itertools = "0.13"
|
||||||
libc = "0.2"
|
libc = "0.2"
|
||||||
libproc = "0.14"
|
libproc = "0.14"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
lru = "0.12"
|
lru = "0.12"
|
||||||
lscolors = { version = "0.17", default-features = false }
|
lscolors = { version = "0.17", default-features = false }
|
||||||
lsp-server = "0.7.5"
|
lsp-server = "0.7.8"
|
||||||
lsp-types = "0.95.0"
|
lsp-types = { version = "0.97.0", features = ["proposed"] }
|
||||||
|
lsp-textdocument = "0.4.1"
|
||||||
mach2 = "0.4"
|
mach2 = "0.4"
|
||||||
md5 = { version = "0.10", package = "md-5" }
|
md5 = { version = "0.10", package = "md-5" }
|
||||||
miette = "7.2"
|
miette = "7.5"
|
||||||
mime = "0.3"
|
mime = "0.3.17"
|
||||||
mime_guess = "2.0"
|
mime_guess = "2.0"
|
||||||
mockito = { version = "1.4", default-features = false }
|
mockito = { version = "1.6", default-features = false }
|
||||||
|
multipart-rs = "0.1.13"
|
||||||
native-tls = "0.2"
|
native-tls = "0.2"
|
||||||
nix = { version = "0.28", default-features = false }
|
nix = { version = "0.29", default-features = false }
|
||||||
notify-debouncer-full = { version = "0.3", default-features = false }
|
notify-debouncer-full = { version = "0.3", default-features = false }
|
||||||
nu-ansi-term = "0.50.0"
|
nu-ansi-term = "0.50.1"
|
||||||
|
nucleo-matcher = "0.3"
|
||||||
num-format = "0.4"
|
num-format = "0.4"
|
||||||
num-traits = "0.2"
|
num-traits = "0.2"
|
||||||
|
oem_cp = "2.0.0"
|
||||||
omnipath = "0.1"
|
omnipath = "0.1"
|
||||||
once_cell = "1.18"
|
open = "5.3"
|
||||||
open = "5.1"
|
os_pipe = { version = "1.2", features = ["io_safety"] }
|
||||||
os_pipe = { version = "1.1", features = ["io_safety"] }
|
|
||||||
pathdiff = "0.2"
|
pathdiff = "0.2"
|
||||||
percent-encoding = "2"
|
percent-encoding = "2"
|
||||||
pretty_assertions = "1.4"
|
pretty_assertions = "1.4"
|
||||||
print-positions = "0.6"
|
print-positions = "0.6"
|
||||||
procfs = "0.16.0"
|
proc-macro-error2 = "2.0"
|
||||||
|
proc-macro2 = "1.0"
|
||||||
|
procfs = "0.17.0"
|
||||||
pwd = "1.3"
|
pwd = "1.3"
|
||||||
quick-xml = "0.31.0"
|
quick-xml = "0.37.0"
|
||||||
quickcheck = "1.0"
|
quickcheck = "1.0"
|
||||||
quickcheck_macros = "1.0"
|
quickcheck_macros = "1.0"
|
||||||
|
quote = "1.0"
|
||||||
rand = "0.8"
|
rand = "0.8"
|
||||||
|
getrandom = "0.2" # pick same version that rand requires
|
||||||
|
rand_chacha = "0.3.1"
|
||||||
ratatui = "0.26"
|
ratatui = "0.26"
|
||||||
rayon = "1.10"
|
rayon = "1.10"
|
||||||
reedline = "0.32.0"
|
reedline = "0.38.0"
|
||||||
regex = "1.9.5"
|
|
||||||
rmp = "0.8"
|
rmp = "0.8"
|
||||||
rmp-serde = "1.3"
|
rmp-serde = "1.3"
|
||||||
ropey = "1.6.1"
|
roxmltree = "0.20"
|
||||||
roxmltree = "0.19"
|
rstest = { version = "0.23", default-features = false }
|
||||||
rstest = { version = "0.18", default-features = false }
|
rstest_reuse = "0.7"
|
||||||
rusqlite = "0.31"
|
rusqlite = "0.31"
|
||||||
rust-embed = "8.4.0"
|
rust-embed = "8.5.0"
|
||||||
same-file = "1.0"
|
scopeguard = { version = "1.2.0" }
|
||||||
serde = { version = "1.0", default-features = false }
|
serde = { version = "1.0" }
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
serde_urlencoded = "0.7.1"
|
serde_urlencoded = "0.7.1"
|
||||||
serde_yaml = "0.9"
|
serde_yaml = "0.9.33"
|
||||||
sha2 = "0.10"
|
sha2 = "0.10"
|
||||||
strip-ansi-escapes = "0.2.0"
|
strip-ansi-escapes = "0.2.0"
|
||||||
sysinfo = "0.30"
|
syn = "2.0"
|
||||||
tabled = { version = "0.14.0", default-features = false }
|
sysinfo = "0.33"
|
||||||
tempfile = "3.10"
|
tabled = { version = "0.17.0", default-features = false }
|
||||||
terminal_size = "0.3"
|
tempfile = "3.15"
|
||||||
titlecase = "2.0"
|
titlecase = "3.0"
|
||||||
toml = "0.8"
|
toml = "0.8"
|
||||||
trash = "3.3"
|
trash = "5.2"
|
||||||
|
update-informer = { version = "1.2.0", default-features = false, features = ["github", "native-tls", "ureq"] }
|
||||||
umask = "2.1"
|
umask = "2.1"
|
||||||
unicode-segmentation = "1.11"
|
unicode-segmentation = "1.12"
|
||||||
unicode-width = "0.1"
|
unicode-width = "0.2"
|
||||||
ureq = { version = "2.9", default-features = false }
|
ureq = { version = "2.12", default-features = false }
|
||||||
url = "2.2"
|
url = "2.2"
|
||||||
uu_cp = "0.0.25"
|
uu_cp = "0.0.29"
|
||||||
uu_mkdir = "0.0.25"
|
uu_mkdir = "0.0.29"
|
||||||
uu_mktemp = "0.0.25"
|
uu_mktemp = "0.0.29"
|
||||||
uu_mv = "0.0.25"
|
uu_mv = "0.0.29"
|
||||||
uu_whoami = "0.0.25"
|
uu_touch = "0.0.29"
|
||||||
uu_uname = "0.0.25"
|
uu_whoami = "0.0.29"
|
||||||
uucore = "0.0.25"
|
uu_uname = "0.0.29"
|
||||||
uuid = "1.8.0"
|
uucore = "0.0.29"
|
||||||
|
uuid = "1.12.0"
|
||||||
v_htmlescape = "0.15.0"
|
v_htmlescape = "0.15.0"
|
||||||
wax = "0.6"
|
wax = "0.6"
|
||||||
which = "6.0.0"
|
web-time = "1.1.0"
|
||||||
windows = "0.54"
|
which = "7.0.0"
|
||||||
|
windows = "0.56"
|
||||||
|
windows-sys = "0.48"
|
||||||
winreg = "0.52"
|
winreg = "0.52"
|
||||||
|
memchr = "2.7.4"
|
||||||
|
|
||||||
|
[workspace.lints.clippy]
|
||||||
|
# Warning: workspace lints affect library code as well as tests, so don't enable lints that would be too noisy in tests like that.
|
||||||
|
# todo = "warn"
|
||||||
|
unchecked_duration_subtraction = "warn"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-cli = { path = "./crates/nu-cli", version = "0.93.1" }
|
nu-cli = { path = "./crates/nu-cli", version = "0.102.1" }
|
||||||
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.93.1" }
|
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.102.1" }
|
||||||
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.93.1" }
|
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.102.1" }
|
||||||
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.93.1", optional = true }
|
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.102.1", optional = true }
|
||||||
nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.93.1", features = [
|
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.102.1" }
|
||||||
"dataframe",
|
nu-command = { path = "./crates/nu-command", version = "0.102.1" }
|
||||||
], optional = true }
|
nu-engine = { path = "./crates/nu-engine", version = "0.102.1" }
|
||||||
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.93.1" }
|
nu-explore = { path = "./crates/nu-explore", version = "0.102.1" }
|
||||||
nu-command = { path = "./crates/nu-command", version = "0.93.1" }
|
nu-lsp = { path = "./crates/nu-lsp/", version = "0.102.1" }
|
||||||
nu-engine = { path = "./crates/nu-engine", version = "0.93.1" }
|
nu-parser = { path = "./crates/nu-parser", version = "0.102.1" }
|
||||||
nu-explore = { path = "./crates/nu-explore", version = "0.93.1" }
|
nu-path = { path = "./crates/nu-path", version = "0.102.1" }
|
||||||
nu-lsp = { path = "./crates/nu-lsp/", version = "0.93.1" }
|
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.102.1" }
|
||||||
nu-parser = { path = "./crates/nu-parser", version = "0.93.1" }
|
nu-protocol = { path = "./crates/nu-protocol", version = "0.102.1" }
|
||||||
nu-path = { path = "./crates/nu-path", version = "0.93.1" }
|
nu-std = { path = "./crates/nu-std", version = "0.102.1" }
|
||||||
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.93.1" }
|
nu-system = { path = "./crates/nu-system", version = "0.102.1" }
|
||||||
nu-protocol = { path = "./crates/nu-protocol", version = "0.93.1" }
|
nu-utils = { path = "./crates/nu-utils", version = "0.102.1" }
|
||||||
nu-std = { path = "./crates/nu-std", version = "0.93.1" }
|
|
||||||
nu-system = { path = "./crates/nu-system", version = "0.93.1" }
|
|
||||||
nu-utils = { path = "./crates/nu-utils", version = "0.93.1" }
|
|
||||||
|
|
||||||
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
|
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
|
||||||
|
|
||||||
crossterm = { workspace = true }
|
crossterm = { workspace = true }
|
||||||
ctrlc = { workspace = true }
|
ctrlc = { workspace = true }
|
||||||
|
dirs = { workspace = true }
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
|
miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
|
||||||
mimalloc = { version = "0.1.37", default-features = false, optional = true }
|
mimalloc = { version = "0.1.42", default-features = false, optional = true }
|
||||||
|
multipart-rs = { workspace = true }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
simplelog = "0.12"
|
simplelog = "0.12"
|
||||||
time = "0.3"
|
time = "0.3"
|
||||||
@ -222,33 +240,36 @@ nix = { workspace = true, default-features = false, features = [
|
|||||||
] }
|
] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
nu-test-support = { path = "./crates/nu-test-support", version = "0.93.1" }
|
nu-test-support = { path = "./crates/nu-test-support", version = "0.102.1" }
|
||||||
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.93.1" }
|
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.102.1" }
|
||||||
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.93.1" }
|
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.102.1" }
|
||||||
assert_cmd = "2.0"
|
assert_cmd = "2.0"
|
||||||
dirs-next = { workspace = true }
|
dirs = { workspace = true }
|
||||||
tango-bench = "0.5"
|
tango-bench = "0.6"
|
||||||
pretty_assertions = { workspace = true }
|
pretty_assertions = { workspace = true }
|
||||||
|
fancy-regex = { workspace = true }
|
||||||
rstest = { workspace = true, default-features = false }
|
rstest = { workspace = true, default-features = false }
|
||||||
serial_test = "3.1"
|
serial_test = "3.2"
|
||||||
tempfile = { workspace = true }
|
tempfile = { workspace = true }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
plugin = [
|
plugin = [
|
||||||
"nu-plugin-engine",
|
# crates
|
||||||
"nu-cmd-plugin",
|
"nu-cmd-plugin",
|
||||||
|
"nu-plugin-engine",
|
||||||
|
|
||||||
|
# features
|
||||||
"nu-cli/plugin",
|
"nu-cli/plugin",
|
||||||
"nu-parser/plugin",
|
"nu-cmd-lang/plugin",
|
||||||
"nu-command/plugin",
|
"nu-command/plugin",
|
||||||
"nu-protocol/plugin",
|
|
||||||
"nu-engine/plugin",
|
"nu-engine/plugin",
|
||||||
|
"nu-engine/plugin",
|
||||||
|
"nu-parser/plugin",
|
||||||
|
"nu-protocol/plugin",
|
||||||
]
|
]
|
||||||
default = ["default-no-clipboard", "system-clipboard"]
|
|
||||||
# Enables convenient omitting of the system-clipboard feature, as it leads to problems in ci on linux
|
default = [
|
||||||
# See https://github.com/nushell/nushell/pull/11535
|
|
||||||
default-no-clipboard = [
|
|
||||||
"plugin",
|
"plugin",
|
||||||
"which-support",
|
|
||||||
"trash-support",
|
"trash-support",
|
||||||
"sqlite",
|
"sqlite",
|
||||||
"mimalloc",
|
"mimalloc",
|
||||||
@ -261,6 +282,8 @@ stable = ["default"]
|
|||||||
static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
|
static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
|
||||||
|
|
||||||
mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
|
mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
|
||||||
|
# Optional system clipboard support in `reedline`, this behavior has problematic compatibility with some systems.
|
||||||
|
# Missing X server/ Wayland can cause issues
|
||||||
system-clipboard = [
|
system-clipboard = [
|
||||||
"reedline/system_clipboard",
|
"reedline/system_clipboard",
|
||||||
"nu-cli/system-clipboard",
|
"nu-cli/system-clipboard",
|
||||||
@ -268,14 +291,10 @@ system-clipboard = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
# Stable (Default)
|
# Stable (Default)
|
||||||
which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"]
|
|
||||||
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]
|
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]
|
||||||
|
|
||||||
# Dataframe feature for nushell
|
|
||||||
dataframe = ["dep:nu-cmd-dataframe", "nu-cmd-lang/dataframe"]
|
|
||||||
|
|
||||||
# SQLite commands for nushell
|
# SQLite commands for nushell
|
||||||
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]
|
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite", "nu-std/sqlite"]
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
opt-level = "s" # Optimize for size
|
opt-level = "s" # Optimize for size
|
||||||
|
2
LICENSE
2
LICENSE
@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2019 - 2023 The Nushell Project Developers
|
Copyright (c) 2019 - 2025 The Nushell Project Developers
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
122
README.md
122
README.md
@ -4,7 +4,6 @@
|
|||||||
[](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
|
[](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
|
||||||
[](https://discord.gg/NtAbbGn)
|
[](https://discord.gg/NtAbbGn)
|
||||||
[](https://changelog.com/podcast/363)
|
[](https://changelog.com/podcast/363)
|
||||||
[](https://twitter.com/nu_shell)
|
|
||||||
[](https://github.com/nushell/nushell/graphs/commit-activity)
|
[](https://github.com/nushell/nushell/graphs/commit-activity)
|
||||||
[](https://github.com/nushell/nushell/graphs/contributors)
|
[](https://github.com/nushell/nushell/graphs/contributors)
|
||||||
|
|
||||||
@ -35,7 +34,7 @@ This project has reached a minimum-viable-product level of quality. Many people
|
|||||||
|
|
||||||
The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/commands/), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).
|
The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/commands/), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).
|
||||||
|
|
||||||
We're also active on [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell); come and chat with us!
|
We're also active on [Discord](https://discord.gg/NtAbbGn); come and chat with us!
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
@ -52,13 +51,13 @@ To use `Nu` in GitHub Action, check [setup-nu](https://github.com/marketplace/ac
|
|||||||
|
|
||||||
Detailed installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). Nu is available via many package managers:
|
Detailed installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). Nu is available via many package managers:
|
||||||
|
|
||||||
[](https://repology.org/project/nushell/versions)
|
[](https://repology.org/project/nushell/versions)
|
||||||
|
|
||||||
For details about which platforms the Nushell team actively supports, see [our platform support policy](devdocs/PLATFORM_SUPPORT.md).
|
For details about which platforms the Nushell team actively supports, see [our platform support policy](devdocs/PLATFORM_SUPPORT.md).
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
The default configurations can be found at [sample_config](crates/nu-utils/src/sample_config)
|
The default configurations can be found at [sample_config](crates/nu-utils/src/default_files)
|
||||||
which are the configuration files one gets when they startup Nushell for the first time.
|
which are the configuration files one gets when they startup Nushell for the first time.
|
||||||
|
|
||||||
It sets all of the default configuration to run Nushell. From here one can
|
It sets all of the default configuration to run Nushell. From here one can
|
||||||
@ -95,44 +94,44 @@ Commands that work in the pipeline fit into one of three categories:
|
|||||||
Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
|
Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> ls | where type == "dir" | table
|
ls | where type == "dir" | table
|
||||||
╭────┬──────────┬──────┬─────────┬───────────────╮
|
# => ╭────┬──────────┬──────┬─────────┬───────────────╮
|
||||||
│ # │ name │ type │ size │ modified │
|
# => │ # │ name │ type │ size │ modified │
|
||||||
├────┼──────────┼──────┼─────────┼───────────────┤
|
# => ├────┼──────────┼──────┼─────────┼───────────────┤
|
||||||
│ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
|
# => │ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
|
||||||
│ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
|
# => │ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
|
||||||
│ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
|
# => │ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||||
│ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
|
# => │ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
|
||||||
│ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
|
# => │ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
|
||||||
│ 5 │ images │ dir │ 0 B │ 2 weeks ago │
|
# => │ 5 │ images │ dir │ 0 B │ 2 weeks ago │
|
||||||
│ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
|
# => │ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
|
||||||
│ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
|
# => │ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
|
||||||
│ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
|
# => │ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||||
│ 9 │ target │ dir │ 0 B │ a day ago │
|
# => │ 9 │ target │ dir │ 0 B │ a day ago │
|
||||||
│ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
|
# => │ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||||
│ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
|
# => │ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
|
||||||
╰────┴──────────┴──────┴─────────┴───────────────╯
|
# => ╰────┴──────────┴──────┴─────────┴───────────────╯
|
||||||
```
|
```
|
||||||
|
|
||||||
Because most of the time you'll want to see the output of a pipeline, `table` is assumed.
|
Because most of the time you'll want to see the output of a pipeline, `table` is assumed.
|
||||||
We could have also written the above:
|
We could have also written the above:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> ls | where type == "dir"
|
ls | where type == "dir"
|
||||||
```
|
```
|
||||||
|
|
||||||
Being able to use the same commands and compose them differently is an important philosophy in Nu.
|
Being able to use the same commands and compose them differently is an important philosophy in Nu.
|
||||||
For example, we could use the built-in `ps` command to get a list of the running processes, using the same `where` as above.
|
For example, we could use the built-in `ps` command to get a list of the running processes, using the same `where` as above.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> ps | where cpu > 0
|
ps | where cpu > 0
|
||||||
╭───┬───────┬───────────┬───────┬───────────┬───────────╮
|
# => ╭───┬───────┬───────────┬───────┬───────────┬───────────╮
|
||||||
│ # │ pid │ name │ cpu │ mem │ virtual │
|
# => │ # │ pid │ name │ cpu │ mem │ virtual │
|
||||||
├───┼───────┼───────────┼───────┼───────────┼───────────┤
|
# => ├───┼───────┼───────────┼───────┼───────────┼───────────┤
|
||||||
│ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
|
# => │ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
|
||||||
│ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
|
# => │ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
|
||||||
│ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
|
# => │ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
|
||||||
╰───┴───────┴───────────┴───────┴───────────┴───────────╯
|
# => ╰───┴───────┴───────────┴───────┴───────────┴───────────╯
|
||||||
```
|
```
|
||||||
|
|
||||||
### Opening files
|
### Opening files
|
||||||
@ -141,46 +140,46 @@ Nu can load file and URL contents as raw text or structured data (if it recogniz
|
|||||||
For example, you can load a .toml file as structured data and explore it:
|
For example, you can load a .toml file as structured data and explore it:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> open Cargo.toml
|
open Cargo.toml
|
||||||
╭──────────────────┬────────────────────╮
|
# => ╭──────────────────┬────────────────────╮
|
||||||
│ bin │ [table 1 row] │
|
# => │ bin │ [table 1 row] │
|
||||||
│ dependencies │ {record 25 fields} │
|
# => │ dependencies │ {record 25 fields} │
|
||||||
│ dev-dependencies │ {record 8 fields} │
|
# => │ dev-dependencies │ {record 8 fields} │
|
||||||
│ features │ {record 10 fields} │
|
# => │ features │ {record 10 fields} │
|
||||||
│ package │ {record 13 fields} │
|
# => │ package │ {record 13 fields} │
|
||||||
│ patch │ {record 1 field} │
|
# => │ patch │ {record 1 field} │
|
||||||
│ profile │ {record 3 fields} │
|
# => │ profile │ {record 3 fields} │
|
||||||
│ target │ {record 3 fields} │
|
# => │ target │ {record 3 fields} │
|
||||||
│ workspace │ {record 1 field} │
|
# => │ workspace │ {record 1 field} │
|
||||||
╰──────────────────┴────────────────────╯
|
# => ╰──────────────────┴────────────────────╯
|
||||||
```
|
```
|
||||||
|
|
||||||
We can pipe this into a command that gets the contents of one of the columns:
|
We can pipe this into a command that gets the contents of one of the columns:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> open Cargo.toml | get package
|
open Cargo.toml | get package
|
||||||
╭───────────────┬────────────────────────────────────╮
|
# => ╭───────────────┬────────────────────────────────────╮
|
||||||
│ authors │ [list 1 item] │
|
# => │ authors │ [list 1 item] │
|
||||||
│ default-run │ nu │
|
# => │ default-run │ nu │
|
||||||
│ description │ A new type of shell │
|
# => │ description │ A new type of shell │
|
||||||
│ documentation │ https://www.nushell.sh/book/ │
|
# => │ documentation │ https://www.nushell.sh/book/ │
|
||||||
│ edition │ 2018 │
|
# => │ edition │ 2018 │
|
||||||
│ exclude │ [list 1 item] │
|
# => │ exclude │ [list 1 item] │
|
||||||
│ homepage │ https://www.nushell.sh │
|
# => │ homepage │ https://www.nushell.sh │
|
||||||
│ license │ MIT │
|
# => │ license │ MIT │
|
||||||
│ metadata │ {record 1 field} │
|
# => │ metadata │ {record 1 field} │
|
||||||
│ name │ nu │
|
# => │ name │ nu │
|
||||||
│ repository │ https://github.com/nushell/nushell │
|
# => │ repository │ https://github.com/nushell/nushell │
|
||||||
│ rust-version │ 1.60 │
|
# => │ rust-version │ 1.60 │
|
||||||
│ version │ 0.72.0 │
|
# => │ version │ 0.72.0 │
|
||||||
╰───────────────┴────────────────────────────────────╯
|
# => ╰───────────────┴────────────────────────────────────╯
|
||||||
```
|
```
|
||||||
|
|
||||||
And if needed we can drill down further:
|
And if needed we can drill down further:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> open Cargo.toml | get package.version
|
open Cargo.toml | get package.version
|
||||||
0.72.0
|
# => 0.72.0
|
||||||
```
|
```
|
||||||
|
|
||||||
### Plugins
|
### Plugins
|
||||||
@ -222,13 +221,14 @@ Please submit an issue or PR to be added to this list.
|
|||||||
- [clap](https://github.com/clap-rs/clap/tree/master/clap_complete_nushell)
|
- [clap](https://github.com/clap-rs/clap/tree/master/clap_complete_nushell)
|
||||||
- [Dorothy](http://github.com/bevry/dorothy)
|
- [Dorothy](http://github.com/bevry/dorothy)
|
||||||
- [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
|
- [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
|
||||||
|
- [x-cmd](https://x-cmd.com/mod/nu)
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
|
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
|
||||||
|
|
||||||
<a href="https://github.com/nushell/nushell/graphs/contributors">
|
<a href="https://github.com/nushell/nushell/graphs/contributors">
|
||||||
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750" />
|
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750&columns=20" />
|
||||||
</a>
|
</a>
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
29
SECURITY.md
Normal file
29
SECURITY.md
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
# Security Policy
|
||||||
|
|
||||||
|
As a shell and programming language Nushell provides you with great powers and the potential to do dangerous things to your computer and data. Whenever there is a risk that a malicious actor can abuse a bug or a violation of documented behavior/assumptions in Nushell to harm you this is a *security* risk.
|
||||||
|
We want to fix those issues without exposing our users to unnecessary risk. Thus we want to explain our security policy.
|
||||||
|
Additional issues may be part of *safety* where the behavior of Nushell as designed and implemented can cause unintended harm or a bug causes damage without the involvement of a third party.
|
||||||
|
|
||||||
|
## Supported Versions
|
||||||
|
|
||||||
|
As Nushell is still under very active pre-stable development, the only version the core team prioritizes for security and safety fixes is the [most recent version as published on GitHub](https://github.com/nushell/nushell/releases/latest).
|
||||||
|
Only if you provide a strong reasoning and the necessary resources, will we consider blessing a backported fix with an official patch release for a previous version.
|
||||||
|
|
||||||
|
## Reporting a Vulnerability
|
||||||
|
|
||||||
|
If you suspect that a bug or behavior of Nushell can affect security or may be potentially exploitable, please report the issue to us in private.
|
||||||
|
Either reach out to the core team on [our Discord server](https://discord.gg/NtAbbGn) to arrange a private channel or use the [GitHub vulnerability reporting form](https://github.com/nushell/nushell/security/advisories/new).
|
||||||
|
Please try to answer the following questions:
|
||||||
|
- How can we reach you for further questions?
|
||||||
|
- What is the bug? Which system of Nushell may be affected?
|
||||||
|
- Do you have proof-of-concept for a potential exploit or have you observed an exploit in the wild?
|
||||||
|
- What is your assessment of the severity based on what could be impacted should the bug be exploited?
|
||||||
|
- Are additional people aware of the issue or deserve credit for identifying the issue?
|
||||||
|
|
||||||
|
We will try to get back to you within a week with:
|
||||||
|
- acknowledging the receipt of the report
|
||||||
|
- an initial plan of how we want to address this including the primary points of contact for further communication
|
||||||
|
- our preliminary assessment of how severe we judge the issue
|
||||||
|
- a proposal for how we can coordinate responsible disclosure (e.g. how we ship the bugfix, if we need to coordinate with distribution maintainers, when you can release a blog post if you want to etc.)
|
||||||
|
|
||||||
|
For purely *safety* related issues where the impact is severe by direct user action instead of malicious input or third parties, feel free to open a regular issue. If we deem that there may be an additional *security* risk on a *safety* issue we may continue discussions in a restricted forum.
|
@ -4,11 +4,14 @@ use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
|
|||||||
|
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, Stack},
|
engine::{EngineState, Stack},
|
||||||
PipelineData, Span, Spanned, Value,
|
PipelineData, Signals, Span, Spanned, Value,
|
||||||
};
|
};
|
||||||
use nu_std::load_standard_library;
|
use nu_std::load_standard_library;
|
||||||
use nu_utils::{get_default_config, get_default_env};
|
use nu_utils::{get_default_config, get_default_env};
|
||||||
use std::rc::Rc;
|
use std::{
|
||||||
|
rc::Rc,
|
||||||
|
sync::{atomic::AtomicBool, Arc},
|
||||||
|
};
|
||||||
|
|
||||||
use std::hint::black_box;
|
use std::hint::black_box;
|
||||||
|
|
||||||
@ -42,13 +45,13 @@ fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
let mut stack = Stack::new();
|
let mut stack = Stack::new();
|
||||||
|
|
||||||
evaluate_commands(
|
evaluate_commands(
|
||||||
&commands,
|
&commands,
|
||||||
&mut engine,
|
&mut engine,
|
||||||
&mut stack,
|
&mut stack,
|
||||||
PipelineData::empty(),
|
PipelineData::empty(),
|
||||||
None,
|
Default::default(),
|
||||||
false,
|
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@ -90,8 +93,7 @@ fn bench_command(
|
|||||||
&mut engine,
|
&mut engine,
|
||||||
&mut stack,
|
&mut stack,
|
||||||
PipelineData::empty(),
|
PipelineData::empty(),
|
||||||
None,
|
Default::default(),
|
||||||
false,
|
|
||||||
)
|
)
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
);
|
);
|
||||||
@ -250,14 +252,12 @@ fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bench_eval_interleave_with_ctrlc(n: i32) -> impl IntoBenchmarks {
|
fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
|
||||||
let mut engine = setup_engine();
|
let mut engine = setup_engine();
|
||||||
engine.ctrlc = Some(std::sync::Arc::new(std::sync::atomic::AtomicBool::new(
|
engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
|
||||||
false,
|
|
||||||
)));
|
|
||||||
let stack = Stack::new();
|
let stack = Stack::new();
|
||||||
bench_command(
|
bench_command(
|
||||||
&format!("eval_interleave_with_ctrlc_{n}"),
|
&format!("eval_interleave_with_interrupt_{n}"),
|
||||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||||
stack,
|
stack,
|
||||||
engine,
|
engine,
|
||||||
@ -445,9 +445,9 @@ tango_benchmarks!(
|
|||||||
bench_eval_interleave(100),
|
bench_eval_interleave(100),
|
||||||
bench_eval_interleave(1_000),
|
bench_eval_interleave(1_000),
|
||||||
bench_eval_interleave(10_000),
|
bench_eval_interleave(10_000),
|
||||||
bench_eval_interleave_with_ctrlc(100),
|
bench_eval_interleave_with_interrupt(100),
|
||||||
bench_eval_interleave_with_ctrlc(1_000),
|
bench_eval_interleave_with_interrupt(1_000),
|
||||||
bench_eval_interleave_with_ctrlc(10_000),
|
bench_eval_interleave_with_interrupt(10_000),
|
||||||
// For
|
// For
|
||||||
bench_eval_for(1),
|
bench_eval_for(1),
|
||||||
bench_eval_for(10),
|
bench_eval_for(10),
|
||||||
|
3
clippy/wasm/clippy.toml
Normal file
3
clippy/wasm/clippy.toml
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
[[disallowed-types]]
|
||||||
|
path = "std::time::Instant"
|
||||||
|
reason = "WASM panics if used, use `web_time::Instant` instead"
|
@ -5,41 +5,40 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
|
|||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-cli"
|
name = "nu-cli"
|
||||||
version = "0.93.1"
|
version = "0.102.1"
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
bench = false
|
bench = false
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.93.1" }
|
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.102.1" }
|
||||||
nu-command = { path = "../nu-command", version = "0.93.1" }
|
nu-command = { path = "../nu-command", version = "0.102.1" }
|
||||||
nu-test-support = { path = "../nu-test-support", version = "0.93.1" }
|
nu-test-support = { path = "../nu-test-support", version = "0.102.1" }
|
||||||
rstest = { workspace = true, default-features = false }
|
rstest = { workspace = true, default-features = false }
|
||||||
tempfile = { workspace = true }
|
tempfile = { workspace = true }
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.93.1" }
|
nu-cmd-base = { path = "../nu-cmd-base", version = "0.102.1" }
|
||||||
nu-engine = { path = "../nu-engine", version = "0.93.1" }
|
nu-engine = { path = "../nu-engine", version = "0.102.1", features = ["os"] }
|
||||||
nu-path = { path = "../nu-path", version = "0.93.1" }
|
nu-glob = { path = "../nu-glob", version = "0.102.1" }
|
||||||
nu-parser = { path = "../nu-parser", version = "0.93.1" }
|
nu-path = { path = "../nu-path", version = "0.102.1" }
|
||||||
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.93.1", optional = true }
|
nu-parser = { path = "../nu-parser", version = "0.102.1" }
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.93.1" }
|
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.102.1", optional = true }
|
||||||
nu-utils = { path = "../nu-utils", version = "0.93.1" }
|
nu-protocol = { path = "../nu-protocol", version = "0.102.1", features = ["os"] }
|
||||||
nu-color-config = { path = "../nu-color-config", version = "0.93.1" }
|
nu-utils = { path = "../nu-utils", version = "0.102.1" }
|
||||||
|
nu-color-config = { path = "../nu-color-config", version = "0.102.1" }
|
||||||
nu-ansi-term = { workspace = true }
|
nu-ansi-term = { workspace = true }
|
||||||
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
|
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
|
||||||
|
|
||||||
chrono = { default-features = false, features = ["std"], workspace = true }
|
chrono = { default-features = false, features = ["std"], workspace = true }
|
||||||
crossterm = { workspace = true }
|
crossterm = { workspace = true }
|
||||||
fancy-regex = { workspace = true }
|
fancy-regex = { workspace = true }
|
||||||
fuzzy-matcher = { workspace = true }
|
|
||||||
is_executable = { workspace = true }
|
is_executable = { workspace = true }
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
miette = { workspace = true, features = ["fancy-no-backtrace"] }
|
|
||||||
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
|
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
|
||||||
once_cell = { workspace = true }
|
miette = { workspace = true, features = ["fancy-no-backtrace"] }
|
||||||
|
nucleo-matcher = { workspace = true }
|
||||||
percent-encoding = { workspace = true }
|
percent-encoding = { workspace = true }
|
||||||
pathdiff = { workspace = true }
|
|
||||||
sysinfo = { workspace = true }
|
sysinfo = { workspace = true }
|
||||||
unicode-segmentation = { workspace = true }
|
unicode-segmentation = { workspace = true }
|
||||||
uuid = { workspace = true, features = ["v4"] }
|
uuid = { workspace = true, features = ["v4"] }
|
||||||
@ -48,3 +47,6 @@ which = { workspace = true }
|
|||||||
[features]
|
[features]
|
||||||
plugin = ["nu-plugin-engine"]
|
plugin = ["nu-plugin-engine"]
|
||||||
system-clipboard = ["reedline/system_clipboard"]
|
system-clipboard = ["reedline/system_clipboard"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
7
crates/nu-cli/README.md
Normal file
7
crates/nu-cli/README.md
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
This crate implements the core functionality of the interactive Nushell REPL and interfaces with `reedline`.
|
||||||
|
Currently implements the syntax highlighting and completions logic.
|
||||||
|
Furthermore includes a few commands that are specific to `reedline`
|
||||||
|
|
||||||
|
## Internal Nushell crate
|
||||||
|
|
||||||
|
This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
|
@ -14,7 +14,7 @@ impl Command for Commandline {
|
|||||||
.category(Category::Core)
|
.category(Category::Core)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"View the current command line input buffer."
|
"View the current command line input buffer."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -29,12 +29,12 @@ impl Command for SubCommand {
|
|||||||
.required(
|
.required(
|
||||||
"str",
|
"str",
|
||||||
SyntaxShape::String,
|
SyntaxShape::String,
|
||||||
"the string to perform the operation with",
|
"The string to perform the operation with.",
|
||||||
)
|
)
|
||||||
.category(Category::Core)
|
.category(Category::Core)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Modify the current command line input buffer."
|
"Modify the current command line input buffer."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -16,7 +16,7 @@ impl Command for SubCommand {
|
|||||||
.category(Category::Core)
|
.category(Category::Core)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Get the current cursor position."
|
"Get the current cursor position."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -18,11 +18,11 @@ impl Command for SubCommand {
|
|||||||
"set the current cursor position to the end of the buffer",
|
"set the current cursor position to the end of the buffer",
|
||||||
Some('e'),
|
Some('e'),
|
||||||
)
|
)
|
||||||
.optional("pos", SyntaxShape::Int, "Cursor position to be set")
|
.optional("pos", SyntaxShape::Int, "Cursor position to be set.")
|
||||||
.category(Category::Core)
|
.category(Category::Core)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Set the current cursor position."
|
"Set the current cursor position."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -17,6 +17,7 @@ pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
|
|||||||
CommandlineGetCursor,
|
CommandlineGetCursor,
|
||||||
CommandlineSetCursor,
|
CommandlineSetCursor,
|
||||||
History,
|
History,
|
||||||
|
HistoryImport,
|
||||||
HistorySession,
|
HistorySession,
|
||||||
Keybindings,
|
Keybindings,
|
||||||
KeybindingsDefault,
|
KeybindingsDefault,
|
||||||
|
9
crates/nu-cli/src/commands/history/fields.rs
Normal file
9
crates/nu-cli/src/commands/history/fields.rs
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
// Each const is named after a HistoryItem field, and the value is the field name to be displayed to
|
||||||
|
// the user (or accept during import).
|
||||||
|
pub const COMMAND_LINE: &str = "command";
|
||||||
|
pub const START_TIMESTAMP: &str = "start_timestamp";
|
||||||
|
pub const HOSTNAME: &str = "hostname";
|
||||||
|
pub const CWD: &str = "cwd";
|
||||||
|
pub const EXIT_STATUS: &str = "exit_status";
|
||||||
|
pub const DURATION: &str = "duration";
|
||||||
|
pub const SESSION_ID: &str = "session_id";
|
@ -1,10 +1,12 @@
|
|||||||
use nu_engine::command_prelude::*;
|
use nu_engine::command_prelude::*;
|
||||||
use nu_protocol::HistoryFileFormat;
|
use nu_protocol::{shell_error::io::IoError, HistoryFileFormat};
|
||||||
use reedline::{
|
use reedline::{
|
||||||
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
|
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
|
||||||
SqliteBackedHistory,
|
SqliteBackedHistory,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use super::fields;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct History;
|
pub struct History;
|
||||||
|
|
||||||
@ -13,7 +15,7 @@ impl Command for History {
|
|||||||
"history"
|
"history"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Get the command history."
|
"Get the command history."
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -42,29 +44,19 @@ impl Command for History {
|
|||||||
let Some(history) = engine_state.history_config() else {
|
let Some(history) = engine_state.history_config() else {
|
||||||
return Ok(PipelineData::empty());
|
return Ok(PipelineData::empty());
|
||||||
};
|
};
|
||||||
|
|
||||||
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
|
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
|
||||||
if let Some(config_path) = nu_path::config_dir() {
|
let Some(history_path) = history.file_path() else {
|
||||||
let clear = call.has_flag(engine_state, stack, "clear")?;
|
return Err(ShellError::ConfigDirNotFound { span: Some(head) });
|
||||||
let long = call.has_flag(engine_state, stack, "long")?;
|
};
|
||||||
let ctrlc = engine_state.ctrlc.clone();
|
|
||||||
|
|
||||||
let mut history_path = config_path;
|
if call.has_flag(engine_state, stack, "clear")? {
|
||||||
history_path.push("nushell");
|
|
||||||
match history.file_format {
|
|
||||||
HistoryFileFormat::Sqlite => {
|
|
||||||
history_path.push("history.sqlite3");
|
|
||||||
}
|
|
||||||
HistoryFileFormat::PlainText => {
|
|
||||||
history_path.push("history.txt");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if clear {
|
|
||||||
let _ = std::fs::remove_file(history_path);
|
let _ = std::fs::remove_file(history_path);
|
||||||
// TODO: FIXME also clear the auxiliary files when using sqlite
|
// TODO: FIXME also clear the auxiliary files when using sqlite
|
||||||
Ok(PipelineData::empty())
|
return Ok(PipelineData::empty());
|
||||||
} else {
|
}
|
||||||
|
|
||||||
|
let long = call.has_flag(engine_state, stack, "long")?;
|
||||||
|
let signals = engine_state.signals().clone();
|
||||||
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
|
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
|
||||||
HistoryFileFormat::Sqlite => {
|
HistoryFileFormat::Sqlite => {
|
||||||
SqliteBackedHistory::with_file(history_path.clone(), None, None)
|
SqliteBackedHistory::with_file(history_path.clone(), None, None)
|
||||||
@ -74,20 +66,17 @@ impl Command for History {
|
|||||||
})
|
})
|
||||||
.ok()
|
.ok()
|
||||||
}
|
}
|
||||||
|
HistoryFileFormat::Plaintext => {
|
||||||
HistoryFileFormat::PlainText => FileBackedHistory::with_file(
|
FileBackedHistory::with_file(history.max_size as usize, history_path.clone())
|
||||||
history.max_size as usize,
|
|
||||||
history_path.clone(),
|
|
||||||
)
|
|
||||||
.map(|inner| {
|
.map(|inner| {
|
||||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||||
boxed
|
boxed
|
||||||
})
|
})
|
||||||
.ok(),
|
.ok()
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
match history.file_format {
|
match history.file_format {
|
||||||
HistoryFileFormat::PlainText => Ok(history_reader
|
HistoryFileFormat::Plaintext => Ok(history_reader
|
||||||
.and_then(|h| {
|
.and_then(|h| {
|
||||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||||
.ok()
|
.ok()
|
||||||
@ -96,37 +85,37 @@ impl Command for History {
|
|||||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||||
Value::record(
|
Value::record(
|
||||||
record! {
|
record! {
|
||||||
"command" => Value::string(entry.command_line, head),
|
fields::COMMAND_LINE => Value::string(entry.command_line, head),
|
||||||
|
// TODO: This name is inconsistent with create_history_record.
|
||||||
"index" => Value::int(idx as i64, head),
|
"index" => Value::int(idx as i64, head),
|
||||||
},
|
},
|
||||||
head,
|
head,
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
.ok_or(ShellError::FileNotFound {
|
.ok_or(IoError::new(
|
||||||
file: history_path.display().to_string(),
|
std::io::ErrorKind::NotFound,
|
||||||
span: head,
|
head,
|
||||||
})?
|
history_path,
|
||||||
.into_pipeline_data(head, ctrlc)),
|
))?
|
||||||
|
.into_pipeline_data(head, signals)),
|
||||||
HistoryFileFormat::Sqlite => Ok(history_reader
|
HistoryFileFormat::Sqlite => Ok(history_reader
|
||||||
.and_then(|h| {
|
.and_then(|h| {
|
||||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||||
.ok()
|
.ok()
|
||||||
})
|
})
|
||||||
.map(move |entries| {
|
.map(move |entries| {
|
||||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
entries
|
||||||
create_history_record(idx, entry, long, head)
|
.into_iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(move |(idx, entry)| create_history_record(idx, entry, long, head))
|
||||||
})
|
})
|
||||||
})
|
.ok_or(IoError::new(
|
||||||
.ok_or(ShellError::FileNotFound {
|
std::io::ErrorKind::NotFound,
|
||||||
file: history_path.display().to_string(),
|
head,
|
||||||
span: head,
|
history_path,
|
||||||
})?
|
))?
|
||||||
.into_pipeline_data(head, ctrlc)),
|
.into_pipeline_data(head, signals)),
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Err(ShellError::ConfigDirNotFound { span: Some(head) })
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -156,58 +145,34 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
|||||||
//2. Create a record of either short or long columns and values
|
//2. Create a record of either short or long columns and values
|
||||||
|
|
||||||
let item_id_value = Value::int(
|
let item_id_value = Value::int(
|
||||||
match entry.id {
|
entry
|
||||||
Some(id) => {
|
.id
|
||||||
let ids = id.to_string();
|
.and_then(|id| id.to_string().parse::<i64>().ok())
|
||||||
match ids.parse::<i64>() {
|
.unwrap_or_default(),
|
||||||
Ok(i) => i,
|
|
||||||
_ => 0i64,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None => 0i64,
|
|
||||||
},
|
|
||||||
head,
|
head,
|
||||||
);
|
);
|
||||||
let start_timestamp_value = Value::string(
|
let start_timestamp_value = Value::string(
|
||||||
match entry.start_timestamp {
|
entry
|
||||||
Some(time) => time.to_string(),
|
.start_timestamp
|
||||||
None => "".into(),
|
.map(|time| time.to_string())
|
||||||
},
|
.unwrap_or_default(),
|
||||||
head,
|
head,
|
||||||
);
|
);
|
||||||
let command_value = Value::string(entry.command_line, head);
|
let command_value = Value::string(entry.command_line, head);
|
||||||
let session_id_value = Value::int(
|
let session_id_value = Value::int(
|
||||||
match entry.session_id {
|
entry
|
||||||
Some(sid) => {
|
.session_id
|
||||||
let sids = sid.to_string();
|
.and_then(|id| id.to_string().parse::<i64>().ok())
|
||||||
match sids.parse::<i64>() {
|
.unwrap_or_default(),
|
||||||
Ok(i) => i,
|
|
||||||
_ => 0i64,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None => 0i64,
|
|
||||||
},
|
|
||||||
head,
|
|
||||||
);
|
|
||||||
let hostname_value = Value::string(
|
|
||||||
match entry.hostname {
|
|
||||||
Some(host) => host,
|
|
||||||
None => "".into(),
|
|
||||||
},
|
|
||||||
head,
|
|
||||||
);
|
|
||||||
let cwd_value = Value::string(
|
|
||||||
match entry.cwd {
|
|
||||||
Some(cwd) => cwd,
|
|
||||||
None => "".into(),
|
|
||||||
},
|
|
||||||
head,
|
head,
|
||||||
);
|
);
|
||||||
|
let hostname_value = Value::string(entry.hostname.unwrap_or_default(), head);
|
||||||
|
let cwd_value = Value::string(entry.cwd.unwrap_or_default(), head);
|
||||||
let duration_value = Value::duration(
|
let duration_value = Value::duration(
|
||||||
match entry.duration {
|
entry
|
||||||
Some(d) => d.as_nanos().try_into().unwrap_or(0),
|
.duration
|
||||||
None => 0,
|
.and_then(|d| d.as_nanos().try_into().ok())
|
||||||
},
|
.unwrap_or(0),
|
||||||
head,
|
head,
|
||||||
);
|
);
|
||||||
let exit_status_value = Value::int(entry.exit_status.unwrap_or(0), head);
|
let exit_status_value = Value::int(entry.exit_status.unwrap_or(0), head);
|
||||||
@ -216,13 +181,13 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
|||||||
Value::record(
|
Value::record(
|
||||||
record! {
|
record! {
|
||||||
"item_id" => item_id_value,
|
"item_id" => item_id_value,
|
||||||
"start_timestamp" => start_timestamp_value,
|
fields::START_TIMESTAMP => start_timestamp_value,
|
||||||
"command" => command_value,
|
fields::COMMAND_LINE => command_value,
|
||||||
"session_id" => session_id_value,
|
fields::SESSION_ID => session_id_value,
|
||||||
"hostname" => hostname_value,
|
fields::HOSTNAME => hostname_value,
|
||||||
"cwd" => cwd_value,
|
fields::CWD => cwd_value,
|
||||||
"duration" => duration_value,
|
fields::DURATION => duration_value,
|
||||||
"exit_status" => exit_status_value,
|
fields::EXIT_STATUS => exit_status_value,
|
||||||
"idx" => index_value,
|
"idx" => index_value,
|
||||||
},
|
},
|
||||||
head,
|
head,
|
||||||
@ -230,11 +195,11 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
|||||||
} else {
|
} else {
|
||||||
Value::record(
|
Value::record(
|
||||||
record! {
|
record! {
|
||||||
"start_timestamp" => start_timestamp_value,
|
fields::START_TIMESTAMP => start_timestamp_value,
|
||||||
"command" => command_value,
|
fields::COMMAND_LINE => command_value,
|
||||||
"cwd" => cwd_value,
|
fields::CWD => cwd_value,
|
||||||
"duration" => duration_value,
|
fields::DURATION => duration_value,
|
||||||
"exit_status" => exit_status_value,
|
fields::EXIT_STATUS => exit_status_value,
|
||||||
},
|
},
|
||||||
head,
|
head,
|
||||||
)
|
)
|
||||||
|
441
crates/nu-cli/src/commands/history/history_import.rs
Normal file
441
crates/nu-cli/src/commands/history/history_import.rs
Normal file
@ -0,0 +1,441 @@
|
|||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
use nu_engine::command_prelude::*;
|
||||||
|
use nu_protocol::{
|
||||||
|
shell_error::{self, io::IoError},
|
||||||
|
HistoryFileFormat,
|
||||||
|
};
|
||||||
|
|
||||||
|
use reedline::{
|
||||||
|
FileBackedHistory, History, HistoryItem, ReedlineError, SearchQuery, SqliteBackedHistory,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::fields;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct HistoryImport;
|
||||||
|
|
||||||
|
impl Command for HistoryImport {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"history import"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn description(&self) -> &str {
|
||||||
|
"Import command line history."
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_description(&self) -> &str {
|
||||||
|
r#"Can import history from input, either successive command lines or more detailed records. If providing records, available fields are:
|
||||||
|
command_line, id, start_timestamp, hostname, cwd, duration, exit_status.
|
||||||
|
|
||||||
|
If no input is provided, will import all history items from existing history in the other format: if current history is stored in sqlite, it will store it in plain text and vice versa.
|
||||||
|
|
||||||
|
Note that history item IDs are ignored when importing from file."#
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("history import")
|
||||||
|
.category(Category::History)
|
||||||
|
.input_output_types(vec![
|
||||||
|
(Type::Nothing, Type::Nothing),
|
||||||
|
(Type::String, Type::Nothing),
|
||||||
|
(Type::List(Box::new(Type::String)), Type::Nothing),
|
||||||
|
(Type::table(), Type::Nothing),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![
|
||||||
|
Example {
|
||||||
|
example: "history import",
|
||||||
|
description:
|
||||||
|
"Append all items from history in the other format to the current history",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
example: "echo foo | history import",
|
||||||
|
description: "Append `foo` to the current history",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
example: "[[ command_line cwd ]; [ foo /home ]] | history import",
|
||||||
|
description: "Append `foo` ran from `/home` to the current history",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
_stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let span = call.head;
|
||||||
|
let ok = Ok(Value::nothing(call.head).into_pipeline_data());
|
||||||
|
|
||||||
|
let Some(history) = engine_state.history_config() else {
|
||||||
|
return ok;
|
||||||
|
};
|
||||||
|
let Some(current_history_path) = history.file_path() else {
|
||||||
|
return Err(ShellError::ConfigDirNotFound { span: span.into() });
|
||||||
|
};
|
||||||
|
if let Some(bak_path) = backup(¤t_history_path, span)? {
|
||||||
|
println!("Backed history to {}", bak_path.display());
|
||||||
|
}
|
||||||
|
match input {
|
||||||
|
PipelineData::Empty => {
|
||||||
|
let other_format = match history.file_format {
|
||||||
|
HistoryFileFormat::Sqlite => HistoryFileFormat::Plaintext,
|
||||||
|
HistoryFileFormat::Plaintext => HistoryFileFormat::Sqlite,
|
||||||
|
};
|
||||||
|
let src = new_backend(other_format, None)?;
|
||||||
|
let mut dst = new_backend(history.file_format, Some(current_history_path))?;
|
||||||
|
let items = src
|
||||||
|
.search(SearchQuery::everything(
|
||||||
|
reedline::SearchDirection::Forward,
|
||||||
|
None,
|
||||||
|
))
|
||||||
|
.map_err(error_from_reedline)?
|
||||||
|
.into_iter()
|
||||||
|
.map(Ok);
|
||||||
|
import(dst.as_mut(), items)
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
let input = input.into_iter().map(item_from_value);
|
||||||
|
import(
|
||||||
|
new_backend(history.file_format, Some(current_history_path))?.as_mut(),
|
||||||
|
input,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}?;
|
||||||
|
|
||||||
|
ok
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new_backend(
|
||||||
|
format: HistoryFileFormat,
|
||||||
|
path: Option<PathBuf>,
|
||||||
|
) -> Result<Box<dyn History>, ShellError> {
|
||||||
|
let path = match path {
|
||||||
|
Some(path) => path,
|
||||||
|
None => {
|
||||||
|
let Some(mut path) = nu_path::nu_config_dir() else {
|
||||||
|
return Err(ShellError::ConfigDirNotFound { span: None });
|
||||||
|
};
|
||||||
|
path.push(format.default_file_name());
|
||||||
|
path.into_std_path_buf()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
fn map(
|
||||||
|
result: Result<impl History + 'static, ReedlineError>,
|
||||||
|
) -> Result<Box<dyn History>, ShellError> {
|
||||||
|
result
|
||||||
|
.map(|x| Box::new(x) as Box<dyn History>)
|
||||||
|
.map_err(error_from_reedline)
|
||||||
|
}
|
||||||
|
match format {
|
||||||
|
// Use a reasonably large value for maximum capacity.
|
||||||
|
HistoryFileFormat::Plaintext => map(FileBackedHistory::with_file(0xfffffff, path)),
|
||||||
|
HistoryFileFormat::Sqlite => map(SqliteBackedHistory::with_file(path, None, None)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn import(
|
||||||
|
dst: &mut dyn History,
|
||||||
|
src: impl Iterator<Item = Result<HistoryItem, ShellError>>,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
for item in src {
|
||||||
|
let mut item = item?;
|
||||||
|
item.id = None;
|
||||||
|
dst.save(item).map_err(error_from_reedline)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn error_from_reedline(e: ReedlineError) -> ShellError {
|
||||||
|
// TODO: Should we add a new ShellError variant?
|
||||||
|
ShellError::GenericError {
|
||||||
|
error: "Reedline error".to_owned(),
|
||||||
|
msg: format!("{e}"),
|
||||||
|
span: None,
|
||||||
|
help: None,
|
||||||
|
inner: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn item_from_value(v: Value) -> Result<HistoryItem, ShellError> {
|
||||||
|
let span = v.span();
|
||||||
|
match v {
|
||||||
|
Value::Record { val, .. } => item_from_record(val.into_owned(), span),
|
||||||
|
Value::String { val, .. } => Ok(HistoryItem {
|
||||||
|
command_line: val,
|
||||||
|
id: None,
|
||||||
|
start_timestamp: None,
|
||||||
|
session_id: None,
|
||||||
|
hostname: None,
|
||||||
|
cwd: None,
|
||||||
|
duration: None,
|
||||||
|
exit_status: None,
|
||||||
|
more_info: None,
|
||||||
|
}),
|
||||||
|
_ => Err(ShellError::UnsupportedInput {
|
||||||
|
msg: "Only list and record inputs are supported".to_owned(),
|
||||||
|
input: v.get_type().to_string(),
|
||||||
|
msg_span: span,
|
||||||
|
input_span: span,
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn item_from_record(mut rec: Record, span: Span) -> Result<HistoryItem, ShellError> {
|
||||||
|
let cmd = match rec.remove(fields::COMMAND_LINE) {
|
||||||
|
Some(v) => v.as_str()?.to_owned(),
|
||||||
|
None => {
|
||||||
|
return Err(ShellError::TypeMismatch {
|
||||||
|
err_message: format!("missing column: {}", fields::COMMAND_LINE),
|
||||||
|
span,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
fn get<T>(
|
||||||
|
rec: &mut Record,
|
||||||
|
field: &'static str,
|
||||||
|
f: impl FnOnce(Value) -> Result<T, ShellError>,
|
||||||
|
) -> Result<Option<T>, ShellError> {
|
||||||
|
rec.remove(field).map(f).transpose()
|
||||||
|
}
|
||||||
|
|
||||||
|
let rec = &mut rec;
|
||||||
|
let item = HistoryItem {
|
||||||
|
command_line: cmd,
|
||||||
|
id: None,
|
||||||
|
start_timestamp: get(rec, fields::START_TIMESTAMP, |v| Ok(v.as_date()?.to_utc()))?,
|
||||||
|
hostname: get(rec, fields::HOSTNAME, |v| Ok(v.as_str()?.to_owned()))?,
|
||||||
|
cwd: get(rec, fields::CWD, |v| Ok(v.as_str()?.to_owned()))?,
|
||||||
|
exit_status: get(rec, fields::EXIT_STATUS, |v| v.as_int())?,
|
||||||
|
duration: get(rec, fields::DURATION, |v| duration_from_value(v, span))?,
|
||||||
|
more_info: None,
|
||||||
|
// TODO: Currently reedline doesn't let you create session IDs.
|
||||||
|
session_id: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
if !rec.is_empty() {
|
||||||
|
let cols = rec.columns().map(|s| s.as_str()).collect::<Vec<_>>();
|
||||||
|
return Err(ShellError::TypeMismatch {
|
||||||
|
err_message: format!("unsupported column names: {}", cols.join(", ")),
|
||||||
|
span,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Ok(item)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn duration_from_value(v: Value, span: Span) -> Result<std::time::Duration, ShellError> {
|
||||||
|
chrono::Duration::nanoseconds(v.as_duration()?)
|
||||||
|
.to_std()
|
||||||
|
.map_err(|_| ShellError::NeedsPositiveValue { span })
|
||||||
|
}
|
||||||
|
|
||||||
|
fn find_backup_path(path: &Path, span: Span) -> Result<PathBuf, ShellError> {
|
||||||
|
let Ok(mut bak_path) = path.to_path_buf().into_os_string().into_string() else {
|
||||||
|
// This isn't fundamentally problem, but trying to work with OsString is a nightmare.
|
||||||
|
return Err(ShellError::GenericError {
|
||||||
|
error: "History path not UTF-8".to_string(),
|
||||||
|
msg: "History path must be representable as UTF-8".to_string(),
|
||||||
|
span: Some(span),
|
||||||
|
help: None,
|
||||||
|
inner: vec![],
|
||||||
|
});
|
||||||
|
};
|
||||||
|
bak_path.push_str(".bak");
|
||||||
|
if !Path::new(&bak_path).exists() {
|
||||||
|
return Ok(bak_path.into());
|
||||||
|
}
|
||||||
|
let base_len = bak_path.len();
|
||||||
|
for i in 1..100 {
|
||||||
|
use std::fmt::Write;
|
||||||
|
bak_path.truncate(base_len);
|
||||||
|
write!(&mut bak_path, ".{i}").unwrap();
|
||||||
|
if !Path::new(&bak_path).exists() {
|
||||||
|
return Ok(PathBuf::from(bak_path));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(ShellError::GenericError {
|
||||||
|
error: "Too many backup files".to_string(),
|
||||||
|
msg: "Found too many existing backup files".to_string(),
|
||||||
|
span: Some(span),
|
||||||
|
help: None,
|
||||||
|
inner: vec![],
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn backup(path: &Path, span: Span) -> Result<Option<PathBuf>, ShellError> {
|
||||||
|
match path.metadata() {
|
||||||
|
Ok(md) if md.is_file() => (),
|
||||||
|
Ok(_) => {
|
||||||
|
return Err(IoError::new_with_additional_context(
|
||||||
|
shell_error::io::ErrorKind::NotAFile,
|
||||||
|
span,
|
||||||
|
PathBuf::from(path),
|
||||||
|
"history path exists but is not a file",
|
||||||
|
)
|
||||||
|
.into())
|
||||||
|
}
|
||||||
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
|
||||||
|
Err(e) => {
|
||||||
|
return Err(IoError::new_internal(
|
||||||
|
e.kind(),
|
||||||
|
"Could not get metadata",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
)
|
||||||
|
.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let bak_path = find_backup_path(path, span)?;
|
||||||
|
std::fs::copy(path, &bak_path).map_err(|err| {
|
||||||
|
IoError::new_internal(
|
||||||
|
err.kind(),
|
||||||
|
"Could not copy backup",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
Ok(Some(bak_path))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use chrono::DateTime;
|
||||||
|
use rstest::rstest;
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_item_from_value_string() -> Result<(), ShellError> {
|
||||||
|
let item = item_from_value(Value::string("foo", Span::unknown()))?;
|
||||||
|
assert_eq!(
|
||||||
|
item,
|
||||||
|
HistoryItem {
|
||||||
|
command_line: "foo".to_string(),
|
||||||
|
id: None,
|
||||||
|
start_timestamp: None,
|
||||||
|
session_id: None,
|
||||||
|
hostname: None,
|
||||||
|
cwd: None,
|
||||||
|
duration: None,
|
||||||
|
exit_status: None,
|
||||||
|
more_info: None
|
||||||
|
}
|
||||||
|
);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_item_from_value_record() {
|
||||||
|
let span = Span::unknown();
|
||||||
|
let rec = new_record(&[
|
||||||
|
("command", Value::string("foo", span)),
|
||||||
|
(
|
||||||
|
"start_timestamp",
|
||||||
|
Value::date(
|
||||||
|
DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00").unwrap(),
|
||||||
|
span,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("hostname", Value::string("localhost", span)),
|
||||||
|
("cwd", Value::string("/home/test", span)),
|
||||||
|
("duration", Value::duration(100_000_000, span)),
|
||||||
|
("exit_status", Value::int(42, span)),
|
||||||
|
]);
|
||||||
|
let item = item_from_value(rec).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
item,
|
||||||
|
HistoryItem {
|
||||||
|
command_line: "foo".to_string(),
|
||||||
|
id: None,
|
||||||
|
start_timestamp: Some(
|
||||||
|
DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00")
|
||||||
|
.unwrap()
|
||||||
|
.to_utc()
|
||||||
|
),
|
||||||
|
hostname: Some("localhost".to_string()),
|
||||||
|
cwd: Some("/home/test".to_string()),
|
||||||
|
duration: Some(std::time::Duration::from_nanos(100_000_000)),
|
||||||
|
exit_status: Some(42),
|
||||||
|
|
||||||
|
session_id: None,
|
||||||
|
more_info: None
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_item_from_value_record_extra_field() {
|
||||||
|
let span = Span::unknown();
|
||||||
|
let rec = new_record(&[
|
||||||
|
("command_line", Value::string("foo", span)),
|
||||||
|
("id_nonexistent", Value::int(1, span)),
|
||||||
|
]);
|
||||||
|
assert!(item_from_value(rec).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_item_from_value_record_bad_type() {
|
||||||
|
let span = Span::unknown();
|
||||||
|
let rec = new_record(&[
|
||||||
|
("command_line", Value::string("foo", span)),
|
||||||
|
("id", Value::string("one".to_string(), span)),
|
||||||
|
]);
|
||||||
|
assert!(item_from_value(rec).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new_record(rec: &[(&'static str, Value)]) -> Value {
|
||||||
|
let span = Span::unknown();
|
||||||
|
let rec = Record::from_raw_cols_vals(
|
||||||
|
rec.iter().map(|(col, _)| col.to_string()).collect(),
|
||||||
|
rec.iter().map(|(_, val)| val.clone()).collect(),
|
||||||
|
span,
|
||||||
|
span,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
Value::record(rec, span)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[case::no_backup(&["history.dat"], "history.dat.bak")]
|
||||||
|
#[case::backup_exists(&["history.dat", "history.dat.bak"], "history.dat.bak.1")]
|
||||||
|
#[case::multiple_backups_exists( &["history.dat", "history.dat.bak", "history.dat.bak.1"], "history.dat.bak.2")]
|
||||||
|
fn test_find_backup_path(#[case] existing: &[&str], #[case] want: &str) {
|
||||||
|
let dir = tempfile::tempdir().unwrap();
|
||||||
|
for name in existing {
|
||||||
|
std::fs::File::create_new(dir.path().join(name)).unwrap();
|
||||||
|
}
|
||||||
|
let got = find_backup_path(&dir.path().join("history.dat"), Span::test_data()).unwrap();
|
||||||
|
assert_eq!(got, dir.path().join(want))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_backup() {
|
||||||
|
let dir = tempfile::tempdir().unwrap();
|
||||||
|
let mut history = std::fs::File::create_new(dir.path().join("history.dat")).unwrap();
|
||||||
|
use std::io::Write;
|
||||||
|
write!(&mut history, "123").unwrap();
|
||||||
|
let want_bak_path = dir.path().join("history.dat.bak");
|
||||||
|
assert_eq!(
|
||||||
|
backup(&dir.path().join("history.dat"), Span::test_data()),
|
||||||
|
Ok(Some(want_bak_path.clone()))
|
||||||
|
);
|
||||||
|
let got_data = String::from_utf8(std::fs::read(want_bak_path).unwrap()).unwrap();
|
||||||
|
assert_eq!(got_data, "123");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_backup_no_file() {
|
||||||
|
let dir = tempfile::tempdir().unwrap();
|
||||||
|
let bak_path = backup(&dir.path().join("history.dat"), Span::test_data()).unwrap();
|
||||||
|
assert!(bak_path.is_none());
|
||||||
|
}
|
||||||
|
}
|
@ -8,7 +8,7 @@ impl Command for HistorySession {
|
|||||||
"history session"
|
"history session"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Get the command history session."
|
"Get the command history session."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,5 +1,8 @@
|
|||||||
|
mod fields;
|
||||||
mod history_;
|
mod history_;
|
||||||
|
mod history_import;
|
||||||
mod history_session;
|
mod history_session;
|
||||||
|
|
||||||
pub use history_::History;
|
pub use history_::History;
|
||||||
|
pub use history_import::HistoryImport;
|
||||||
pub use history_session::HistorySession;
|
pub use history_session::HistorySession;
|
||||||
|
@ -14,11 +14,11 @@ impl Command for Keybindings {
|
|||||||
.input_output_types(vec![(Type::Nothing, Type::String)])
|
.input_output_types(vec![(Type::Nothing, Type::String)])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Keybindings related commands."
|
"Keybindings related commands."
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_description(&self) -> &str {
|
||||||
r#"You must use one of the following subcommands. Using this command as-is will only produce this help message.
|
r#"You must use one of the following subcommands. Using this command as-is will only produce this help message.
|
||||||
|
|
||||||
For more information on input and keybindings, check:
|
For more information on input and keybindings, check:
|
||||||
@ -36,16 +36,6 @@ For more information on input and keybindings, check:
|
|||||||
call: &Call,
|
call: &Call,
|
||||||
_input: PipelineData,
|
_input: PipelineData,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
Ok(Value::string(
|
Ok(Value::string(get_full_help(self, engine_state, stack), call.head).into_pipeline_data())
|
||||||
get_full_help(
|
|
||||||
&Keybindings.signature(),
|
|
||||||
&Keybindings.examples(),
|
|
||||||
engine_state,
|
|
||||||
stack,
|
|
||||||
self.is_parser_keyword(),
|
|
||||||
),
|
|
||||||
call.head,
|
|
||||||
)
|
|
||||||
.into_pipeline_data())
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -15,7 +15,7 @@ impl Command for KeybindingsDefault {
|
|||||||
.input_output_types(vec![(Type::Nothing, Type::table())])
|
.input_output_types(vec![(Type::Nothing, Type::table())])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"List default keybindings."
|
"List default keybindings."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -23,7 +23,7 @@ impl Command for KeybindingsList {
|
|||||||
.category(Category::Platform)
|
.category(Category::Platform)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"List available options that can be used to create keybindings."
|
"List available options that can be used to create keybindings."
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -49,22 +49,26 @@ impl Command for KeybindingsList {
|
|||||||
|
|
||||||
fn run(
|
fn run(
|
||||||
&self,
|
&self,
|
||||||
_engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
_stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
call: &Call,
|
call: &Call,
|
||||||
_input: PipelineData,
|
_input: PipelineData,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let records = if call.named_len() == 0 {
|
|
||||||
let all_options = ["modifiers", "keycodes", "edits", "modes", "events"];
|
let all_options = ["modifiers", "keycodes", "edits", "modes", "events"];
|
||||||
all_options
|
|
||||||
|
let presence = all_options
|
||||||
.iter()
|
.iter()
|
||||||
.flat_map(|argument| get_records(argument, call.head))
|
.map(|option| call.has_flag(engine_state, stack, option))
|
||||||
.collect()
|
.collect::<Result<Vec<_>, ShellError>>()?;
|
||||||
} else {
|
|
||||||
call.named_iter()
|
let no_option_specified = presence.iter().all(|present| !*present);
|
||||||
.flat_map(|(argument, _, _)| get_records(argument.item.as_str(), call.head))
|
|
||||||
.collect()
|
let records = all_options
|
||||||
};
|
.iter()
|
||||||
|
.zip(presence)
|
||||||
|
.filter(|(_, present)| no_option_specified || *present)
|
||||||
|
.flat_map(|(option, _)| get_records(option, call.head))
|
||||||
|
.collect();
|
||||||
|
|
||||||
Ok(Value::list(records, call.head).into_pipeline_data())
|
Ok(Value::list(records, call.head).into_pipeline_data())
|
||||||
}
|
}
|
||||||
|
@ -2,6 +2,7 @@ use crossterm::{
|
|||||||
event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand,
|
event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand,
|
||||||
};
|
};
|
||||||
use nu_engine::command_prelude::*;
|
use nu_engine::command_prelude::*;
|
||||||
|
use nu_protocol::shell_error::io::IoError;
|
||||||
use std::io::{stdout, Write};
|
use std::io::{stdout, Write};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
@ -12,11 +13,11 @@ impl Command for KeybindingsListen {
|
|||||||
"keybindings listen"
|
"keybindings listen"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Get input from the user."
|
"Get input from the user."
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_description(&self) -> &str {
|
||||||
"This is an internal debugging tool. For better output, try `input listen --types [key]`"
|
"This is an internal debugging tool. For better output, try `input listen --types [key]`"
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -39,7 +40,13 @@ impl Command for KeybindingsListen {
|
|||||||
match print_events(engine_state) {
|
match print_events(engine_state) {
|
||||||
Ok(v) => Ok(v.into_pipeline_data()),
|
Ok(v) => Ok(v.into_pipeline_data()),
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
terminal::disable_raw_mode()?;
|
terminal::disable_raw_mode().map_err(|err| {
|
||||||
|
IoError::new_internal(
|
||||||
|
err.kind(),
|
||||||
|
"Could not disable raw mode",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
Err(ShellError::GenericError {
|
Err(ShellError::GenericError {
|
||||||
error: "Error with input".into(),
|
error: "Error with input".into(),
|
||||||
msg: "".into(),
|
msg: "".into(),
|
||||||
@ -63,8 +70,20 @@ impl Command for KeybindingsListen {
|
|||||||
pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||||
let config = engine_state.get_config();
|
let config = engine_state.get_config();
|
||||||
|
|
||||||
stdout().flush()?;
|
stdout().flush().map_err(|err| {
|
||||||
terminal::enable_raw_mode()?;
|
IoError::new_internal(
|
||||||
|
err.kind(),
|
||||||
|
"Could not flush stdout",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
terminal::enable_raw_mode().map_err(|err| {
|
||||||
|
IoError::new_internal(
|
||||||
|
err.kind(),
|
||||||
|
"Could not enable raw mode",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
if config.use_kitty_protocol {
|
if config.use_kitty_protocol {
|
||||||
if let Ok(false) = crossterm::terminal::supports_keyboard_enhancement() {
|
if let Ok(false) = crossterm::terminal::supports_keyboard_enhancement() {
|
||||||
@ -94,7 +113,9 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
|||||||
let mut stdout = std::io::BufWriter::new(std::io::stderr());
|
let mut stdout = std::io::BufWriter::new(std::io::stderr());
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
let event = crossterm::event::read()?;
|
let event = crossterm::event::read().map_err(|err| {
|
||||||
|
IoError::new_internal(err.kind(), "Could not read event", nu_protocol::location!())
|
||||||
|
})?;
|
||||||
if event == Event::Key(KeyCode::Esc.into()) {
|
if event == Event::Key(KeyCode::Esc.into()) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@ -113,9 +134,25 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
|||||||
|
|
||||||
_ => "".to_string(),
|
_ => "".to_string(),
|
||||||
};
|
};
|
||||||
stdout.queue(crossterm::style::Print(o))?;
|
stdout.queue(crossterm::style::Print(o)).map_err(|err| {
|
||||||
stdout.queue(crossterm::style::Print("\r\n"))?;
|
IoError::new_internal(
|
||||||
stdout.flush()?;
|
err.kind(),
|
||||||
|
"Could not print output record",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
stdout
|
||||||
|
.queue(crossterm::style::Print("\r\n"))
|
||||||
|
.map_err(|err| {
|
||||||
|
IoError::new_internal(
|
||||||
|
err.kind(),
|
||||||
|
"Could not print linebreak",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
stdout.flush().map_err(|err| {
|
||||||
|
IoError::new_internal(err.kind(), "Could not flush", nu_protocol::location!())
|
||||||
|
})?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if config.use_kitty_protocol {
|
if config.use_kitty_protocol {
|
||||||
@ -125,7 +162,13 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
terminal::disable_raw_mode()?;
|
terminal::disable_raw_mode().map_err(|err| {
|
||||||
|
IoError::new_internal(
|
||||||
|
err.kind(),
|
||||||
|
"Could not disable raw mode",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
Ok(Value::nothing(Span::unknown()))
|
Ok(Value::nothing(Span::unknown()))
|
||||||
}
|
}
|
||||||
|
@ -7,7 +7,7 @@ mod keybindings_list;
|
|||||||
mod keybindings_listen;
|
mod keybindings_listen;
|
||||||
|
|
||||||
pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor};
|
pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor};
|
||||||
pub use history::{History, HistorySession};
|
pub use history::{History, HistoryImport, HistorySession};
|
||||||
pub use keybindings::Keybindings;
|
pub use keybindings::Keybindings;
|
||||||
pub use keybindings_default::KeybindingsDefault;
|
pub use keybindings_default::KeybindingsDefault;
|
||||||
pub use keybindings_list::KeybindingsList;
|
pub use keybindings_list::KeybindingsList;
|
||||||
|
97
crates/nu-cli/src/completions/attribute_completions.rs
Normal file
97
crates/nu-cli/src/completions/attribute_completions.rs
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
use super::{completion_options::NuMatcher, SemanticSuggestion};
|
||||||
|
use crate::{
|
||||||
|
completions::{Completer, CompletionOptions},
|
||||||
|
SuggestionKind,
|
||||||
|
};
|
||||||
|
use nu_protocol::{
|
||||||
|
engine::{Stack, StateWorkingSet},
|
||||||
|
Span,
|
||||||
|
};
|
||||||
|
use reedline::Suggestion;
|
||||||
|
|
||||||
|
pub struct AttributeCompletion;
|
||||||
|
pub struct AttributableCompletion;
|
||||||
|
|
||||||
|
impl Completer for AttributeCompletion {
|
||||||
|
fn fetch(
|
||||||
|
&mut self,
|
||||||
|
working_set: &StateWorkingSet,
|
||||||
|
_stack: &Stack,
|
||||||
|
_prefix: &[u8],
|
||||||
|
span: Span,
|
||||||
|
offset: usize,
|
||||||
|
_pos: usize,
|
||||||
|
options: &CompletionOptions,
|
||||||
|
) -> Vec<SemanticSuggestion> {
|
||||||
|
let partial = working_set.get_span_contents(span);
|
||||||
|
let mut matcher = NuMatcher::new(String::from_utf8_lossy(partial), options.clone());
|
||||||
|
|
||||||
|
let attr_commands = working_set.find_commands_by_predicate(
|
||||||
|
|s| {
|
||||||
|
s.strip_prefix(b"attr ")
|
||||||
|
.map(String::from_utf8_lossy)
|
||||||
|
.is_some_and(|name| matcher.matches(&name))
|
||||||
|
},
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
|
||||||
|
for (name, desc, ty) in attr_commands {
|
||||||
|
let name = name.strip_prefix(b"attr ").unwrap_or(&name);
|
||||||
|
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||||
|
suggestion: Suggestion {
|
||||||
|
value: String::from_utf8_lossy(name).into_owned(),
|
||||||
|
description: desc,
|
||||||
|
style: None,
|
||||||
|
extra: None,
|
||||||
|
span: reedline::Span {
|
||||||
|
start: span.start - offset,
|
||||||
|
end: span.end - offset,
|
||||||
|
},
|
||||||
|
append_whitespace: false,
|
||||||
|
},
|
||||||
|
kind: Some(SuggestionKind::Command(ty)),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
matcher.results()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Completer for AttributableCompletion {
|
||||||
|
fn fetch(
|
||||||
|
&mut self,
|
||||||
|
working_set: &StateWorkingSet,
|
||||||
|
_stack: &Stack,
|
||||||
|
_prefix: &[u8],
|
||||||
|
span: Span,
|
||||||
|
offset: usize,
|
||||||
|
_pos: usize,
|
||||||
|
options: &CompletionOptions,
|
||||||
|
) -> Vec<SemanticSuggestion> {
|
||||||
|
let partial = working_set.get_span_contents(span);
|
||||||
|
let mut matcher = NuMatcher::new(String::from_utf8_lossy(partial), options.clone());
|
||||||
|
|
||||||
|
for s in ["def", "extern", "export def", "export extern"] {
|
||||||
|
let decl_id = working_set
|
||||||
|
.find_decl(s.as_bytes())
|
||||||
|
.expect("internal error, builtin declaration not found");
|
||||||
|
let cmd = working_set.get_decl(decl_id);
|
||||||
|
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||||
|
suggestion: Suggestion {
|
||||||
|
value: cmd.name().into(),
|
||||||
|
description: Some(cmd.description().into()),
|
||||||
|
style: None,
|
||||||
|
extra: None,
|
||||||
|
span: reedline::Span {
|
||||||
|
start: span.start - offset,
|
||||||
|
end: span.end - offset,
|
||||||
|
},
|
||||||
|
append_whitespace: false,
|
||||||
|
},
|
||||||
|
kind: Some(SuggestionKind::Command(cmd.command_type())),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
matcher.results()
|
||||||
|
}
|
||||||
|
}
|
@ -1,50 +1,23 @@
|
|||||||
use crate::completions::{CompletionOptions, SortBy};
|
use crate::completions::CompletionOptions;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{Stack, StateWorkingSet},
|
engine::{Stack, StateWorkingSet},
|
||||||
levenshtein_distance, Span,
|
Span,
|
||||||
};
|
};
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
|
|
||||||
// Completer trait represents the three stages of the completion
|
|
||||||
// fetch, filter and sort
|
|
||||||
pub trait Completer {
|
pub trait Completer {
|
||||||
|
/// Fetch, filter, and sort completions
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
fn fetch(
|
fn fetch(
|
||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
prefix: Vec<u8>,
|
prefix: &[u8],
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
pos: usize,
|
pos: usize,
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
) -> Vec<SemanticSuggestion>;
|
) -> Vec<SemanticSuggestion>;
|
||||||
|
|
||||||
fn get_sort_by(&self) -> SortBy {
|
|
||||||
SortBy::Ascending
|
|
||||||
}
|
|
||||||
|
|
||||||
fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> {
|
|
||||||
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
|
|
||||||
let mut filtered_items = items;
|
|
||||||
|
|
||||||
// Sort items
|
|
||||||
match self.get_sort_by() {
|
|
||||||
SortBy::LevenshteinDistance => {
|
|
||||||
filtered_items.sort_by(|a, b| {
|
|
||||||
let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value);
|
|
||||||
let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value);
|
|
||||||
a_distance.cmp(&b_distance)
|
|
||||||
});
|
|
||||||
}
|
|
||||||
SortBy::Ascending => {
|
|
||||||
filtered_items.sort_by(|a, b| a.suggestion.value.cmp(&b.suggestion.value));
|
|
||||||
}
|
|
||||||
SortBy::None => {}
|
|
||||||
};
|
|
||||||
|
|
||||||
filtered_items
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default, PartialEq)]
|
#[derive(Debug, Default, PartialEq)]
|
||||||
@ -58,6 +31,7 @@ pub struct SemanticSuggestion {
|
|||||||
pub enum SuggestionKind {
|
pub enum SuggestionKind {
|
||||||
Command(nu_protocol::engine::CommandType),
|
Command(nu_protocol::engine::CommandType),
|
||||||
Type(nu_protocol::Type),
|
Type(nu_protocol::Type),
|
||||||
|
Module,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Suggestion> for SemanticSuggestion {
|
impl From<Suggestion> for SemanticSuggestion {
|
||||||
|
97
crates/nu-cli/src/completions/cell_path_completions.rs
Normal file
97
crates/nu-cli/src/completions/cell_path_completions.rs
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
|
||||||
|
use nu_engine::{column::get_columns, eval_variable};
|
||||||
|
use nu_protocol::{
|
||||||
|
ast::{Expr, FullCellPath, PathMember},
|
||||||
|
engine::{Stack, StateWorkingSet},
|
||||||
|
eval_const::eval_constant,
|
||||||
|
Span, Value,
|
||||||
|
};
|
||||||
|
use reedline::Suggestion;
|
||||||
|
|
||||||
|
use super::completion_options::NuMatcher;
|
||||||
|
|
||||||
|
pub struct CellPathCompletion<'a> {
|
||||||
|
pub full_cell_path: &'a FullCellPath,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Completer for CellPathCompletion<'_> {
|
||||||
|
fn fetch(
|
||||||
|
&mut self,
|
||||||
|
working_set: &StateWorkingSet,
|
||||||
|
stack: &Stack,
|
||||||
|
_prefix: &[u8],
|
||||||
|
_span: Span,
|
||||||
|
offset: usize,
|
||||||
|
_pos: usize,
|
||||||
|
options: &CompletionOptions,
|
||||||
|
) -> Vec<SemanticSuggestion> {
|
||||||
|
// empty tail is already handled as variable names completion
|
||||||
|
let Some((prefix_member, path_members)) = self.full_cell_path.tail.split_last() else {
|
||||||
|
return vec![];
|
||||||
|
};
|
||||||
|
let (mut prefix_str, span) = match prefix_member {
|
||||||
|
PathMember::String { val, span, .. } => (val.clone(), span),
|
||||||
|
PathMember::Int { val, span, .. } => (val.to_string(), span),
|
||||||
|
};
|
||||||
|
// strip the placeholder
|
||||||
|
prefix_str.pop();
|
||||||
|
let true_end = std::cmp::max(span.start, span.end - 1);
|
||||||
|
let span = Span::new(span.start, true_end);
|
||||||
|
let current_span = reedline::Span {
|
||||||
|
start: span.start - offset,
|
||||||
|
end: true_end - offset,
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut matcher = NuMatcher::new(prefix_str, options.clone());
|
||||||
|
|
||||||
|
// evaluate the head expression to get its value
|
||||||
|
let value = if let Expr::Var(var_id) = self.full_cell_path.head.expr {
|
||||||
|
working_set
|
||||||
|
.get_variable(var_id)
|
||||||
|
.const_val
|
||||||
|
.to_owned()
|
||||||
|
.or_else(|| eval_variable(working_set.permanent_state, stack, var_id, span).ok())
|
||||||
|
} else {
|
||||||
|
eval_constant(working_set, &self.full_cell_path.head).ok()
|
||||||
|
}
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
for suggestion in nested_suggestions(&value, path_members, current_span) {
|
||||||
|
matcher.add_semantic_suggestion(suggestion);
|
||||||
|
}
|
||||||
|
matcher.results()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find recursively the values for cell_path
|
||||||
|
fn nested_suggestions(
|
||||||
|
val: &Value,
|
||||||
|
path_members: &[PathMember],
|
||||||
|
current_span: reedline::Span,
|
||||||
|
) -> Vec<SemanticSuggestion> {
|
||||||
|
let value = val
|
||||||
|
.clone()
|
||||||
|
.follow_cell_path(path_members, false)
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
let kind = SuggestionKind::Type(value.get_type());
|
||||||
|
let str_to_suggestion = |s: String| SemanticSuggestion {
|
||||||
|
suggestion: Suggestion {
|
||||||
|
value: s,
|
||||||
|
span: current_span,
|
||||||
|
..Suggestion::default()
|
||||||
|
},
|
||||||
|
kind: Some(kind.to_owned()),
|
||||||
|
};
|
||||||
|
match value {
|
||||||
|
Value::Record { val, .. } => val
|
||||||
|
.columns()
|
||||||
|
.map(|s| str_to_suggestion(s.to_string()))
|
||||||
|
.collect(),
|
||||||
|
Value::List { vals, .. } => get_columns(vals.as_slice())
|
||||||
|
.into_iter()
|
||||||
|
.map(str_to_suggestion)
|
||||||
|
.collect(),
|
||||||
|
_ => vec![],
|
||||||
|
}
|
||||||
|
}
|
@ -1,5 +1,7 @@
|
|||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy},
|
completions::{Completer, CompletionOptions},
|
||||||
SuggestionKind,
|
SuggestionKind,
|
||||||
};
|
};
|
||||||
use nu_parser::FlatShape;
|
use nu_parser::FlatShape;
|
||||||
@ -9,7 +11,7 @@ use nu_protocol::{
|
|||||||
};
|
};
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
|
|
||||||
use super::SemanticSuggestion;
|
use super::{completion_options::NuMatcher, SemanticSuggestion};
|
||||||
|
|
||||||
pub struct CommandCompletion {
|
pub struct CommandCompletion {
|
||||||
flattened: Vec<(Span, FlatShape)>,
|
flattened: Vec<(Span, FlatShape)>,
|
||||||
@ -33,15 +35,15 @@ impl CommandCompletion {
|
|||||||
fn external_command_completion(
|
fn external_command_completion(
|
||||||
&self,
|
&self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
prefix: &str,
|
sugg_span: reedline::Span,
|
||||||
match_algorithm: MatchAlgorithm,
|
matched_internal: impl Fn(&str) -> bool,
|
||||||
) -> Vec<String> {
|
matcher: &mut NuMatcher<String>,
|
||||||
let mut executables = vec![];
|
) -> HashMap<String, SemanticSuggestion> {
|
||||||
|
let mut suggs = HashMap::new();
|
||||||
|
|
||||||
// os agnostic way to get the PATH env var
|
let paths = working_set.permanent_state.get_env_var_insensitive("path");
|
||||||
let paths = working_set.permanent_state.get_path_env_var();
|
|
||||||
|
|
||||||
if let Some(paths) = paths {
|
if let Some((_, paths)) = paths {
|
||||||
if let Ok(paths) = paths.as_list() {
|
if let Ok(paths) = paths.as_list() {
|
||||||
for path in paths {
|
for path in paths {
|
||||||
let path = path.coerce_str().unwrap_or_default();
|
let path = path.coerce_str().unwrap_or_default();
|
||||||
@ -51,25 +53,41 @@ impl CommandCompletion {
|
|||||||
if working_set
|
if working_set
|
||||||
.permanent_state
|
.permanent_state
|
||||||
.config
|
.config
|
||||||
.max_external_completion_results
|
.completions
|
||||||
> executables.len() as i64
|
.external
|
||||||
&& !executables.contains(
|
.max_results
|
||||||
&item
|
<= suggs.len() as i64
|
||||||
.path()
|
|
||||||
.file_name()
|
|
||||||
.map(|x| x.to_string_lossy().to_string())
|
|
||||||
.unwrap_or_default(),
|
|
||||||
)
|
|
||||||
&& matches!(
|
|
||||||
item.path().file_name().map(|x| match_algorithm
|
|
||||||
.matches_str(&x.to_string_lossy(), prefix)),
|
|
||||||
Some(true)
|
|
||||||
)
|
|
||||||
&& is_executable::is_executable(item.path())
|
|
||||||
{
|
{
|
||||||
if let Ok(name) = item.file_name().into_string() {
|
break;
|
||||||
executables.push(name);
|
|
||||||
}
|
}
|
||||||
|
let Ok(name) = item.file_name().into_string() else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
let value = if matched_internal(&name) {
|
||||||
|
format!("^{}", name)
|
||||||
|
} else {
|
||||||
|
name.clone()
|
||||||
|
};
|
||||||
|
if suggs.contains_key(&value) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if matcher.matches(&name) && is_executable::is_executable(item.path()) {
|
||||||
|
// If there's an internal command with the same name, adds ^cmd to the
|
||||||
|
// matcher so that both the internal and external command are included
|
||||||
|
matcher.add(&name, value.clone());
|
||||||
|
suggs.insert(
|
||||||
|
value.clone(),
|
||||||
|
SemanticSuggestion {
|
||||||
|
suggestion: Suggestion {
|
||||||
|
value,
|
||||||
|
span: sugg_span,
|
||||||
|
append_whitespace: true,
|
||||||
|
..Default::default()
|
||||||
|
},
|
||||||
|
// TODO: is there a way to create a test?
|
||||||
|
kind: None,
|
||||||
|
},
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -77,7 +95,7 @@ impl CommandCompletion {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
executables
|
suggs
|
||||||
}
|
}
|
||||||
|
|
||||||
fn complete_commands(
|
fn complete_commands(
|
||||||
@ -86,74 +104,60 @@ impl CommandCompletion {
|
|||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
find_externals: bool,
|
find_externals: bool,
|
||||||
match_algorithm: MatchAlgorithm,
|
options: &CompletionOptions,
|
||||||
) -> Vec<SemanticSuggestion> {
|
) -> Vec<SemanticSuggestion> {
|
||||||
let partial = working_set.get_span_contents(span);
|
let partial = working_set.get_span_contents(span);
|
||||||
|
let mut matcher = NuMatcher::new(String::from_utf8_lossy(partial), options.clone());
|
||||||
|
|
||||||
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
|
let sugg_span = reedline::Span::new(span.start - offset, span.end - offset);
|
||||||
|
|
||||||
let mut results = working_set
|
let mut internal_suggs = HashMap::new();
|
||||||
.find_commands_by_predicate(filter_predicate, true)
|
let filtered_commands = working_set.find_commands_by_predicate(
|
||||||
.into_iter()
|
|name| {
|
||||||
.map(move |x| SemanticSuggestion {
|
let name = String::from_utf8_lossy(name);
|
||||||
suggestion: Suggestion {
|
matcher.add(&name, name.to_string())
|
||||||
value: String::from_utf8_lossy(&x.0).to_string(),
|
|
||||||
description: x.1,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span::new(span.start - offset, span.end - offset),
|
|
||||||
append_whitespace: true,
|
|
||||||
},
|
},
|
||||||
kind: Some(SuggestionKind::Command(x.2)),
|
true,
|
||||||
})
|
);
|
||||||
.collect::<Vec<_>>();
|
for (name, description, typ) in filtered_commands {
|
||||||
|
let name = String::from_utf8_lossy(&name);
|
||||||
let partial = working_set.get_span_contents(span);
|
internal_suggs.insert(
|
||||||
let partial = String::from_utf8_lossy(partial).to_string();
|
name.to_string(),
|
||||||
|
SemanticSuggestion {
|
||||||
if find_externals {
|
|
||||||
let results_external = self
|
|
||||||
.external_command_completion(working_set, &partial, match_algorithm)
|
|
||||||
.into_iter()
|
|
||||||
.map(move |x| SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
suggestion: Suggestion {
|
||||||
value: x,
|
value: name.to_string(),
|
||||||
description: None,
|
description,
|
||||||
style: None,
|
span: sugg_span,
|
||||||
extra: None,
|
|
||||||
span: reedline::Span::new(span.start - offset, span.end - offset),
|
|
||||||
append_whitespace: true,
|
append_whitespace: true,
|
||||||
|
..Suggestion::default()
|
||||||
},
|
},
|
||||||
// TODO: is there a way to create a test?
|
kind: Some(SuggestionKind::Command(typ)),
|
||||||
kind: None,
|
|
||||||
});
|
|
||||||
|
|
||||||
let results_strings: Vec<String> =
|
|
||||||
results.iter().map(|x| x.suggestion.value.clone()).collect();
|
|
||||||
|
|
||||||
for external in results_external {
|
|
||||||
if results_strings.contains(&external.suggestion.value) {
|
|
||||||
results.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
|
||||||
value: format!("^{}", external.suggestion.value),
|
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: external.suggestion.span,
|
|
||||||
append_whitespace: true,
|
|
||||||
},
|
},
|
||||||
kind: external.kind,
|
);
|
||||||
})
|
}
|
||||||
|
|
||||||
|
let mut external_suggs = if find_externals {
|
||||||
|
self.external_command_completion(
|
||||||
|
working_set,
|
||||||
|
sugg_span,
|
||||||
|
|name| internal_suggs.contains_key(name),
|
||||||
|
&mut matcher,
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
results.push(external)
|
HashMap::new()
|
||||||
}
|
};
|
||||||
}
|
|
||||||
|
|
||||||
results
|
let mut res = Vec::new();
|
||||||
} else {
|
for cmd_name in matcher.results() {
|
||||||
results
|
if let Some(sugg) = internal_suggs
|
||||||
|
.remove(&cmd_name)
|
||||||
|
.or_else(|| external_suggs.remove(&cmd_name))
|
||||||
|
{
|
||||||
|
res.push(sugg);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
res
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Completer for CommandCompletion {
|
impl Completer for CommandCompletion {
|
||||||
@ -161,7 +165,7 @@ impl Completer for CommandCompletion {
|
|||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
_stack: &Stack,
|
_stack: &Stack,
|
||||||
_prefix: Vec<u8>,
|
_prefix: &[u8],
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
pos: usize,
|
pos: usize,
|
||||||
@ -191,7 +195,7 @@ impl Completer for CommandCompletion {
|
|||||||
Span::new(last.0.start, pos),
|
Span::new(last.0.start, pos),
|
||||||
offset,
|
offset,
|
||||||
false,
|
false,
|
||||||
options.match_algorithm,
|
options,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
vec![]
|
vec![]
|
||||||
@ -202,7 +206,7 @@ impl Completer for CommandCompletion {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let config = working_set.get_config();
|
let config = working_set.get_config();
|
||||||
let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
|
if matches!(self.flat_shape, nu_parser::FlatShape::External)
|
||||||
|| matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
|
|| matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
|
||||||
|| ((span.end - span.start) == 0)
|
|| ((span.end - span.start) == 0)
|
||||||
|| is_passthrough_command(working_set.delta.get_file_contents())
|
|| is_passthrough_command(working_set.delta.get_file_contents())
|
||||||
@ -216,18 +220,12 @@ impl Completer for CommandCompletion {
|
|||||||
working_set,
|
working_set,
|
||||||
span,
|
span,
|
||||||
offset,
|
offset,
|
||||||
config.enable_external_completion,
|
config.completions.external.enable,
|
||||||
options.match_algorithm,
|
options,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
vec![]
|
vec![]
|
||||||
};
|
|
||||||
|
|
||||||
subcommands.into_iter().chain(commands).collect::<Vec<_>>()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_sort_by(&self) -> SortBy {
|
|
||||||
SortBy::LevenshteinDistance
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,11 +1,14 @@
|
|||||||
use crate::completions::{
|
use crate::completions::{
|
||||||
CommandCompletion, Completer, CompletionOptions, CustomCompletion, DirectoryCompletion,
|
AttributableCompletion, AttributeCompletion, CellPathCompletion, CommandCompletion, Completer,
|
||||||
DotNuCompletion, FileCompletion, FlagCompletion, VariableCompletion,
|
CompletionOptions, CustomCompletion, DirectoryCompletion, DotNuCompletion, FileCompletion,
|
||||||
|
FlagCompletion, OperatorCompletion, VariableCompletion,
|
||||||
};
|
};
|
||||||
|
use log::debug;
|
||||||
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
|
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
|
||||||
use nu_engine::eval_block;
|
use nu_engine::eval_block;
|
||||||
use nu_parser::{flatten_pipeline_element, parse, FlatShape};
|
use nu_parser::{flatten_expression, parse, FlatShape};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
|
ast::{Expr, Expression, FindMapResult, Traverse},
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{Closure, EngineState, Stack, StateWorkingSet},
|
engine::{Closure, EngineState, Stack, StateWorkingSet},
|
||||||
PipelineData, Span, Value,
|
PipelineData, Span, Value,
|
||||||
@ -15,6 +18,73 @@ use std::{str, sync::Arc};
|
|||||||
|
|
||||||
use super::base::{SemanticSuggestion, SuggestionKind};
|
use super::base::{SemanticSuggestion, SuggestionKind};
|
||||||
|
|
||||||
|
/// Used as the function `f` in find_map Traverse
|
||||||
|
///
|
||||||
|
/// returns the inner-most pipeline_element of interest
|
||||||
|
/// i.e. the one that contains given position and needs completion
|
||||||
|
fn find_pipeline_element_by_position<'a>(
|
||||||
|
expr: &'a Expression,
|
||||||
|
working_set: &'a StateWorkingSet,
|
||||||
|
pos: usize,
|
||||||
|
) -> FindMapResult<&'a Expression> {
|
||||||
|
// skip the entire expression if the position is not in it
|
||||||
|
if !expr.span.contains(pos) {
|
||||||
|
return FindMapResult::Stop;
|
||||||
|
}
|
||||||
|
let closure = |expr: &'a Expression| find_pipeline_element_by_position(expr, working_set, pos);
|
||||||
|
match &expr.expr {
|
||||||
|
Expr::Call(call) => call
|
||||||
|
.arguments
|
||||||
|
.iter()
|
||||||
|
.find_map(|arg| arg.expr().and_then(|e| e.find_map(working_set, &closure)))
|
||||||
|
// if no inner call/external_call found, then this is the inner-most one
|
||||||
|
.or(Some(expr))
|
||||||
|
.map(FindMapResult::Found)
|
||||||
|
.unwrap_or_default(),
|
||||||
|
// TODO: clear separation of internal/external completion logic
|
||||||
|
Expr::ExternalCall(head, arguments) => arguments
|
||||||
|
.iter()
|
||||||
|
.find_map(|arg| arg.expr().find_map(working_set, &closure))
|
||||||
|
.or(head.as_ref().find_map(working_set, &closure))
|
||||||
|
.or(Some(expr))
|
||||||
|
.map(FindMapResult::Found)
|
||||||
|
.unwrap_or_default(),
|
||||||
|
// complete the operator
|
||||||
|
Expr::BinaryOp(lhs, _, rhs) => lhs
|
||||||
|
.find_map(working_set, &closure)
|
||||||
|
.or(rhs.find_map(working_set, &closure))
|
||||||
|
.or(Some(expr))
|
||||||
|
.map(FindMapResult::Found)
|
||||||
|
.unwrap_or_default(),
|
||||||
|
Expr::FullCellPath(fcp) => fcp
|
||||||
|
.head
|
||||||
|
.find_map(working_set, &closure)
|
||||||
|
.or(Some(expr))
|
||||||
|
.map(FindMapResult::Found)
|
||||||
|
.unwrap_or_default(),
|
||||||
|
Expr::Var(_) => FindMapResult::Found(expr),
|
||||||
|
Expr::AttributeBlock(ab) => ab
|
||||||
|
.attributes
|
||||||
|
.iter()
|
||||||
|
.map(|attr| &attr.expr)
|
||||||
|
.chain(Some(ab.item.as_ref()))
|
||||||
|
.find_map(|expr| expr.find_map(working_set, &closure))
|
||||||
|
.or(Some(expr))
|
||||||
|
.map(FindMapResult::Found)
|
||||||
|
.unwrap_or_default(),
|
||||||
|
_ => FindMapResult::Continue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Before completion, an additional character `a` is added to the source as a placeholder for correct parsing results.
|
||||||
|
/// This function helps to strip it
|
||||||
|
fn strip_placeholder<'a>(working_set: &'a StateWorkingSet, span: &Span) -> (Span, &'a [u8]) {
|
||||||
|
let new_end = std::cmp::max(span.end - 1, span.start);
|
||||||
|
let new_span = Span::new(span.start, new_end);
|
||||||
|
let prefix = working_set.get_span_contents(new_span);
|
||||||
|
(new_span, prefix)
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct NuCompleter {
|
pub struct NuCompleter {
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
@ -25,7 +95,7 @@ impl NuCompleter {
|
|||||||
pub fn new(engine_state: Arc<EngineState>, stack: Arc<Stack>) -> Self {
|
pub fn new(engine_state: Arc<EngineState>, stack: Arc<Stack>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
engine_state,
|
engine_state,
|
||||||
stack: Stack::with_parent(stack).reset_out_dest().capture(),
|
stack: Stack::with_parent(stack).reset_out_dest().collect_value(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -33,12 +103,34 @@ impl NuCompleter {
|
|||||||
self.completion_helper(line, pos)
|
self.completion_helper(line, pos)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn variable_names_completion_helper(
|
||||||
|
&self,
|
||||||
|
working_set: &StateWorkingSet,
|
||||||
|
span: Span,
|
||||||
|
offset: usize,
|
||||||
|
) -> Vec<SemanticSuggestion> {
|
||||||
|
let (new_span, prefix) = strip_placeholder(working_set, &span);
|
||||||
|
if !prefix.starts_with(b"$") {
|
||||||
|
return vec![];
|
||||||
|
}
|
||||||
|
let mut variable_names_completer = VariableCompletion {};
|
||||||
|
self.process_completion(
|
||||||
|
&mut variable_names_completer,
|
||||||
|
working_set,
|
||||||
|
prefix,
|
||||||
|
new_span,
|
||||||
|
offset,
|
||||||
|
// pos is not required
|
||||||
|
0,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
// Process the completion for a given completer
|
// Process the completion for a given completer
|
||||||
fn process_completion<T: Completer>(
|
fn process_completion<T: Completer>(
|
||||||
&self,
|
&self,
|
||||||
completer: &mut T,
|
completer: &mut T,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
prefix: Vec<u8>,
|
prefix: &[u8],
|
||||||
new_span: Span,
|
new_span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
pos: usize,
|
pos: usize,
|
||||||
@ -46,26 +138,26 @@ impl NuCompleter {
|
|||||||
let config = self.engine_state.get_config();
|
let config = self.engine_state.get_config();
|
||||||
|
|
||||||
let options = CompletionOptions {
|
let options = CompletionOptions {
|
||||||
case_sensitive: config.case_sensitive_completions,
|
case_sensitive: config.completions.case_sensitive,
|
||||||
match_algorithm: config.completion_algorithm.into(),
|
match_algorithm: config.completions.algorithm.into(),
|
||||||
|
sort: config.completions.sort,
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
// Fetch
|
debug!(
|
||||||
let mut suggestions = completer.fetch(
|
"process_completion: prefix: {}, new_span: {new_span:?}, offset: {offset}, pos: {pos}",
|
||||||
|
String::from_utf8_lossy(prefix)
|
||||||
|
);
|
||||||
|
|
||||||
|
completer.fetch(
|
||||||
working_set,
|
working_set,
|
||||||
&self.stack,
|
&self.stack,
|
||||||
prefix.clone(),
|
prefix,
|
||||||
new_span,
|
new_span,
|
||||||
offset,
|
offset,
|
||||||
pos,
|
pos,
|
||||||
&options,
|
&options,
|
||||||
);
|
)
|
||||||
|
|
||||||
// Sort
|
|
||||||
suggestions = completer.sort(suggestions, prefix);
|
|
||||||
|
|
||||||
suggestions
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn external_completion(
|
fn external_completion(
|
||||||
@ -104,18 +196,24 @@ impl NuCompleter {
|
|||||||
);
|
);
|
||||||
|
|
||||||
match result.and_then(|data| data.into_value(span)) {
|
match result.and_then(|data| data.into_value(span)) {
|
||||||
Ok(value) => {
|
Ok(Value::List { vals, .. }) => {
|
||||||
if let Value::List { vals, .. } = value {
|
|
||||||
let result =
|
let result =
|
||||||
map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);
|
map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);
|
||||||
|
Some(result)
|
||||||
return Some(result);
|
}
|
||||||
|
Ok(Value::Nothing { .. }) => None,
|
||||||
|
Ok(value) => {
|
||||||
|
log::error!(
|
||||||
|
"External completer returned invalid value of type {}",
|
||||||
|
value.get_type().to_string()
|
||||||
|
);
|
||||||
|
Some(vec![])
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
log::error!("failed to eval completer block: {err}");
|
||||||
|
Some(vec![])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(err) => println!("failed to eval completer block: {err}"),
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
|
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
|
||||||
@ -133,27 +231,61 @@ impl NuCompleter {
|
|||||||
|
|
||||||
let config = self.engine_state.get_config();
|
let config = self.engine_state.get_config();
|
||||||
|
|
||||||
let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
|
let block = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
|
||||||
|
let Some(element_expression) = block.find_map(&working_set, &|expr: &Expression| {
|
||||||
|
find_pipeline_element_by_position(expr, &working_set, pos)
|
||||||
|
}) else {
|
||||||
|
return vec![];
|
||||||
|
};
|
||||||
|
|
||||||
for pipeline in &output.pipelines {
|
match &element_expression.expr {
|
||||||
for pipeline_element in &pipeline.elements {
|
Expr::Var(_) => {
|
||||||
let flattened = flatten_pipeline_element(&working_set, pipeline_element);
|
return self.variable_names_completion_helper(
|
||||||
|
&working_set,
|
||||||
|
element_expression.span,
|
||||||
|
fake_offset,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
Expr::FullCellPath(full_cell_path) => {
|
||||||
|
// e.g. `$e<tab>` parsed as FullCellPath
|
||||||
|
if full_cell_path.tail.is_empty() {
|
||||||
|
return self.variable_names_completion_helper(
|
||||||
|
&working_set,
|
||||||
|
element_expression.span,
|
||||||
|
fake_offset,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
let mut cell_path_completer = CellPathCompletion { full_cell_path };
|
||||||
|
return self.process_completion(
|
||||||
|
&mut cell_path_completer,
|
||||||
|
&working_set,
|
||||||
|
&[],
|
||||||
|
element_expression.span,
|
||||||
|
fake_offset,
|
||||||
|
pos,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
let flattened = flatten_expression(&working_set, element_expression);
|
||||||
let mut spans: Vec<String> = vec![];
|
let mut spans: Vec<String> = vec![];
|
||||||
|
|
||||||
for (flat_idx, flat) in flattened.iter().enumerate() {
|
for (flat_idx, (span, shape)) in flattened.iter().enumerate() {
|
||||||
let is_passthrough_command = spans
|
let is_passthrough_command = spans
|
||||||
.first()
|
.first()
|
||||||
.filter(|content| content.as_str() == "sudo" || content.as_str() == "doas")
|
.filter(|content| content.as_str() == "sudo" || content.as_str() == "doas")
|
||||||
.is_some();
|
.is_some();
|
||||||
// Read the current spam to string
|
|
||||||
let current_span = working_set.get_span_contents(flat.0).to_vec();
|
|
||||||
let current_span_str = String::from_utf8_lossy(¤t_span);
|
|
||||||
|
|
||||||
let is_last_span = pos >= flat.0.start && pos < flat.0.end;
|
// Read the current span to string
|
||||||
|
let current_span = working_set.get_span_contents(*span);
|
||||||
|
let current_span_str = String::from_utf8_lossy(current_span);
|
||||||
|
let is_last_span = span.contains(pos);
|
||||||
|
|
||||||
// Skip the last 'a' as span item
|
// Skip the last 'a' as span item
|
||||||
if is_last_span {
|
if is_last_span {
|
||||||
let offset = pos - flat.0.start;
|
let offset = pos - span.start;
|
||||||
if offset == 0 {
|
if offset == 0 {
|
||||||
spans.push(String::new())
|
spans.push(String::new())
|
||||||
} else {
|
} else {
|
||||||
@ -167,25 +299,27 @@ impl NuCompleter {
|
|||||||
|
|
||||||
// Complete based on the last span
|
// Complete based on the last span
|
||||||
if is_last_span {
|
if is_last_span {
|
||||||
// Context variables
|
|
||||||
let most_left_var =
|
|
||||||
most_left_variable(flat_idx, &working_set, flattened.clone());
|
|
||||||
|
|
||||||
// Create a new span
|
// Create a new span
|
||||||
let new_span = Span::new(flat.0.start, flat.0.end - 1);
|
let new_span = Span::new(span.start, span.end - 1);
|
||||||
|
|
||||||
// Parses the prefix. Completion should look up to the cursor position, not after.
|
// Parses the prefix. Completion should look up to the cursor position, not after.
|
||||||
let mut prefix = working_set.get_span_contents(flat.0).to_vec();
|
let index = pos - span.start;
|
||||||
let index = pos - flat.0.start;
|
let prefix = ¤t_span[..index];
|
||||||
prefix.drain(index..);
|
|
||||||
|
|
||||||
// Variables completion
|
|
||||||
if prefix.starts_with(b"$") || most_left_var.is_some() {
|
|
||||||
let mut completer =
|
|
||||||
VariableCompletion::new(most_left_var.unwrap_or((vec![], vec![])));
|
|
||||||
|
|
||||||
|
if let Expr::AttributeBlock(ab) = &element_expression.expr {
|
||||||
|
let last_attr = ab.attributes.last().expect("at least one attribute");
|
||||||
|
if let Expr::Garbage = last_attr.expr.expr {
|
||||||
return self.process_completion(
|
return self.process_completion(
|
||||||
&mut completer,
|
&mut AttributeCompletion,
|
||||||
|
&working_set,
|
||||||
|
prefix,
|
||||||
|
new_span,
|
||||||
|
fake_offset,
|
||||||
|
pos,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
return self.process_completion(
|
||||||
|
&mut AttributableCompletion,
|
||||||
&working_set,
|
&working_set,
|
||||||
prefix,
|
prefix,
|
||||||
new_span,
|
new_span,
|
||||||
@ -193,15 +327,16 @@ impl NuCompleter {
|
|||||||
pos,
|
pos,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Flags completion
|
// Flags completion
|
||||||
if prefix.starts_with(b"-") {
|
if prefix.starts_with(b"-") {
|
||||||
// Try to complete flag internally
|
// Try to complete flag internally
|
||||||
let mut completer = FlagCompletion::new(pipeline_element.expr.clone());
|
let mut completer = FlagCompletion::new(element_expression.clone());
|
||||||
let result = self.process_completion(
|
let result = self.process_completion(
|
||||||
&mut completer,
|
&mut completer,
|
||||||
&working_set,
|
&working_set,
|
||||||
prefix.clone(),
|
prefix,
|
||||||
new_span,
|
new_span,
|
||||||
fake_offset,
|
fake_offset,
|
||||||
pos,
|
pos,
|
||||||
@ -213,7 +348,7 @@ impl NuCompleter {
|
|||||||
|
|
||||||
// We got no results for internal completion
|
// We got no results for internal completion
|
||||||
// now we can check if external completer is set and use it
|
// now we can check if external completer is set and use it
|
||||||
if let Some(closure) = config.external_completer.as_ref() {
|
if let Some(closure) = config.completions.external.completer.as_ref() {
|
||||||
if let Some(external_result) =
|
if let Some(external_result) =
|
||||||
self.external_completion(closure, &spans, fake_offset, new_span)
|
self.external_completion(closure, &spans, fake_offset, new_span)
|
||||||
{
|
{
|
||||||
@ -246,8 +381,7 @@ impl NuCompleter {
|
|||||||
if (is_passthrough_command && flat_idx > 1) || flat_idx > 0 {
|
if (is_passthrough_command && flat_idx > 1) || flat_idx > 0 {
|
||||||
if let Some(previous_expr) = flattened.get(flat_idx - 1) {
|
if let Some(previous_expr) = flattened.get(flat_idx - 1) {
|
||||||
// Read the content for the previous expression
|
// Read the content for the previous expression
|
||||||
let prev_expr_str =
|
let prev_expr_str = working_set.get_span_contents(previous_expr.0).to_vec();
|
||||||
working_set.get_span_contents(previous_expr.0).to_vec();
|
|
||||||
|
|
||||||
// Completion for .nu files
|
// Completion for .nu files
|
||||||
if prev_expr_str == b"use"
|
if prev_expr_str == b"use"
|
||||||
@ -275,17 +409,40 @@ impl NuCompleter {
|
|||||||
fake_offset,
|
fake_offset,
|
||||||
pos,
|
pos,
|
||||||
);
|
);
|
||||||
|
} else if matches!(
|
||||||
|
previous_expr.1,
|
||||||
|
FlatShape::Float
|
||||||
|
| FlatShape::Int
|
||||||
|
| FlatShape::String
|
||||||
|
| FlatShape::List
|
||||||
|
| FlatShape::Bool
|
||||||
|
| FlatShape::Variable(_)
|
||||||
|
) {
|
||||||
|
let mut completer = OperatorCompletion::new(element_expression.clone());
|
||||||
|
|
||||||
|
let operator_suggestion = self.process_completion(
|
||||||
|
&mut completer,
|
||||||
|
&working_set,
|
||||||
|
prefix,
|
||||||
|
new_span,
|
||||||
|
fake_offset,
|
||||||
|
pos,
|
||||||
|
);
|
||||||
|
if !operator_suggestion.is_empty() {
|
||||||
|
return operator_suggestion;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Match other types
|
// Match other types
|
||||||
match &flat.1 {
|
match shape {
|
||||||
FlatShape::Custom(decl_id) => {
|
FlatShape::Custom(decl_id) => {
|
||||||
let mut completer = CustomCompletion::new(
|
let mut completer = CustomCompletion::new(
|
||||||
self.stack.clone(),
|
self.stack.clone(),
|
||||||
*decl_id,
|
*decl_id,
|
||||||
initial_line,
|
initial_line,
|
||||||
|
FileCompletion::new(),
|
||||||
);
|
);
|
||||||
|
|
||||||
return self.process_completion(
|
return self.process_completion(
|
||||||
@ -332,7 +489,7 @@ impl NuCompleter {
|
|||||||
let mut out: Vec<_> = self.process_completion(
|
let mut out: Vec<_> = self.process_completion(
|
||||||
&mut completer,
|
&mut completer,
|
||||||
&working_set,
|
&working_set,
|
||||||
prefix.clone(),
|
prefix,
|
||||||
new_span,
|
new_span,
|
||||||
fake_offset,
|
fake_offset,
|
||||||
pos,
|
pos,
|
||||||
@ -343,13 +500,10 @@ impl NuCompleter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Try to complete using an external completer (if set)
|
// Try to complete using an external completer (if set)
|
||||||
if let Some(closure) = config.external_completer.as_ref() {
|
if let Some(closure) = config.completions.external.completer.as_ref() {
|
||||||
if let Some(external_result) = self.external_completion(
|
if let Some(external_result) =
|
||||||
closure,
|
self.external_completion(closure, &spans, fake_offset, new_span)
|
||||||
&spans,
|
{
|
||||||
fake_offset,
|
|
||||||
new_span,
|
|
||||||
) {
|
|
||||||
return external_result;
|
return external_result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -372,8 +526,6 @@ impl NuCompleter {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
vec![]
|
vec![]
|
||||||
}
|
}
|
||||||
@ -388,56 +540,6 @@ impl ReedlineCompleter for NuCompleter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// reads the most left variable returning it's name (e.g: $myvar)
|
|
||||||
// and the depth (a.b.c)
|
|
||||||
fn most_left_variable(
|
|
||||||
idx: usize,
|
|
||||||
working_set: &StateWorkingSet<'_>,
|
|
||||||
flattened: Vec<(Span, FlatShape)>,
|
|
||||||
) -> Option<(Vec<u8>, Vec<Vec<u8>>)> {
|
|
||||||
// Reverse items to read the list backwards and truncate
|
|
||||||
// because the only items that matters are the ones before the current index
|
|
||||||
let mut rev = flattened;
|
|
||||||
rev.truncate(idx);
|
|
||||||
rev = rev.into_iter().rev().collect();
|
|
||||||
|
|
||||||
// Store the variables and sub levels found and reverse to correct order
|
|
||||||
let mut variables_found: Vec<Vec<u8>> = vec![];
|
|
||||||
let mut found_var = false;
|
|
||||||
for item in rev.clone() {
|
|
||||||
let result = working_set.get_span_contents(item.0).to_vec();
|
|
||||||
|
|
||||||
match item.1 {
|
|
||||||
FlatShape::Variable(_) => {
|
|
||||||
variables_found.push(result);
|
|
||||||
found_var = true;
|
|
||||||
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
FlatShape::String => {
|
|
||||||
variables_found.push(result);
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If most left var was not found
|
|
||||||
if !found_var {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reverse the order back
|
|
||||||
variables_found = variables_found.into_iter().rev().collect();
|
|
||||||
|
|
||||||
// Extract the variable and the sublevels
|
|
||||||
let var = variables_found.first().unwrap_or(&vec![]).to_vec();
|
|
||||||
let sublevels: Vec<Vec<u8>> = variables_found.into_iter().skip(1).collect();
|
|
||||||
|
|
||||||
Some((var, sublevels))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn map_value_completions<'a>(
|
pub fn map_value_completions<'a>(
|
||||||
list: impl Iterator<Item = &'a Value>,
|
list: impl Iterator<Item = &'a Value>,
|
||||||
span: Span,
|
span: Span,
|
||||||
@ -449,14 +551,11 @@ pub fn map_value_completions<'a>(
|
|||||||
return Some(SemanticSuggestion {
|
return Some(SemanticSuggestion {
|
||||||
suggestion: Suggestion {
|
suggestion: Suggestion {
|
||||||
value: s,
|
value: s,
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: span.start - offset,
|
start: span.start - offset,
|
||||||
end: span.end - offset,
|
end: span.end - offset,
|
||||||
},
|
},
|
||||||
append_whitespace: false,
|
..Suggestion::default()
|
||||||
},
|
},
|
||||||
kind: Some(SuggestionKind::Type(x.get_type())),
|
kind: Some(SuggestionKind::Type(x.get_type())),
|
||||||
});
|
});
|
||||||
@ -466,14 +565,11 @@ pub fn map_value_completions<'a>(
|
|||||||
if let Ok(record) = x.as_record() {
|
if let Ok(record) = x.as_record() {
|
||||||
let mut suggestion = Suggestion {
|
let mut suggestion = Suggestion {
|
||||||
value: String::from(""), // Initialize with empty string
|
value: String::from(""), // Initialize with empty string
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: span.start - offset,
|
start: span.start - offset,
|
||||||
end: span.end - offset,
|
end: span.end - offset,
|
||||||
},
|
},
|
||||||
append_whitespace: false,
|
..Suggestion::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
// Iterate the cols looking for `value` and `description`
|
// Iterate the cols looking for `value` and `description`
|
||||||
@ -542,6 +638,11 @@ mod completer_tests {
|
|||||||
|
|
||||||
let mut completer = NuCompleter::new(engine_state.into(), Arc::new(Stack::new()));
|
let mut completer = NuCompleter::new(engine_state.into(), Arc::new(Stack::new()));
|
||||||
let dataset = [
|
let dataset = [
|
||||||
|
("1 bit-sh", true, "b", vec!["bit-shl", "bit-shr"]),
|
||||||
|
("1.0 bit-sh", false, "b", vec![]),
|
||||||
|
("1 m", true, "m", vec!["mod"]),
|
||||||
|
("1.0 m", true, "m", vec!["mod"]),
|
||||||
|
("\"a\" s", true, "s", vec!["starts-with"]),
|
||||||
("sudo", false, "", Vec::new()),
|
("sudo", false, "", Vec::new()),
|
||||||
("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]),
|
("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]),
|
||||||
(" sudo", false, "", Vec::new()),
|
(" sudo", false, "", Vec::new()),
|
||||||
|
@ -1,67 +1,111 @@
|
|||||||
use crate::completions::{matches, CompletionOptions};
|
use super::{completion_options::NuMatcher, MatchAlgorithm};
|
||||||
|
use crate::completions::CompletionOptions;
|
||||||
use nu_ansi_term::Style;
|
use nu_ansi_term::Style;
|
||||||
use nu_engine::env_to_string;
|
use nu_engine::env_to_string;
|
||||||
use nu_path::home_dir;
|
use nu_path::dots::expand_ndots;
|
||||||
|
use nu_path::{expand_to_real_path, home_dir};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
Span,
|
Span,
|
||||||
};
|
};
|
||||||
use nu_utils::get_ls_colors;
|
use nu_utils::get_ls_colors;
|
||||||
use std::path::{
|
use nu_utils::IgnoreCaseExt;
|
||||||
is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR,
|
use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct PathBuiltFromString {
|
pub struct PathBuiltFromString {
|
||||||
|
cwd: PathBuf,
|
||||||
parts: Vec<String>,
|
parts: Vec<String>,
|
||||||
isdir: bool,
|
isdir: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Recursively goes through paths that match a given `partial`.
|
||||||
|
/// built: State struct for a valid matching path built so far.
|
||||||
|
///
|
||||||
|
/// `isdir`: whether the current partial path has a trailing slash.
|
||||||
|
/// Parsing a path string into a pathbuf loses that bit of information.
|
||||||
|
///
|
||||||
|
/// want_directory: Whether we want only directories as completion matches.
|
||||||
|
/// Some commands like `cd` can only be run on directories whereas others
|
||||||
|
/// like `ls` can be run on regular files as well.
|
||||||
fn complete_rec(
|
fn complete_rec(
|
||||||
partial: &[&str],
|
partial: &[&str],
|
||||||
built: &PathBuiltFromString,
|
built_paths: &[PathBuiltFromString],
|
||||||
cwd: &Path,
|
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
dir: bool,
|
want_directory: bool,
|
||||||
isdir: bool,
|
isdir: bool,
|
||||||
) -> Vec<PathBuiltFromString> {
|
) -> Vec<PathBuiltFromString> {
|
||||||
let mut completions = vec![];
|
|
||||||
|
|
||||||
if let Some((&base, rest)) = partial.split_first() {
|
if let Some((&base, rest)) = partial.split_first() {
|
||||||
if (base == "." || base == "..") && (isdir || !rest.is_empty()) {
|
if base.chars().all(|c| c == '.') && (isdir || !rest.is_empty()) {
|
||||||
|
let built_paths: Vec<_> = built_paths
|
||||||
|
.iter()
|
||||||
|
.map(|built| {
|
||||||
let mut built = built.clone();
|
let mut built = built.clone();
|
||||||
built.parts.push(base.to_string());
|
built.parts.push(base.to_string());
|
||||||
built.isdir = true;
|
built.isdir = true;
|
||||||
return complete_rec(rest, &built, cwd, options, dir, isdir);
|
built
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
return complete_rec(rest, &built_paths, options, want_directory, isdir);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut built_path = cwd.to_path_buf();
|
let prefix = partial.first().unwrap_or(&"");
|
||||||
|
let mut matcher = NuMatcher::new(prefix, options.clone());
|
||||||
|
|
||||||
|
for built in built_paths {
|
||||||
|
let mut path = built.cwd.clone();
|
||||||
for part in &built.parts {
|
for part in &built.parts {
|
||||||
built_path.push(part);
|
path.push(part);
|
||||||
}
|
}
|
||||||
|
|
||||||
let Ok(result) = built_path.read_dir() else {
|
let Ok(result) = path.read_dir() else {
|
||||||
return completions;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
for entry in result.filter_map(|e| e.ok()) {
|
for entry in result.filter_map(|e| e.ok()) {
|
||||||
let entry_name = entry.file_name().to_string_lossy().into_owned();
|
let entry_name = entry.file_name().to_string_lossy().into_owned();
|
||||||
let entry_isdir = entry.path().is_dir();
|
let entry_isdir = entry.path().is_dir() && !entry.path().is_symlink();
|
||||||
let mut built = built.clone();
|
let mut built = built.clone();
|
||||||
built.parts.push(entry_name.clone());
|
built.parts.push(entry_name.clone());
|
||||||
built.isdir = entry_isdir;
|
built.isdir = entry_isdir;
|
||||||
|
|
||||||
if !dir || entry_isdir {
|
if !want_directory || entry_isdir {
|
||||||
|
matcher.add(entry_name.clone(), (entry_name, built));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut completions = vec![];
|
||||||
|
for (entry_name, built) in matcher.results() {
|
||||||
match partial.split_first() {
|
match partial.split_first() {
|
||||||
Some((base, rest)) => {
|
Some((base, rest)) => {
|
||||||
if matches(base, &entry_name, options) {
|
// We use `isdir` to confirm that the current component has
|
||||||
|
// at least one next component or a slash.
|
||||||
|
// Serves as confirmation to ignore longer completions for
|
||||||
|
// components in between.
|
||||||
if !rest.is_empty() || isdir {
|
if !rest.is_empty() || isdir {
|
||||||
completions
|
completions.extend(complete_rec(
|
||||||
.extend(complete_rec(rest, &built, cwd, options, dir, isdir));
|
rest,
|
||||||
|
&[built],
|
||||||
|
options,
|
||||||
|
want_directory,
|
||||||
|
isdir,
|
||||||
|
));
|
||||||
} else {
|
} else {
|
||||||
completions.push(built);
|
completions.push(built);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// For https://github.com/nushell/nushell/issues/13204
|
||||||
|
if isdir && options.match_algorithm == MatchAlgorithm::Prefix {
|
||||||
|
let exact_match = if options.case_sensitive {
|
||||||
|
entry_name.eq(base)
|
||||||
|
} else {
|
||||||
|
entry_name.to_folded_case().eq(&base.to_folded_case())
|
||||||
|
};
|
||||||
|
if exact_match {
|
||||||
|
break;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
@ -69,7 +113,6 @@ fn complete_rec(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
completions
|
completions
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -81,16 +124,16 @@ enum OriginalCwd {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl OriginalCwd {
|
impl OriginalCwd {
|
||||||
fn apply(&self, mut p: PathBuiltFromString) -> String {
|
fn apply(&self, mut p: PathBuiltFromString, path_separator: char) -> String {
|
||||||
match self {
|
match self {
|
||||||
Self::None => {}
|
Self::None => {}
|
||||||
Self::Home => p.parts.insert(0, "~".to_string()),
|
Self::Home => p.parts.insert(0, "~".to_string()),
|
||||||
Self::Prefix(s) => p.parts.insert(0, s.clone()),
|
Self::Prefix(s) => p.parts.insert(0, s.clone()),
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut ret = p.parts.join(MAIN_SEPARATOR_STR);
|
let mut ret = p.parts.join(&path_separator.to_string());
|
||||||
if p.isdir {
|
if p.isdir {
|
||||||
ret.push(SEP);
|
ret.push(path_separator);
|
||||||
}
|
}
|
||||||
ret
|
ret
|
||||||
}
|
}
|
||||||
@ -110,29 +153,58 @@ fn surround_remove(partial: &str) -> String {
|
|||||||
partial.to_string()
|
partial.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct FileSuggestion {
|
||||||
|
pub span: nu_protocol::Span,
|
||||||
|
pub path: String,
|
||||||
|
pub style: Option<Style>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// # Parameters
|
||||||
|
/// * `cwds` - A list of directories in which to search. The only reason this isn't a single string
|
||||||
|
/// is because dotnu_completions searches in multiple directories at once
|
||||||
pub fn complete_item(
|
pub fn complete_item(
|
||||||
want_directory: bool,
|
want_directory: bool,
|
||||||
span: nu_protocol::Span,
|
span: nu_protocol::Span,
|
||||||
partial: &str,
|
partial: &str,
|
||||||
cwd: &str,
|
cwds: &[impl AsRef<str>],
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
|
) -> Vec<FileSuggestion> {
|
||||||
let partial = surround_remove(partial);
|
let cleaned_partial = surround_remove(partial);
|
||||||
let isdir = partial.ends_with(is_separator);
|
let isdir = cleaned_partial.ends_with(is_separator);
|
||||||
let cwd_pathbuf = Path::new(cwd).to_path_buf();
|
let expanded_partial = expand_ndots(Path::new(&cleaned_partial));
|
||||||
let ls_colors = (engine_state.config.use_ls_colors_completions
|
let should_collapse_dots = expanded_partial != Path::new(&cleaned_partial);
|
||||||
&& engine_state.config.use_ansi_coloring)
|
let mut partial = expanded_partial.to_string_lossy().to_string();
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
let path_separator = SEP;
|
||||||
|
#[cfg(windows)]
|
||||||
|
let path_separator = cleaned_partial
|
||||||
|
.chars()
|
||||||
|
.rfind(|c: &char| is_separator(*c))
|
||||||
|
.unwrap_or(SEP);
|
||||||
|
|
||||||
|
// Handle the trailing dot case
|
||||||
|
if cleaned_partial.ends_with(&format!("{path_separator}.")) {
|
||||||
|
partial.push_str(&format!("{path_separator}."));
|
||||||
|
}
|
||||||
|
|
||||||
|
let cwd_pathbufs: Vec<_> = cwds
|
||||||
|
.iter()
|
||||||
|
.map(|cwd| Path::new(cwd.as_ref()).to_path_buf())
|
||||||
|
.collect();
|
||||||
|
let ls_colors = (engine_state.config.completions.use_ls_colors
|
||||||
|
&& engine_state.config.use_ansi_coloring.get(engine_state))
|
||||||
.then(|| {
|
.then(|| {
|
||||||
let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
|
let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
|
||||||
Some(v) => env_to_string("LS_COLORS", &v, engine_state, stack).ok(),
|
Some(v) => env_to_string("LS_COLORS", v, engine_state, stack).ok(),
|
||||||
None => None,
|
None => None,
|
||||||
};
|
};
|
||||||
get_ls_colors(ls_colors_env_str)
|
get_ls_colors(ls_colors_env_str)
|
||||||
});
|
});
|
||||||
|
|
||||||
let mut cwd = cwd_pathbuf.clone();
|
let mut cwds = cwd_pathbufs.clone();
|
||||||
let mut prefix_len = 0;
|
let mut prefix_len = 0;
|
||||||
let mut original_cwd = OriginalCwd::None;
|
let mut original_cwd = OriginalCwd::None;
|
||||||
|
|
||||||
@ -140,25 +212,21 @@ pub fn complete_item(
|
|||||||
match components.peek().cloned() {
|
match components.peek().cloned() {
|
||||||
Some(c @ Component::Prefix(..)) => {
|
Some(c @ Component::Prefix(..)) => {
|
||||||
// windows only by definition
|
// windows only by definition
|
||||||
components.next();
|
cwds = vec![[c, Component::RootDir].iter().collect()];
|
||||||
if let Some(Component::RootDir) = components.peek().cloned() {
|
|
||||||
components.next();
|
|
||||||
};
|
|
||||||
cwd = [c, Component::RootDir].iter().collect();
|
|
||||||
prefix_len = c.as_os_str().len();
|
prefix_len = c.as_os_str().len();
|
||||||
original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned());
|
original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned());
|
||||||
}
|
}
|
||||||
Some(c @ Component::RootDir) => {
|
Some(c @ Component::RootDir) => {
|
||||||
components.next();
|
|
||||||
// This is kind of a hack. When joining an empty string with the rest,
|
// This is kind of a hack. When joining an empty string with the rest,
|
||||||
// we add the slash automagically
|
// we add the slash automagically
|
||||||
cwd = PathBuf::from(c.as_os_str());
|
cwds = vec![PathBuf::from(c.as_os_str())];
|
||||||
prefix_len = 1;
|
prefix_len = 1;
|
||||||
original_cwd = OriginalCwd::Prefix(String::new());
|
original_cwd = OriginalCwd::Prefix(String::new());
|
||||||
}
|
}
|
||||||
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
|
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
|
||||||
components.next();
|
cwds = home_dir()
|
||||||
cwd = home_dir().unwrap_or(cwd_pathbuf);
|
.map(|dir| vec![dir.into()])
|
||||||
|
.unwrap_or(cwd_pathbufs);
|
||||||
prefix_len = 1;
|
prefix_len = 1;
|
||||||
original_cwd = OriginalCwd::Home;
|
original_cwd = OriginalCwd::Home;
|
||||||
}
|
}
|
||||||
@ -175,21 +243,39 @@ pub fn complete_item(
|
|||||||
|
|
||||||
complete_rec(
|
complete_rec(
|
||||||
partial.as_slice(),
|
partial.as_slice(),
|
||||||
&PathBuiltFromString::default(),
|
&cwds
|
||||||
&cwd,
|
.into_iter()
|
||||||
|
.map(|cwd| PathBuiltFromString {
|
||||||
|
cwd,
|
||||||
|
parts: Vec::new(),
|
||||||
|
isdir: false,
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
options,
|
options,
|
||||||
want_directory,
|
want_directory,
|
||||||
isdir,
|
isdir,
|
||||||
)
|
)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|p| {
|
.map(|mut p| {
|
||||||
let path = original_cwd.apply(p);
|
if should_collapse_dots {
|
||||||
|
p = collapse_ndots(p);
|
||||||
|
}
|
||||||
|
let path = original_cwd.apply(p, path_separator);
|
||||||
let style = ls_colors.as_ref().map(|lsc| {
|
let style = ls_colors.as_ref().map(|lsc| {
|
||||||
lsc.style_for_path_with_metadata(&path, std::fs::symlink_metadata(&path).ok().as_ref())
|
lsc.style_for_path_with_metadata(
|
||||||
|
&path,
|
||||||
|
std::fs::symlink_metadata(expand_to_real_path(&path))
|
||||||
|
.ok()
|
||||||
|
.as_ref(),
|
||||||
|
)
|
||||||
.map(lscolors::Style::to_nu_ansi_term_style)
|
.map(lscolors::Style::to_nu_ansi_term_style)
|
||||||
.unwrap_or_default()
|
.unwrap_or_default()
|
||||||
});
|
});
|
||||||
(span, escape_path(path, want_directory), style)
|
FileSuggestion {
|
||||||
|
span,
|
||||||
|
path: escape_path(path, want_directory),
|
||||||
|
style,
|
||||||
|
}
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
@ -197,8 +283,9 @@ pub fn complete_item(
|
|||||||
// Fix files or folders with quotes or hashes
|
// Fix files or folders with quotes or hashes
|
||||||
pub fn escape_path(path: String, dir: bool) -> String {
|
pub fn escape_path(path: String, dir: bool) -> String {
|
||||||
// make glob pattern have the highest priority.
|
// make glob pattern have the highest priority.
|
||||||
let glob_contaminated = path.contains(['[', '*', ']', '?']);
|
if nu_glob::is_glob(path.as_str()) {
|
||||||
if glob_contaminated {
|
let pathbuf = nu_path::expand_tilde(path);
|
||||||
|
let path = pathbuf.to_string_lossy();
|
||||||
return if path.contains('\'') {
|
return if path.contains('\'') {
|
||||||
// decide to use double quote, also need to escape `"` in path
|
// decide to use double quote, also need to escape `"` in path
|
||||||
// or else users can't do anything with completed path either.
|
// or else users can't do anything with completed path either.
|
||||||
@ -211,8 +298,10 @@ pub fn escape_path(path: String, dir: bool) -> String {
|
|||||||
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
|
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
|
||||||
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
|
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
|
||||||
let maybe_flag = path.starts_with('-');
|
let maybe_flag = path.starts_with('-');
|
||||||
|
let maybe_variable = path.starts_with('$');
|
||||||
let maybe_number = path.parse::<f64>().is_ok();
|
let maybe_number = path.parse::<f64>().is_ok();
|
||||||
if filename_contaminated || dirname_contaminated || maybe_flag || maybe_number {
|
if filename_contaminated || dirname_contaminated || maybe_flag || maybe_variable || maybe_number
|
||||||
|
{
|
||||||
format!("`{path}`")
|
format!("`{path}`")
|
||||||
} else {
|
} else {
|
||||||
path
|
path
|
||||||
@ -251,3 +340,38 @@ pub fn adjust_if_intermediate(
|
|||||||
readjusted,
|
readjusted,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Collapse multiple ".." components into n-dots.
|
||||||
|
///
|
||||||
|
/// It performs the reverse operation of `expand_ndots`, collapsing sequences of ".." into n-dots,
|
||||||
|
/// such as "..." and "....".
|
||||||
|
///
|
||||||
|
/// The resulting path will use platform-specific path separators, regardless of what path separators were used in the input.
|
||||||
|
fn collapse_ndots(path: PathBuiltFromString) -> PathBuiltFromString {
|
||||||
|
let mut result = PathBuiltFromString {
|
||||||
|
parts: Vec::with_capacity(path.parts.len()),
|
||||||
|
isdir: path.isdir,
|
||||||
|
cwd: path.cwd,
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut dot_count = 0;
|
||||||
|
|
||||||
|
for part in path.parts {
|
||||||
|
if part == ".." {
|
||||||
|
dot_count += 1;
|
||||||
|
} else {
|
||||||
|
if dot_count > 0 {
|
||||||
|
result.parts.push(".".repeat(dot_count + 1));
|
||||||
|
dot_count = 0;
|
||||||
|
}
|
||||||
|
result.parts.push(part);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add any remaining dots
|
||||||
|
if dot_count > 0 {
|
||||||
|
result.parts.push(".".repeat(dot_count + 1));
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
@ -1,17 +1,16 @@
|
|||||||
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
|
|
||||||
use nu_parser::trim_quotes_str;
|
use nu_parser::trim_quotes_str;
|
||||||
use nu_protocol::CompletionAlgorithm;
|
use nu_protocol::{CompletionAlgorithm, CompletionSort};
|
||||||
use std::fmt::Display;
|
use nu_utils::IgnoreCaseExt;
|
||||||
|
use nucleo_matcher::{
|
||||||
|
pattern::{Atom, AtomKind, CaseMatching, Normalization},
|
||||||
|
Config, Matcher, Utf32Str,
|
||||||
|
};
|
||||||
|
use std::{borrow::Cow, fmt::Display};
|
||||||
|
|
||||||
#[derive(Copy, Clone)]
|
use super::SemanticSuggestion;
|
||||||
pub enum SortBy {
|
|
||||||
LevenshteinDistance,
|
|
||||||
Ascending,
|
|
||||||
None,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Describes how suggestions should be matched.
|
/// Describes how suggestions should be matched.
|
||||||
#[derive(Copy, Clone, Debug)]
|
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||||
pub enum MatchAlgorithm {
|
pub enum MatchAlgorithm {
|
||||||
/// Only show suggestions which begin with the given input
|
/// Only show suggestions which begin with the given input
|
||||||
///
|
///
|
||||||
@ -26,32 +25,169 @@ pub enum MatchAlgorithm {
|
|||||||
Fuzzy,
|
Fuzzy,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl MatchAlgorithm {
|
pub struct NuMatcher<T> {
|
||||||
/// Returns whether the `needle` search text matches the given `haystack`.
|
options: CompletionOptions,
|
||||||
pub fn matches_str(&self, haystack: &str, needle: &str) -> bool {
|
needle: String,
|
||||||
|
state: State<T>,
|
||||||
|
}
|
||||||
|
|
||||||
|
enum State<T> {
|
||||||
|
Prefix {
|
||||||
|
/// Holds (haystack, item)
|
||||||
|
items: Vec<(String, T)>,
|
||||||
|
},
|
||||||
|
Fuzzy {
|
||||||
|
matcher: Matcher,
|
||||||
|
atom: Atom,
|
||||||
|
/// Holds (haystack, item, score)
|
||||||
|
items: Vec<(String, T, u16)>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Filters and sorts suggestions
|
||||||
|
impl<T> NuMatcher<T> {
|
||||||
|
/// # Arguments
|
||||||
|
///
|
||||||
|
/// * `needle` - The text to search for
|
||||||
|
pub fn new(needle: impl AsRef<str>, options: CompletionOptions) -> NuMatcher<T> {
|
||||||
|
let needle = trim_quotes_str(needle.as_ref());
|
||||||
|
match options.match_algorithm {
|
||||||
|
MatchAlgorithm::Prefix => {
|
||||||
|
let lowercase_needle = if options.case_sensitive {
|
||||||
|
needle.to_owned()
|
||||||
|
} else {
|
||||||
|
needle.to_folded_case()
|
||||||
|
};
|
||||||
|
NuMatcher {
|
||||||
|
options,
|
||||||
|
needle: lowercase_needle,
|
||||||
|
state: State::Prefix { items: Vec::new() },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
MatchAlgorithm::Fuzzy => {
|
||||||
|
let atom = Atom::new(
|
||||||
|
needle,
|
||||||
|
if options.case_sensitive {
|
||||||
|
CaseMatching::Respect
|
||||||
|
} else {
|
||||||
|
CaseMatching::Ignore
|
||||||
|
},
|
||||||
|
Normalization::Smart,
|
||||||
|
AtomKind::Fuzzy,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
NuMatcher {
|
||||||
|
options,
|
||||||
|
needle: needle.to_owned(),
|
||||||
|
state: State::Fuzzy {
|
||||||
|
matcher: Matcher::new(Config::DEFAULT),
|
||||||
|
atom,
|
||||||
|
items: Vec::new(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns whether or not the haystack matches the needle. If it does, `item` is added
|
||||||
|
/// to the list of matches (if given).
|
||||||
|
///
|
||||||
|
/// Helper to avoid code duplication between [NuMatcher::add] and [NuMatcher::matches].
|
||||||
|
fn matches_aux(&mut self, haystack: &str, item: Option<T>) -> bool {
|
||||||
let haystack = trim_quotes_str(haystack);
|
let haystack = trim_quotes_str(haystack);
|
||||||
let needle = trim_quotes_str(needle);
|
match &mut self.state {
|
||||||
match *self {
|
State::Prefix { items } => {
|
||||||
MatchAlgorithm::Prefix => haystack.starts_with(needle),
|
let haystack_folded = if self.options.case_sensitive {
|
||||||
MatchAlgorithm::Fuzzy => {
|
Cow::Borrowed(haystack)
|
||||||
let matcher = SkimMatcherV2::default();
|
} else {
|
||||||
matcher.fuzzy_match(haystack, needle).is_some()
|
Cow::Owned(haystack.to_folded_case())
|
||||||
|
};
|
||||||
|
let matches = if self.options.positional {
|
||||||
|
haystack_folded.starts_with(self.needle.as_str())
|
||||||
|
} else {
|
||||||
|
haystack_folded.contains(self.needle.as_str())
|
||||||
|
};
|
||||||
|
if matches {
|
||||||
|
if let Some(item) = item {
|
||||||
|
items.push((haystack.to_string(), item));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
matches
|
||||||
|
}
|
||||||
|
State::Fuzzy {
|
||||||
|
matcher,
|
||||||
|
atom,
|
||||||
|
items,
|
||||||
|
} => {
|
||||||
|
let mut haystack_buf = Vec::new();
|
||||||
|
let haystack_utf32 = Utf32Str::new(trim_quotes_str(haystack), &mut haystack_buf);
|
||||||
|
let mut indices = Vec::new();
|
||||||
|
let Some(score) = atom.indices(haystack_utf32, matcher, &mut indices) else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if let Some(item) = item {
|
||||||
|
items.push((haystack.to_string(), item, score));
|
||||||
|
}
|
||||||
|
true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns whether the `needle` search text matches the given `haystack`.
|
/// Add the given item if the given haystack matches the needle.
|
||||||
pub fn matches_u8(&self, haystack: &[u8], needle: &[u8]) -> bool {
|
///
|
||||||
match *self {
|
/// Returns whether the item was added.
|
||||||
MatchAlgorithm::Prefix => haystack.starts_with(needle),
|
pub fn add(&mut self, haystack: impl AsRef<str>, item: T) -> bool {
|
||||||
MatchAlgorithm::Fuzzy => {
|
self.matches_aux(haystack.as_ref(), Some(item))
|
||||||
let haystack_str = String::from_utf8_lossy(haystack);
|
}
|
||||||
let needle_str = String::from_utf8_lossy(needle);
|
|
||||||
|
|
||||||
let matcher = SkimMatcherV2::default();
|
/// Returns whether the haystack matches the needle.
|
||||||
matcher.fuzzy_match(&haystack_str, &needle_str).is_some()
|
pub fn matches(&mut self, haystack: &str) -> bool {
|
||||||
|
self.matches_aux(haystack, None)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all the items that matched (sorted)
|
||||||
|
pub fn results(self) -> Vec<T> {
|
||||||
|
match self.state {
|
||||||
|
State::Prefix { mut items, .. } => {
|
||||||
|
items.sort_by(|(haystack1, _), (haystack2, _)| {
|
||||||
|
let cmp_sensitive = haystack1.cmp(haystack2);
|
||||||
|
if self.options.case_sensitive {
|
||||||
|
cmp_sensitive
|
||||||
|
} else {
|
||||||
|
haystack1
|
||||||
|
.to_folded_case()
|
||||||
|
.cmp(&haystack2.to_folded_case())
|
||||||
|
.then(cmp_sensitive)
|
||||||
|
}
|
||||||
|
});
|
||||||
|
items.into_iter().map(|(_, item)| item).collect::<Vec<_>>()
|
||||||
|
}
|
||||||
|
State::Fuzzy { mut items, .. } => {
|
||||||
|
match self.options.sort {
|
||||||
|
CompletionSort::Alphabetical => {
|
||||||
|
items.sort_by(|(haystack1, _, _), (haystack2, _, _)| {
|
||||||
|
haystack1.cmp(haystack2)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
CompletionSort::Smart => {
|
||||||
|
items.sort_by(|(haystack1, _, score1), (haystack2, _, score2)| {
|
||||||
|
score2.cmp(score1).then(haystack1.cmp(haystack2))
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
items
|
||||||
|
.into_iter()
|
||||||
|
.map(|(_, item, _)| item)
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl NuMatcher<SemanticSuggestion> {
|
||||||
|
pub fn add_semantic_suggestion(&mut self, sugg: SemanticSuggestion) -> bool {
|
||||||
|
let value = sugg.suggestion.value.to_string();
|
||||||
|
self.add(value, sugg)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -96,6 +232,7 @@ pub struct CompletionOptions {
|
|||||||
pub case_sensitive: bool,
|
pub case_sensitive: bool,
|
||||||
pub positional: bool,
|
pub positional: bool,
|
||||||
pub match_algorithm: MatchAlgorithm,
|
pub match_algorithm: MatchAlgorithm,
|
||||||
|
pub sort: CompletionSort,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for CompletionOptions {
|
impl Default for CompletionOptions {
|
||||||
@ -104,41 +241,74 @@ impl Default for CompletionOptions {
|
|||||||
case_sensitive: true,
|
case_sensitive: true,
|
||||||
positional: true,
|
positional: true,
|
||||||
match_algorithm: MatchAlgorithm::Prefix,
|
match_algorithm: MatchAlgorithm::Prefix,
|
||||||
|
sort: Default::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod test {
|
mod test {
|
||||||
use super::MatchAlgorithm;
|
use rstest::rstest;
|
||||||
|
|
||||||
#[test]
|
use super::{CompletionOptions, MatchAlgorithm, NuMatcher};
|
||||||
fn match_algorithm_prefix() {
|
|
||||||
let algorithm = MatchAlgorithm::Prefix;
|
|
||||||
|
|
||||||
assert!(algorithm.matches_str("example text", ""));
|
#[rstest]
|
||||||
assert!(algorithm.matches_str("example text", "examp"));
|
#[case(MatchAlgorithm::Prefix, "example text", "", true)]
|
||||||
assert!(!algorithm.matches_str("example text", "text"));
|
#[case(MatchAlgorithm::Prefix, "example text", "examp", true)]
|
||||||
|
#[case(MatchAlgorithm::Prefix, "example text", "text", false)]
|
||||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
|
#[case(MatchAlgorithm::Fuzzy, "example text", "", true)]
|
||||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
|
#[case(MatchAlgorithm::Fuzzy, "example text", "examp", true)]
|
||||||
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
|
#[case(MatchAlgorithm::Fuzzy, "example text", "ext", true)]
|
||||||
|
#[case(MatchAlgorithm::Fuzzy, "example text", "mplxt", true)]
|
||||||
|
#[case(MatchAlgorithm::Fuzzy, "example text", "mpp", false)]
|
||||||
|
fn match_algorithm_simple(
|
||||||
|
#[case] match_algorithm: MatchAlgorithm,
|
||||||
|
#[case] haystack: &str,
|
||||||
|
#[case] needle: &str,
|
||||||
|
#[case] should_match: bool,
|
||||||
|
) {
|
||||||
|
let options = CompletionOptions {
|
||||||
|
match_algorithm,
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let mut matcher = NuMatcher::new(needle, options);
|
||||||
|
matcher.add(haystack, haystack);
|
||||||
|
if should_match {
|
||||||
|
assert_eq!(vec![haystack], matcher.results());
|
||||||
|
} else {
|
||||||
|
assert_ne!(vec![haystack], matcher.results());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn match_algorithm_fuzzy() {
|
fn match_algorithm_fuzzy_sort_score() {
|
||||||
let algorithm = MatchAlgorithm::Fuzzy;
|
let options = CompletionOptions {
|
||||||
|
match_algorithm: MatchAlgorithm::Fuzzy,
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let mut matcher = NuMatcher::new("fob", options);
|
||||||
|
for item in ["foo/bar", "fob", "foo bar"] {
|
||||||
|
matcher.add(item, item);
|
||||||
|
}
|
||||||
|
// Sort by score, then in alphabetical order
|
||||||
|
assert_eq!(vec!["fob", "foo bar", "foo/bar"], matcher.results());
|
||||||
|
}
|
||||||
|
|
||||||
assert!(algorithm.matches_str("example text", ""));
|
#[test]
|
||||||
assert!(algorithm.matches_str("example text", "examp"));
|
fn match_algorithm_fuzzy_sort_strip() {
|
||||||
assert!(algorithm.matches_str("example text", "ext"));
|
let options = CompletionOptions {
|
||||||
assert!(algorithm.matches_str("example text", "mplxt"));
|
match_algorithm: MatchAlgorithm::Fuzzy,
|
||||||
assert!(!algorithm.matches_str("example text", "mpp"));
|
..Default::default()
|
||||||
|
};
|
||||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
|
let mut matcher = NuMatcher::new("'love spaces' ", options);
|
||||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
|
for item in [
|
||||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
|
"'i love spaces'",
|
||||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 3]));
|
"'i love spaces' so much",
|
||||||
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 2]));
|
"'lovespaces' ",
|
||||||
|
] {
|
||||||
|
matcher.add(item, item);
|
||||||
|
}
|
||||||
|
// Make sure the spaces are respected
|
||||||
|
assert_eq!(vec!["'i love spaces' so much"], matcher.results());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,45 +1,45 @@
|
|||||||
use crate::completions::{
|
use crate::completions::{
|
||||||
completer::map_value_completions, Completer, CompletionOptions, MatchAlgorithm,
|
completer::map_value_completions, Completer, CompletionOptions, SemanticSuggestion,
|
||||||
SemanticSuggestion, SortBy,
|
|
||||||
};
|
};
|
||||||
use nu_engine::eval_call;
|
use nu_engine::eval_call;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
ast::{Argument, Call, Expr, Expression},
|
ast::{Argument, Call, Expr, Expression},
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{Stack, StateWorkingSet},
|
engine::{Stack, StateWorkingSet},
|
||||||
PipelineData, Span, Type, Value,
|
DeclId, PipelineData, Span, Type, Value,
|
||||||
};
|
};
|
||||||
use nu_utils::IgnoreCaseExt;
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
pub struct CustomCompletion {
|
use super::completion_options::NuMatcher;
|
||||||
|
|
||||||
|
pub struct CustomCompletion<T: Completer> {
|
||||||
stack: Stack,
|
stack: Stack,
|
||||||
decl_id: usize,
|
decl_id: DeclId,
|
||||||
line: String,
|
line: String,
|
||||||
sort_by: SortBy,
|
fallback: T,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CustomCompletion {
|
impl<T: Completer> CustomCompletion<T> {
|
||||||
pub fn new(stack: Stack, decl_id: usize, line: String) -> Self {
|
pub fn new(stack: Stack, decl_id: DeclId, line: String, fallback: T) -> Self {
|
||||||
Self {
|
Self {
|
||||||
stack,
|
stack,
|
||||||
decl_id,
|
decl_id,
|
||||||
line,
|
line,
|
||||||
sort_by: SortBy::None,
|
fallback,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Completer for CustomCompletion {
|
impl<T: Completer> Completer for CustomCompletion<T> {
|
||||||
fn fetch(
|
fn fetch(
|
||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
_stack: &Stack,
|
stack: &Stack,
|
||||||
prefix: Vec<u8>,
|
prefix: &[u8],
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
pos: usize,
|
pos: usize,
|
||||||
completion_options: &CompletionOptions,
|
orig_options: &CompletionOptions,
|
||||||
) -> Vec<SemanticSuggestion> {
|
) -> Vec<SemanticSuggestion> {
|
||||||
// Line position
|
// Line position
|
||||||
let line_pos = pos - offset;
|
let line_pos = pos - offset;
|
||||||
@ -52,30 +52,28 @@ impl Completer for CustomCompletion {
|
|||||||
decl_id: self.decl_id,
|
decl_id: self.decl_id,
|
||||||
head: span,
|
head: span,
|
||||||
arguments: vec![
|
arguments: vec![
|
||||||
Argument::Positional(Expression {
|
Argument::Positional(Expression::new_unknown(
|
||||||
span: Span::unknown(),
|
Expr::String(self.line.clone()),
|
||||||
ty: Type::String,
|
Span::unknown(),
|
||||||
expr: Expr::String(self.line.clone()),
|
Type::String,
|
||||||
custom_completion: None,
|
)),
|
||||||
}),
|
Argument::Positional(Expression::new_unknown(
|
||||||
Argument::Positional(Expression {
|
Expr::Int(line_pos as i64),
|
||||||
span: Span::unknown(),
|
Span::unknown(),
|
||||||
ty: Type::Int,
|
Type::Int,
|
||||||
expr: Expr::Int(line_pos as i64),
|
)),
|
||||||
custom_completion: None,
|
|
||||||
}),
|
|
||||||
],
|
],
|
||||||
parser_info: HashMap::new(),
|
parser_info: HashMap::new(),
|
||||||
},
|
},
|
||||||
PipelineData::empty(),
|
PipelineData::empty(),
|
||||||
);
|
);
|
||||||
|
|
||||||
let mut custom_completion_options = None;
|
let mut completion_options = orig_options.clone();
|
||||||
|
let mut should_sort = true;
|
||||||
|
|
||||||
// Parse result
|
// Parse result
|
||||||
let suggestions = result
|
let suggestions = match result.and_then(|data| data.into_value(span)) {
|
||||||
.and_then(|data| data.into_value(span))
|
Ok(value) => match &value {
|
||||||
.map(|value| match &value {
|
|
||||||
Value::Record { val, .. } => {
|
Value::Record { val, .. } => {
|
||||||
let completions = val
|
let completions = val
|
||||||
.get("completions")
|
.get("completions")
|
||||||
@ -88,81 +86,70 @@ impl Completer for CustomCompletion {
|
|||||||
let options = val.get("options");
|
let options = val.get("options");
|
||||||
|
|
||||||
if let Some(Value::Record { val: options, .. }) = &options {
|
if let Some(Value::Record { val: options, .. }) = &options {
|
||||||
let should_sort = options
|
if let Some(sort) = options.get("sort").and_then(|val| val.as_bool().ok()) {
|
||||||
.get("sort")
|
should_sort = sort;
|
||||||
.and_then(|val| val.as_bool().ok())
|
|
||||||
.unwrap_or(false);
|
|
||||||
|
|
||||||
if should_sort {
|
|
||||||
self.sort_by = SortBy::Ascending;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
custom_completion_options = Some(CompletionOptions {
|
if let Some(case_sensitive) = options
|
||||||
case_sensitive: options
|
|
||||||
.get("case_sensitive")
|
.get("case_sensitive")
|
||||||
.and_then(|val| val.as_bool().ok())
|
.and_then(|val| val.as_bool().ok())
|
||||||
.unwrap_or(true),
|
{
|
||||||
positional: options
|
completion_options.case_sensitive = case_sensitive;
|
||||||
.get("positional")
|
}
|
||||||
.and_then(|val| val.as_bool().ok())
|
if let Some(positional) =
|
||||||
.unwrap_or(true),
|
options.get("positional").and_then(|val| val.as_bool().ok())
|
||||||
match_algorithm: match options.get("completion_algorithm") {
|
{
|
||||||
Some(option) => option
|
completion_options.positional = positional;
|
||||||
.coerce_string()
|
}
|
||||||
.ok()
|
if let Some(algorithm) = options
|
||||||
|
.get("completion_algorithm")
|
||||||
|
.and_then(|option| option.coerce_string().ok())
|
||||||
.and_then(|option| option.try_into().ok())
|
.and_then(|option| option.try_into().ok())
|
||||||
.unwrap_or(MatchAlgorithm::Prefix),
|
{
|
||||||
None => completion_options.match_algorithm,
|
completion_options.match_algorithm = algorithm;
|
||||||
},
|
}
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
completions
|
completions
|
||||||
}
|
}
|
||||||
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
|
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
|
||||||
_ => vec![],
|
Value::Nothing { .. } => {
|
||||||
})
|
return self.fallback.fetch(
|
||||||
.unwrap_or_default();
|
working_set,
|
||||||
|
stack,
|
||||||
if let Some(custom_completion_options) = custom_completion_options {
|
prefix,
|
||||||
filter(&prefix, suggestions, &custom_completion_options)
|
span,
|
||||||
} else {
|
offset,
|
||||||
filter(&prefix, suggestions, completion_options)
|
pos,
|
||||||
}
|
orig_options,
|
||||||
}
|
);
|
||||||
|
|
||||||
fn get_sort_by(&self) -> SortBy {
|
|
||||||
self.sort_by
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn filter(
|
|
||||||
prefix: &[u8],
|
|
||||||
items: Vec<SemanticSuggestion>,
|
|
||||||
options: &CompletionOptions,
|
|
||||||
) -> Vec<SemanticSuggestion> {
|
|
||||||
items
|
|
||||||
.into_iter()
|
|
||||||
.filter(|it| match options.match_algorithm {
|
|
||||||
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
|
|
||||||
(true, true) => it.suggestion.value.as_bytes().starts_with(prefix),
|
|
||||||
(true, false) => it
|
|
||||||
.suggestion
|
|
||||||
.value
|
|
||||||
.contains(std::str::from_utf8(prefix).unwrap_or("")),
|
|
||||||
(false, positional) => {
|
|
||||||
let value = it.suggestion.value.to_folded_case();
|
|
||||||
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
|
|
||||||
if positional {
|
|
||||||
value.starts_with(&prefix)
|
|
||||||
} else {
|
|
||||||
value.contains(&prefix)
|
|
||||||
}
|
}
|
||||||
|
_ => {
|
||||||
|
log::error!(
|
||||||
|
"Custom completer returned invalid value of type {}",
|
||||||
|
value.get_type().to_string()
|
||||||
|
);
|
||||||
|
return vec![];
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
MatchAlgorithm::Fuzzy => options
|
Err(e) => {
|
||||||
.match_algorithm
|
log::error!("Error getting custom completions: {e}");
|
||||||
.matches_u8(it.suggestion.value.as_bytes(), prefix),
|
return vec![];
|
||||||
})
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), completion_options);
|
||||||
|
|
||||||
|
if should_sort {
|
||||||
|
for sugg in suggestions {
|
||||||
|
matcher.add_semantic_suggestion(sugg);
|
||||||
|
}
|
||||||
|
matcher.results()
|
||||||
|
} else {
|
||||||
|
suggestions
|
||||||
|
.into_iter()
|
||||||
|
.filter(|sugg| matcher.matches(&sugg.suggestion.value))
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -1,16 +1,15 @@
|
|||||||
use crate::completions::{
|
use crate::completions::{
|
||||||
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
|
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
|
||||||
Completer, CompletionOptions, SortBy,
|
Completer, CompletionOptions,
|
||||||
};
|
};
|
||||||
use nu_ansi_term::Style;
|
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
levenshtein_distance, Span,
|
Span,
|
||||||
};
|
};
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
use std::path::{Path, MAIN_SEPARATOR as SEP};
|
use std::path::Path;
|
||||||
|
|
||||||
use super::SemanticSuggestion;
|
use super::{completion_common::FileSuggestion, SemanticSuggestion};
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct DirectoryCompletion {}
|
pub struct DirectoryCompletion {}
|
||||||
@ -26,17 +25,17 @@ impl Completer for DirectoryCompletion {
|
|||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
prefix: Vec<u8>,
|
prefix: &[u8],
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
_pos: usize,
|
_pos: usize,
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
) -> Vec<SemanticSuggestion> {
|
) -> Vec<SemanticSuggestion> {
|
||||||
let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);
|
let AdjustView { prefix, span, .. } = adjust_if_intermediate(prefix, working_set, span);
|
||||||
|
|
||||||
// Filter only the folders
|
// Filter only the folders
|
||||||
#[allow(deprecated)]
|
#[allow(deprecated)]
|
||||||
let output: Vec<_> = directory_completion(
|
let items: Vec<_> = directory_completion(
|
||||||
span,
|
span,
|
||||||
&prefix,
|
&prefix,
|
||||||
&working_set.permanent_state.current_work_dir(),
|
&working_set.permanent_state.current_work_dir(),
|
||||||
@ -47,56 +46,24 @@ impl Completer for DirectoryCompletion {
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.map(move |x| SemanticSuggestion {
|
.map(move |x| SemanticSuggestion {
|
||||||
suggestion: Suggestion {
|
suggestion: Suggestion {
|
||||||
value: x.1,
|
value: x.path,
|
||||||
description: None,
|
style: x.style,
|
||||||
style: x.2,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: x.0.start - offset,
|
start: x.span.start - offset,
|
||||||
end: x.0.end - offset,
|
end: x.span.end - offset,
|
||||||
},
|
},
|
||||||
append_whitespace: false,
|
..Suggestion::default()
|
||||||
},
|
},
|
||||||
// TODO????
|
// TODO????
|
||||||
kind: None,
|
kind: None,
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
output
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort results prioritizing the non hidden folders
|
|
||||||
fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> {
|
|
||||||
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
|
|
||||||
|
|
||||||
// Sort items
|
|
||||||
let mut sorted_items = items;
|
|
||||||
|
|
||||||
match self.get_sort_by() {
|
|
||||||
SortBy::Ascending => {
|
|
||||||
sorted_items.sort_by(|a, b| {
|
|
||||||
// Ignore trailing slashes in folder names when sorting
|
|
||||||
a.suggestion
|
|
||||||
.value
|
|
||||||
.trim_end_matches(SEP)
|
|
||||||
.cmp(b.suggestion.value.trim_end_matches(SEP))
|
|
||||||
});
|
|
||||||
}
|
|
||||||
SortBy::LevenshteinDistance => {
|
|
||||||
sorted_items.sort_by(|a, b| {
|
|
||||||
let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value);
|
|
||||||
let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value);
|
|
||||||
a_distance.cmp(&b_distance)
|
|
||||||
});
|
|
||||||
}
|
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
|
|
||||||
// Separate the results between hidden and non hidden
|
// Separate the results between hidden and non hidden
|
||||||
let mut hidden: Vec<SemanticSuggestion> = vec![];
|
let mut hidden: Vec<SemanticSuggestion> = vec![];
|
||||||
let mut non_hidden: Vec<SemanticSuggestion> = vec![];
|
let mut non_hidden: Vec<SemanticSuggestion> = vec![];
|
||||||
|
|
||||||
for item in sorted_items.into_iter() {
|
for item in items.into_iter() {
|
||||||
let item_path = Path::new(&item.suggestion.value);
|
let item_path = Path::new(&item.suggestion.value);
|
||||||
|
|
||||||
if let Some(value) = item_path.file_name() {
|
if let Some(value) = item_path.file_name() {
|
||||||
@ -124,6 +91,6 @@ pub fn directory_completion(
|
|||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
|
) -> Vec<FileSuggestion> {
|
||||||
complete_item(true, span, partial, cwd, options, engine_state, stack)
|
complete_item(true, span, partial, &[cwd], options, engine_state, stack)
|
||||||
}
|
}
|
||||||
|
@ -1,12 +1,16 @@
|
|||||||
use crate::completions::{file_path_completion, Completer, CompletionOptions, SortBy};
|
use crate::completions::{file_path_completion, Completer, CompletionOptions};
|
||||||
|
use nu_path::expand_tilde;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{Stack, StateWorkingSet},
|
engine::{Stack, StateWorkingSet},
|
||||||
Span,
|
Span,
|
||||||
};
|
};
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
use std::path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
|
use std::{
|
||||||
|
collections::HashSet,
|
||||||
|
path::{is_separator, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
|
||||||
|
};
|
||||||
|
|
||||||
use super::SemanticSuggestion;
|
use super::{SemanticSuggestion, SuggestionKind};
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct DotNuCompletion {}
|
pub struct DotNuCompletion {}
|
||||||
@ -22,119 +26,143 @@ impl Completer for DotNuCompletion {
|
|||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
prefix: Vec<u8>,
|
prefix: &[u8],
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
_pos: usize,
|
_pos: usize,
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
) -> Vec<SemanticSuggestion> {
|
) -> Vec<SemanticSuggestion> {
|
||||||
let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");
|
let prefix_str = String::from_utf8_lossy(prefix);
|
||||||
let mut search_dirs: Vec<String> = vec![];
|
let start_with_backquote = prefix_str.starts_with('`');
|
||||||
|
let end_with_backquote = prefix_str.ends_with('`');
|
||||||
|
let prefix_str = prefix_str.replace('`', "");
|
||||||
|
// e.g. `./`, `..\`, `/`
|
||||||
|
let not_lib_dirs = prefix_str
|
||||||
|
.chars()
|
||||||
|
.find(|c| *c != '.')
|
||||||
|
.is_some_and(is_separator);
|
||||||
|
let mut search_dirs: Vec<PathBuf> = vec![];
|
||||||
|
|
||||||
// If prefix_str is only a word we want to search in the current dir
|
let (base, partial) = if let Some((parent, remain)) = prefix_str.rsplit_once(is_separator) {
|
||||||
let (base, partial) = prefix_str
|
// If prefix_str is only a word we want to search in the current dir.
|
||||||
.rsplit_once(is_separator)
|
// "/xx" should be split to "/" and "xx".
|
||||||
.unwrap_or((".", &prefix_str));
|
if parent.is_empty() {
|
||||||
|
(MAIN_SEPARATOR_STR, remain)
|
||||||
|
} else {
|
||||||
|
(parent, remain)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
(".", prefix_str.as_str())
|
||||||
|
};
|
||||||
let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR);
|
let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR);
|
||||||
let mut partial = partial.to_string();
|
|
||||||
// On windows, this standardizes paths to use \
|
|
||||||
let mut is_current_folder = false;
|
|
||||||
|
|
||||||
// Fetch the lib dirs
|
// Fetch the lib dirs
|
||||||
let lib_dirs: Vec<String> = if let Some(lib_dirs) = working_set.get_env_var("NU_LIB_DIRS") {
|
// NOTE: 2 ways to setup `NU_LIB_DIRS`
|
||||||
|
// 1. `const NU_LIB_DIRS = [paths]`, equal to `nu -I paths`
|
||||||
|
// 2. `$env.NU_LIB_DIRS = [paths]`
|
||||||
|
let const_lib_dirs = working_set
|
||||||
|
.find_variable(b"$NU_LIB_DIRS")
|
||||||
|
.and_then(|vid| working_set.get_variable(vid).const_val.as_ref());
|
||||||
|
let env_lib_dirs = working_set.get_env_var("NU_LIB_DIRS");
|
||||||
|
let lib_dirs: HashSet<PathBuf> = [const_lib_dirs, env_lib_dirs]
|
||||||
|
.into_iter()
|
||||||
|
.flatten()
|
||||||
|
.flat_map(|lib_dirs| {
|
||||||
lib_dirs
|
lib_dirs
|
||||||
.as_list()
|
.as_list()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flat_map(|it| {
|
.flat_map(|it| it.iter().filter_map(|x| x.to_path().ok()))
|
||||||
it.iter().map(|x| {
|
.map(expand_tilde)
|
||||||
x.to_path()
|
|
||||||
.expect("internal error: failed to convert lib path")
|
|
||||||
})
|
})
|
||||||
})
|
.collect();
|
||||||
.map(|it| {
|
|
||||||
it.into_os_string()
|
|
||||||
.into_string()
|
|
||||||
.expect("internal error: failed to convert OS path")
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
} else {
|
|
||||||
vec![]
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check if the base_dir is a folder
|
// Check if the base_dir is a folder
|
||||||
// rsplit_once removes the separator
|
let cwd = working_set.permanent_state.cwd(None);
|
||||||
if base_dir != "." {
|
if base_dir != "." {
|
||||||
// Add the base dir into the directories to be searched
|
let expanded_base_dir = expand_tilde(&base_dir);
|
||||||
search_dirs.push(base_dir.clone());
|
let is_base_dir_relative = expanded_base_dir.is_relative();
|
||||||
|
// Search in base_dir as well as lib_dirs.
|
||||||
// Reset the partial adding the basic dir back
|
// After expanded, base_dir can be a relative path or absolute path.
|
||||||
// in order to make the span replace work properly
|
// If relative, we join "current working dir" with it to get subdirectory and add to search_dirs.
|
||||||
let mut base_dir_partial = base_dir;
|
// If absolute, we add it to search_dirs.
|
||||||
base_dir_partial.push_str(&partial);
|
if let Ok(mut cwd) = cwd {
|
||||||
|
if is_base_dir_relative {
|
||||||
partial = base_dir_partial;
|
cwd.push(&base_dir);
|
||||||
|
search_dirs.push(cwd.into_std_path_buf());
|
||||||
} else {
|
} else {
|
||||||
// Fetch the current folder
|
search_dirs.push(expanded_base_dir);
|
||||||
#[allow(deprecated)]
|
}
|
||||||
let current_folder = working_set.permanent_state.current_work_dir();
|
}
|
||||||
is_current_folder = true;
|
if !not_lib_dirs {
|
||||||
|
search_dirs.extend(lib_dirs.into_iter().map(|mut dir| {
|
||||||
// Add the current folder and the lib dirs into the
|
dir.push(&base_dir);
|
||||||
// directories to be searched
|
dir
|
||||||
search_dirs.push(current_folder);
|
}));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if let Ok(cwd) = cwd {
|
||||||
|
search_dirs.push(cwd.into_std_path_buf());
|
||||||
|
}
|
||||||
|
if !not_lib_dirs {
|
||||||
search_dirs.extend(lib_dirs);
|
search_dirs.extend(lib_dirs);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Fetch the files filtering the ones that ends with .nu
|
// Fetch the files filtering the ones that ends with .nu
|
||||||
// and transform them into suggestions
|
// and transform them into suggestions
|
||||||
let output: Vec<SemanticSuggestion> = search_dirs
|
|
||||||
.into_iter()
|
|
||||||
.flat_map(|search_dir| {
|
|
||||||
let completions = file_path_completion(
|
let completions = file_path_completion(
|
||||||
span,
|
span,
|
||||||
&partial,
|
partial,
|
||||||
&search_dir,
|
&search_dirs
|
||||||
|
.iter()
|
||||||
|
.filter_map(|d| d.to_str())
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
options,
|
options,
|
||||||
working_set.permanent_state,
|
working_set.permanent_state,
|
||||||
stack,
|
stack,
|
||||||
);
|
);
|
||||||
completions
|
completions
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(move |it| {
|
|
||||||
// Different base dir, so we list the .nu files or folders
|
// Different base dir, so we list the .nu files or folders
|
||||||
if !is_current_folder {
|
.filter(|it| {
|
||||||
it.1.ends_with(".nu") || it.1.ends_with(SEP)
|
// for paths with spaces in them
|
||||||
} else {
|
let path = it.path.trim_end_matches('`');
|
||||||
// Lib dirs, so we filter only the .nu files or directory modules
|
path.ends_with(".nu") || path.ends_with(SEP)
|
||||||
if it.1.ends_with(SEP) {
|
|
||||||
Path::new(&search_dir).join(&it.1).join("mod.nu").exists()
|
|
||||||
} else {
|
|
||||||
it.1.ends_with(".nu")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
.map(move |x| SemanticSuggestion {
|
.map(|x| {
|
||||||
|
let append_whitespace =
|
||||||
|
x.path.ends_with(".nu") && (!start_with_backquote || end_with_backquote);
|
||||||
|
// Re-calculate the span to replace
|
||||||
|
let mut span_offset = 0;
|
||||||
|
let mut value = x.path.to_string();
|
||||||
|
// Complete only the last path component
|
||||||
|
if base_dir == MAIN_SEPARATOR_STR {
|
||||||
|
span_offset = base_dir.len()
|
||||||
|
} else if base_dir != "." {
|
||||||
|
span_offset = base_dir.len() + 1
|
||||||
|
}
|
||||||
|
// Retain only one '`'
|
||||||
|
if start_with_backquote {
|
||||||
|
value = value.trim_start_matches('`').to_string();
|
||||||
|
span_offset += 1;
|
||||||
|
}
|
||||||
|
// Add the backquote back
|
||||||
|
if end_with_backquote && !value.ends_with('`') {
|
||||||
|
value.push('`');
|
||||||
|
}
|
||||||
|
let end = x.span.end - offset;
|
||||||
|
let start = std::cmp::min(end, x.span.start - offset + span_offset);
|
||||||
|
SemanticSuggestion {
|
||||||
suggestion: Suggestion {
|
suggestion: Suggestion {
|
||||||
value: x.1,
|
value,
|
||||||
description: None,
|
style: x.style,
|
||||||
style: x.2,
|
span: reedline::Span { start, end },
|
||||||
extra: None,
|
append_whitespace,
|
||||||
span: reedline::Span {
|
..Suggestion::default()
|
||||||
start: x.0.start - offset,
|
|
||||||
end: x.0.end - offset,
|
|
||||||
},
|
},
|
||||||
append_whitespace: true,
|
kind: Some(SuggestionKind::Module),
|
||||||
},
|
}
|
||||||
// TODO????
|
|
||||||
kind: None,
|
|
||||||
})
|
})
|
||||||
})
|
.collect::<Vec<_>>()
|
||||||
.collect();
|
|
||||||
|
|
||||||
output
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_sort_by(&self) -> SortBy {
|
|
||||||
SortBy::LevenshteinDistance
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,17 +1,15 @@
|
|||||||
use crate::completions::{
|
use crate::completions::{
|
||||||
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
|
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
|
||||||
Completer, CompletionOptions, SortBy,
|
Completer, CompletionOptions,
|
||||||
};
|
};
|
||||||
use nu_ansi_term::Style;
|
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
levenshtein_distance, Span,
|
Span,
|
||||||
};
|
};
|
||||||
use nu_utils::IgnoreCaseExt;
|
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
use std::path::{Path, MAIN_SEPARATOR as SEP};
|
use std::path::Path;
|
||||||
|
|
||||||
use super::SemanticSuggestion;
|
use super::{completion_common::FileSuggestion, SemanticSuggestion};
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct FileCompletion {}
|
pub struct FileCompletion {}
|
||||||
@ -27,7 +25,7 @@ impl Completer for FileCompletion {
|
|||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
prefix: Vec<u8>,
|
prefix: &[u8],
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
_pos: usize,
|
_pos: usize,
|
||||||
@ -37,14 +35,14 @@ impl Completer for FileCompletion {
|
|||||||
prefix,
|
prefix,
|
||||||
span,
|
span,
|
||||||
readjusted,
|
readjusted,
|
||||||
} = adjust_if_intermediate(&prefix, working_set, span);
|
} = adjust_if_intermediate(prefix, working_set, span);
|
||||||
|
|
||||||
#[allow(deprecated)]
|
#[allow(deprecated)]
|
||||||
let output: Vec<_> = complete_item(
|
let items: Vec<_> = complete_item(
|
||||||
readjusted,
|
readjusted,
|
||||||
span,
|
span,
|
||||||
&prefix,
|
&prefix,
|
||||||
&working_set.permanent_state.current_work_dir(),
|
&[&working_set.permanent_state.current_work_dir()],
|
||||||
options,
|
options,
|
||||||
working_set.permanent_state,
|
working_set.permanent_state,
|
||||||
stack,
|
stack,
|
||||||
@ -52,56 +50,26 @@ impl Completer for FileCompletion {
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.map(move |x| SemanticSuggestion {
|
.map(move |x| SemanticSuggestion {
|
||||||
suggestion: Suggestion {
|
suggestion: Suggestion {
|
||||||
value: x.1,
|
value: x.path,
|
||||||
description: None,
|
style: x.style,
|
||||||
style: x.2,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: x.0.start - offset,
|
start: x.span.start - offset,
|
||||||
end: x.0.end - offset,
|
end: x.span.end - offset,
|
||||||
},
|
},
|
||||||
append_whitespace: false,
|
..Suggestion::default()
|
||||||
},
|
},
|
||||||
// TODO????
|
// TODO????
|
||||||
kind: None,
|
kind: None,
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
output
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort results prioritizing the non hidden folders
|
// Sort results prioritizing the non hidden folders
|
||||||
fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> {
|
|
||||||
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
|
|
||||||
|
|
||||||
// Sort items
|
|
||||||
let mut sorted_items = items;
|
|
||||||
|
|
||||||
match self.get_sort_by() {
|
|
||||||
SortBy::Ascending => {
|
|
||||||
sorted_items.sort_by(|a, b| {
|
|
||||||
// Ignore trailing slashes in folder names when sorting
|
|
||||||
a.suggestion
|
|
||||||
.value
|
|
||||||
.trim_end_matches(SEP)
|
|
||||||
.cmp(b.suggestion.value.trim_end_matches(SEP))
|
|
||||||
});
|
|
||||||
}
|
|
||||||
SortBy::LevenshteinDistance => {
|
|
||||||
sorted_items.sort_by(|a, b| {
|
|
||||||
let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value);
|
|
||||||
let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value);
|
|
||||||
a_distance.cmp(&b_distance)
|
|
||||||
});
|
|
||||||
}
|
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
|
|
||||||
// Separate the results between hidden and non hidden
|
// Separate the results between hidden and non hidden
|
||||||
let mut hidden: Vec<SemanticSuggestion> = vec![];
|
let mut hidden: Vec<SemanticSuggestion> = vec![];
|
||||||
let mut non_hidden: Vec<SemanticSuggestion> = vec![];
|
let mut non_hidden: Vec<SemanticSuggestion> = vec![];
|
||||||
|
|
||||||
for item in sorted_items.into_iter() {
|
for item in items.into_iter() {
|
||||||
let item_path = Path::new(&item.suggestion.value);
|
let item_path = Path::new(&item.suggestion.value);
|
||||||
|
|
||||||
if let Some(value) = item_path.file_name() {
|
if let Some(value) = item_path.file_name() {
|
||||||
@ -125,21 +93,10 @@ impl Completer for FileCompletion {
|
|||||||
pub fn file_path_completion(
|
pub fn file_path_completion(
|
||||||
span: nu_protocol::Span,
|
span: nu_protocol::Span,
|
||||||
partial: &str,
|
partial: &str,
|
||||||
cwd: &str,
|
cwds: &[impl AsRef<str>],
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
|
) -> Vec<FileSuggestion> {
|
||||||
complete_item(false, span, partial, cwd, options, engine_state, stack)
|
complete_item(false, span, partial, cwds, options, engine_state, stack)
|
||||||
}
|
|
||||||
|
|
||||||
pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
|
|
||||||
// Check for case sensitive
|
|
||||||
if !options.case_sensitive {
|
|
||||||
return options
|
|
||||||
.match_algorithm
|
|
||||||
.matches_str(&from.to_folded_case(), &partial.to_folded_case());
|
|
||||||
}
|
|
||||||
|
|
||||||
options.match_algorithm.matches_str(from, partial)
|
|
||||||
}
|
}
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
use crate::completions::{Completer, CompletionOptions};
|
use crate::completions::{completion_options::NuMatcher, Completer, CompletionOptions};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
ast::{Expr, Expression},
|
ast::{Expr, Expression},
|
||||||
engine::{Stack, StateWorkingSet},
|
engine::{Stack, StateWorkingSet},
|
||||||
@ -24,7 +24,7 @@ impl Completer for FlagCompletion {
|
|||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
_stack: &Stack,
|
_stack: &Stack,
|
||||||
prefix: Vec<u8>,
|
prefix: &[u8],
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
_pos: usize,
|
_pos: usize,
|
||||||
@ -35,7 +35,7 @@ impl Completer for FlagCompletion {
|
|||||||
let decl = working_set.get_decl(call.decl_id);
|
let decl = working_set.get_decl(call.decl_id);
|
||||||
let sig = decl.signature();
|
let sig = decl.signature();
|
||||||
|
|
||||||
let mut output = vec![];
|
let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), options.clone());
|
||||||
|
|
||||||
for named in &sig.named {
|
for named in &sig.named {
|
||||||
let flag_desc = &named.desc;
|
let flag_desc = &named.desc;
|
||||||
@ -44,24 +44,21 @@ impl Completer for FlagCompletion {
|
|||||||
short.encode_utf8(&mut named);
|
short.encode_utf8(&mut named);
|
||||||
named.insert(0, b'-');
|
named.insert(0, b'-');
|
||||||
|
|
||||||
if options.match_algorithm.matches_u8(&named, &prefix) {
|
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||||
output.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
suggestion: Suggestion {
|
||||||
value: String::from_utf8_lossy(&named).to_string(),
|
value: String::from_utf8_lossy(&named).to_string(),
|
||||||
description: Some(flag_desc.to_string()),
|
description: Some(flag_desc.to_string()),
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: span.start - offset,
|
start: span.start - offset,
|
||||||
end: span.end - offset,
|
end: span.end - offset,
|
||||||
},
|
},
|
||||||
append_whitespace: true,
|
append_whitespace: true,
|
||||||
|
..Suggestion::default()
|
||||||
},
|
},
|
||||||
// TODO????
|
// TODO????
|
||||||
kind: None,
|
kind: None,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if named.long.is_empty() {
|
if named.long.is_empty() {
|
||||||
continue;
|
continue;
|
||||||
@ -71,26 +68,23 @@ impl Completer for FlagCompletion {
|
|||||||
named.insert(0, b'-');
|
named.insert(0, b'-');
|
||||||
named.insert(0, b'-');
|
named.insert(0, b'-');
|
||||||
|
|
||||||
if options.match_algorithm.matches_u8(&named, &prefix) {
|
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||||
output.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
suggestion: Suggestion {
|
||||||
value: String::from_utf8_lossy(&named).to_string(),
|
value: String::from_utf8_lossy(&named).to_string(),
|
||||||
description: Some(flag_desc.to_string()),
|
description: Some(flag_desc.to_string()),
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: span.start - offset,
|
start: span.start - offset,
|
||||||
end: span.end - offset,
|
end: span.end - offset,
|
||||||
},
|
},
|
||||||
append_whitespace: true,
|
append_whitespace: true,
|
||||||
|
..Suggestion::default()
|
||||||
},
|
},
|
||||||
// TODO????
|
// TODO????
|
||||||
kind: None,
|
kind: None,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
return output;
|
return matcher.results();
|
||||||
}
|
}
|
||||||
|
|
||||||
vec![]
|
vec![]
|
||||||
|
@ -1,4 +1,6 @@
|
|||||||
|
mod attribute_completions;
|
||||||
mod base;
|
mod base;
|
||||||
|
mod cell_path_completions;
|
||||||
mod command_completions;
|
mod command_completions;
|
||||||
mod completer;
|
mod completer;
|
||||||
mod completion_common;
|
mod completion_common;
|
||||||
@ -8,15 +10,19 @@ mod directory_completions;
|
|||||||
mod dotnu_completions;
|
mod dotnu_completions;
|
||||||
mod file_completions;
|
mod file_completions;
|
||||||
mod flag_completions;
|
mod flag_completions;
|
||||||
|
mod operator_completions;
|
||||||
mod variable_completions;
|
mod variable_completions;
|
||||||
|
|
||||||
|
pub use attribute_completions::{AttributableCompletion, AttributeCompletion};
|
||||||
pub use base::{Completer, SemanticSuggestion, SuggestionKind};
|
pub use base::{Completer, SemanticSuggestion, SuggestionKind};
|
||||||
|
pub use cell_path_completions::CellPathCompletion;
|
||||||
pub use command_completions::CommandCompletion;
|
pub use command_completions::CommandCompletion;
|
||||||
pub use completer::NuCompleter;
|
pub use completer::NuCompleter;
|
||||||
pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy};
|
pub use completion_options::{CompletionOptions, MatchAlgorithm};
|
||||||
pub use custom_completions::CustomCompletion;
|
pub use custom_completions::CustomCompletion;
|
||||||
pub use directory_completions::DirectoryCompletion;
|
pub use directory_completions::DirectoryCompletion;
|
||||||
pub use dotnu_completions::DotNuCompletion;
|
pub use dotnu_completions::DotNuCompletion;
|
||||||
pub use file_completions::{file_path_completion, matches, FileCompletion};
|
pub use file_completions::{file_path_completion, FileCompletion};
|
||||||
pub use flag_completions::FlagCompletion;
|
pub use flag_completions::FlagCompletion;
|
||||||
|
pub use operator_completions::OperatorCompletion;
|
||||||
pub use variable_completions::VariableCompletion;
|
pub use variable_completions::VariableCompletion;
|
||||||
|
170
crates/nu-cli/src/completions/operator_completions.rs
Normal file
170
crates/nu-cli/src/completions/operator_completions.rs
Normal file
@ -0,0 +1,170 @@
|
|||||||
|
use crate::completions::{
|
||||||
|
completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
|
||||||
|
};
|
||||||
|
use nu_protocol::{
|
||||||
|
ast::{Expr, Expression},
|
||||||
|
engine::{Stack, StateWorkingSet},
|
||||||
|
Span, Type,
|
||||||
|
};
|
||||||
|
use reedline::Suggestion;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OperatorCompletion {
|
||||||
|
previous_expr: Expression,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl OperatorCompletion {
|
||||||
|
pub fn new(previous_expr: Expression) -> Self {
|
||||||
|
OperatorCompletion { previous_expr }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Completer for OperatorCompletion {
|
||||||
|
fn fetch(
|
||||||
|
&mut self,
|
||||||
|
working_set: &StateWorkingSet,
|
||||||
|
_stack: &Stack,
|
||||||
|
_prefix: &[u8],
|
||||||
|
span: Span,
|
||||||
|
offset: usize,
|
||||||
|
_pos: usize,
|
||||||
|
options: &CompletionOptions,
|
||||||
|
) -> Vec<SemanticSuggestion> {
|
||||||
|
//Check if int, float, or string
|
||||||
|
let partial = std::str::from_utf8(working_set.get_span_contents(span)).unwrap_or("");
|
||||||
|
let op = match &self.previous_expr.expr {
|
||||||
|
Expr::BinaryOp(x, _, _) => &x.expr,
|
||||||
|
_ => {
|
||||||
|
return vec![];
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let possible_operations = match op {
|
||||||
|
Expr::Int(_) => vec![
|
||||||
|
("+", "Add (Plus)"),
|
||||||
|
("-", "Subtract (Minus)"),
|
||||||
|
("*", "Multiply"),
|
||||||
|
("/", "Divide"),
|
||||||
|
("==", "Equal to"),
|
||||||
|
("!=", "Not equal to"),
|
||||||
|
("//", "Floor division"),
|
||||||
|
("<", "Less than"),
|
||||||
|
(">", "Greater than"),
|
||||||
|
("<=", "Less than or equal to"),
|
||||||
|
(">=", "Greater than or equal to"),
|
||||||
|
("mod", "Floor division remainder (Modulo)"),
|
||||||
|
("**", "Power of"),
|
||||||
|
("bit-or", "Bitwise OR"),
|
||||||
|
("bit-xor", "Bitwise exclusive OR"),
|
||||||
|
("bit-and", "Bitwise AND"),
|
||||||
|
("bit-shl", "Bitwise shift left"),
|
||||||
|
("bit-shr", "Bitwise shift right"),
|
||||||
|
("in", "Is a member of (doesn't use regex)"),
|
||||||
|
("not-in", "Is not a member of (doesn't use regex)"),
|
||||||
|
],
|
||||||
|
Expr::String(_) => vec![
|
||||||
|
("=~", "Contains regex match"),
|
||||||
|
("like", "Contains regex match"),
|
||||||
|
("!~", "Does not contain regex match"),
|
||||||
|
("not-like", "Does not contain regex match"),
|
||||||
|
(
|
||||||
|
"++",
|
||||||
|
"Concatenates two lists, two strings, or two binary values",
|
||||||
|
),
|
||||||
|
("in", "Is a member of (doesn't use regex)"),
|
||||||
|
("not-in", "Is not a member of (doesn't use regex)"),
|
||||||
|
("starts-with", "Starts with"),
|
||||||
|
("ends-with", "Ends with"),
|
||||||
|
],
|
||||||
|
Expr::Float(_) => vec![
|
||||||
|
("+", "Add (Plus)"),
|
||||||
|
("-", "Subtract (Minus)"),
|
||||||
|
("*", "Multiply"),
|
||||||
|
("/", "Divide"),
|
||||||
|
("==", "Equal to"),
|
||||||
|
("!=", "Not equal to"),
|
||||||
|
("//", "Floor division"),
|
||||||
|
("<", "Less than"),
|
||||||
|
(">", "Greater than"),
|
||||||
|
("<=", "Less than or equal to"),
|
||||||
|
(">=", "Greater than or equal to"),
|
||||||
|
("mod", "Floor division remainder (Modulo)"),
|
||||||
|
("**", "Power of"),
|
||||||
|
("in", "Is a member of (doesn't use regex)"),
|
||||||
|
("not-in", "Is not a member of (doesn't use regex)"),
|
||||||
|
],
|
||||||
|
Expr::Bool(_) => vec![
|
||||||
|
(
|
||||||
|
"and",
|
||||||
|
"Both values are true (short-circuits when first value is false)",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"or",
|
||||||
|
"Either value is true (short-circuits when first value is true)",
|
||||||
|
),
|
||||||
|
("xor", "One value is true and the other is false"),
|
||||||
|
("not", "Negates a value or expression"),
|
||||||
|
("in", "Is a member of (doesn't use regex)"),
|
||||||
|
("not-in", "Is not a member of (doesn't use regex)"),
|
||||||
|
],
|
||||||
|
Expr::FullCellPath(path) => match path.head.expr {
|
||||||
|
Expr::List(_) => vec![
|
||||||
|
(
|
||||||
|
"++",
|
||||||
|
"Concatenates two lists, two strings, or two binary values",
|
||||||
|
),
|
||||||
|
("has", "Contains a value of (doesn't use regex)"),
|
||||||
|
("not-has", "Does not contain a value of (doesn't use regex)"),
|
||||||
|
],
|
||||||
|
Expr::Var(id) => get_variable_completions(id, working_set),
|
||||||
|
_ => vec![],
|
||||||
|
},
|
||||||
|
_ => vec![],
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut matcher = NuMatcher::new(partial, options.clone());
|
||||||
|
for (symbol, desc) in possible_operations.into_iter() {
|
||||||
|
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||||
|
suggestion: Suggestion {
|
||||||
|
value: symbol.to_string(),
|
||||||
|
description: Some(desc.to_string()),
|
||||||
|
span: reedline::Span::new(span.start - offset, span.end - offset),
|
||||||
|
append_whitespace: true,
|
||||||
|
..Suggestion::default()
|
||||||
|
},
|
||||||
|
kind: Some(SuggestionKind::Command(
|
||||||
|
nu_protocol::engine::CommandType::Builtin,
|
||||||
|
)),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
matcher.results()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_variable_completions<'a>(
|
||||||
|
id: nu_protocol::Id<nu_protocol::marker::Var>,
|
||||||
|
working_set: &StateWorkingSet,
|
||||||
|
) -> Vec<(&'a str, &'a str)> {
|
||||||
|
let var = working_set.get_variable(id);
|
||||||
|
if !var.mutable {
|
||||||
|
return vec![];
|
||||||
|
}
|
||||||
|
|
||||||
|
match var.ty {
|
||||||
|
Type::List(_) | Type::String | Type::Binary => vec![
|
||||||
|
(
|
||||||
|
"++=",
|
||||||
|
"Concatenates two lists, two strings, or two binary values",
|
||||||
|
),
|
||||||
|
("=", "Assigns a value to a variable."),
|
||||||
|
],
|
||||||
|
|
||||||
|
Type::Int | Type::Float => vec![
|
||||||
|
("=", "Assigns a value to a variable."),
|
||||||
|
("+=", "Adds a value to a variable."),
|
||||||
|
("-=", "Subtracts a value from a variable."),
|
||||||
|
("*=", "Multiplies a variable by a value"),
|
||||||
|
("/=", "Divides a variable by a value."),
|
||||||
|
],
|
||||||
|
_ => vec![],
|
||||||
|
}
|
||||||
|
}
|
@ -1,170 +1,58 @@
|
|||||||
use crate::completions::{
|
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
|
||||||
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion, SuggestionKind,
|
|
||||||
};
|
|
||||||
use nu_engine::{column::get_columns, eval_variable};
|
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{Stack, StateWorkingSet},
|
engine::{Stack, StateWorkingSet},
|
||||||
Span, Value,
|
Span, VarId,
|
||||||
};
|
};
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
use std::str;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
use super::completion_options::NuMatcher;
|
||||||
pub struct VariableCompletion {
|
|
||||||
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
|
||||||
}
|
|
||||||
|
|
||||||
impl VariableCompletion {
|
pub struct VariableCompletion {}
|
||||||
pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
|
|
||||||
Self { var_context }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Completer for VariableCompletion {
|
impl Completer for VariableCompletion {
|
||||||
fn fetch(
|
fn fetch(
|
||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
stack: &Stack,
|
_stack: &Stack,
|
||||||
prefix: Vec<u8>,
|
prefix: &[u8],
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
_pos: usize,
|
_pos: usize,
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
) -> Vec<SemanticSuggestion> {
|
) -> Vec<SemanticSuggestion> {
|
||||||
let mut output = vec![];
|
let prefix_str = String::from_utf8_lossy(prefix);
|
||||||
let builtins = ["$nu", "$in", "$env"];
|
let mut matcher = NuMatcher::new(prefix_str, options.clone());
|
||||||
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
|
|
||||||
let var_id = working_set.find_variable(&self.var_context.0);
|
|
||||||
let current_span = reedline::Span {
|
let current_span = reedline::Span {
|
||||||
start: span.start - offset,
|
start: span.start - offset,
|
||||||
end: span.end - offset,
|
end: span.end - offset,
|
||||||
};
|
};
|
||||||
let sublevels_count = self.var_context.1.len();
|
|
||||||
|
|
||||||
// Completions for the given variable
|
|
||||||
if !var_str.is_empty() {
|
|
||||||
// Completion for $env.<tab>
|
|
||||||
if var_str == "$env" {
|
|
||||||
let env_vars = stack.get_env_vars(working_set.permanent_state);
|
|
||||||
|
|
||||||
// Return nested values
|
|
||||||
if sublevels_count > 0 {
|
|
||||||
// Extract the target var ($env.<target-var>)
|
|
||||||
let target_var = self.var_context.1[0].clone();
|
|
||||||
let target_var_str =
|
|
||||||
str::from_utf8(&target_var).unwrap_or_default().to_string();
|
|
||||||
|
|
||||||
// Everything after the target var is the nested level ($env.<target-var>.<nested_levels>...)
|
|
||||||
let nested_levels: Vec<Vec<u8>> =
|
|
||||||
self.var_context.1.clone().into_iter().skip(1).collect();
|
|
||||||
|
|
||||||
if let Some(val) = env_vars.get(&target_var_str) {
|
|
||||||
for suggestion in nested_suggestions(val, &nested_levels, current_span) {
|
|
||||||
if options.match_algorithm.matches_u8_insensitive(
|
|
||||||
options.case_sensitive,
|
|
||||||
suggestion.suggestion.value.as_bytes(),
|
|
||||||
&prefix,
|
|
||||||
) {
|
|
||||||
output.push(suggestion);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// No nesting provided, return all env vars
|
|
||||||
for env_var in env_vars {
|
|
||||||
if options.match_algorithm.matches_u8_insensitive(
|
|
||||||
options.case_sensitive,
|
|
||||||
env_var.0.as_bytes(),
|
|
||||||
&prefix,
|
|
||||||
) {
|
|
||||||
output.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
|
||||||
value: env_var.0,
|
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: current_span,
|
|
||||||
append_whitespace: false,
|
|
||||||
},
|
|
||||||
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Completions for $nu.<tab>
|
|
||||||
if var_str == "$nu" {
|
|
||||||
// Eval nu var
|
|
||||||
if let Ok(nuval) = eval_variable(
|
|
||||||
working_set.permanent_state,
|
|
||||||
stack,
|
|
||||||
nu_protocol::NU_VARIABLE_ID,
|
|
||||||
nu_protocol::Span::new(current_span.start, current_span.end),
|
|
||||||
) {
|
|
||||||
for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
|
|
||||||
{
|
|
||||||
if options.match_algorithm.matches_u8_insensitive(
|
|
||||||
options.case_sensitive,
|
|
||||||
suggestion.suggestion.value.as_bytes(),
|
|
||||||
&prefix,
|
|
||||||
) {
|
|
||||||
output.push(suggestion);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Completion other variable types
|
|
||||||
if let Some(var_id) = var_id {
|
|
||||||
// Extract the variable value from the stack
|
|
||||||
let var = stack.get_var(var_id, Span::new(span.start, span.end));
|
|
||||||
|
|
||||||
// If the value exists and it's of type Record
|
|
||||||
if let Ok(value) = var {
|
|
||||||
for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
|
|
||||||
{
|
|
||||||
if options.match_algorithm.matches_u8_insensitive(
|
|
||||||
options.case_sensitive,
|
|
||||||
suggestion.suggestion.value.as_bytes(),
|
|
||||||
&prefix,
|
|
||||||
) {
|
|
||||||
output.push(suggestion);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Variable completion (e.g: $en<tab> to complete $env)
|
// Variable completion (e.g: $en<tab> to complete $env)
|
||||||
|
let builtins = ["$nu", "$in", "$env"];
|
||||||
for builtin in builtins {
|
for builtin in builtins {
|
||||||
if options.match_algorithm.matches_u8_insensitive(
|
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||||
options.case_sensitive,
|
|
||||||
builtin.as_bytes(),
|
|
||||||
&prefix,
|
|
||||||
) {
|
|
||||||
output.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
suggestion: Suggestion {
|
||||||
value: builtin.to_string(),
|
value: builtin.to_string(),
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: current_span,
|
span: current_span,
|
||||||
append_whitespace: false,
|
..Suggestion::default()
|
||||||
},
|
},
|
||||||
// TODO is there a way to get the VarId to get the type???
|
// TODO is there a way to get the VarId to get the type???
|
||||||
kind: None,
|
kind: None,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
let mut add_candidate = |name, var_id: &VarId| {
|
||||||
|
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||||
|
suggestion: Suggestion {
|
||||||
|
value: String::from_utf8_lossy(name).to_string(),
|
||||||
|
span: current_span,
|
||||||
|
..Suggestion::default()
|
||||||
|
},
|
||||||
|
kind: Some(SuggestionKind::Type(
|
||||||
|
working_set.get_variable(*var_id).ty.clone(),
|
||||||
|
)),
|
||||||
|
})
|
||||||
|
};
|
||||||
|
|
||||||
// TODO: The following can be refactored (see find_commands_by_predicate() used in
|
// TODO: The following can be refactored (see find_commands_by_predicate() used in
|
||||||
// command_completions).
|
// command_completions).
|
||||||
@ -172,30 +60,11 @@ impl Completer for VariableCompletion {
|
|||||||
// Working set scope vars
|
// Working set scope vars
|
||||||
for scope_frame in working_set.delta.scope.iter().rev() {
|
for scope_frame in working_set.delta.scope.iter().rev() {
|
||||||
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
|
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
|
||||||
for v in &overlay_frame.vars {
|
for (name, var_id) in &overlay_frame.vars {
|
||||||
if options.match_algorithm.matches_u8_insensitive(
|
add_candidate(name, var_id);
|
||||||
options.case_sensitive,
|
|
||||||
v.0,
|
|
||||||
&prefix,
|
|
||||||
) {
|
|
||||||
output.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
|
||||||
value: String::from_utf8_lossy(v.0).to_string(),
|
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: current_span,
|
|
||||||
append_whitespace: false,
|
|
||||||
},
|
|
||||||
kind: Some(SuggestionKind::Type(
|
|
||||||
working_set.get_variable(*v.1).ty.clone(),
|
|
||||||
)),
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// Permanent state vars
|
// Permanent state vars
|
||||||
// for scope in &self.engine_state.scope {
|
// for scope in &self.engine_state.scope {
|
||||||
for overlay_frame in working_set
|
for overlay_frame in working_set
|
||||||
@ -203,125 +72,11 @@ impl Completer for VariableCompletion {
|
|||||||
.active_overlays(&removed_overlays)
|
.active_overlays(&removed_overlays)
|
||||||
.rev()
|
.rev()
|
||||||
{
|
{
|
||||||
for v in &overlay_frame.vars {
|
for (name, var_id) in &overlay_frame.vars {
|
||||||
if options.match_algorithm.matches_u8_insensitive(
|
add_candidate(name, var_id);
|
||||||
options.case_sensitive,
|
|
||||||
v.0,
|
|
||||||
&prefix,
|
|
||||||
) {
|
|
||||||
output.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
|
||||||
value: String::from_utf8_lossy(v.0).to_string(),
|
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: current_span,
|
|
||||||
append_whitespace: false,
|
|
||||||
},
|
|
||||||
kind: Some(SuggestionKind::Type(
|
|
||||||
working_set.get_variable(*v.1).ty.clone(),
|
|
||||||
)),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?
|
|
||||||
|
|
||||||
output
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Find recursively the values for sublevels
|
matcher.results()
|
||||||
// if no sublevels are set it returns the current value
|
|
||||||
fn nested_suggestions(
|
|
||||||
val: &Value,
|
|
||||||
sublevels: &[Vec<u8>],
|
|
||||||
current_span: reedline::Span,
|
|
||||||
) -> Vec<SemanticSuggestion> {
|
|
||||||
let mut output: Vec<SemanticSuggestion> = vec![];
|
|
||||||
let value = recursive_value(val, sublevels).unwrap_or_else(Value::nothing);
|
|
||||||
|
|
||||||
let kind = SuggestionKind::Type(value.get_type());
|
|
||||||
match value {
|
|
||||||
Value::Record { val, .. } => {
|
|
||||||
// Add all the columns as completion
|
|
||||||
for col in val.columns() {
|
|
||||||
output.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
|
||||||
value: col.clone(),
|
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: current_span,
|
|
||||||
append_whitespace: false,
|
|
||||||
},
|
|
||||||
kind: Some(kind.clone()),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
output
|
|
||||||
}
|
|
||||||
Value::List { vals, .. } => {
|
|
||||||
for column_name in get_columns(vals.as_slice()) {
|
|
||||||
output.push(SemanticSuggestion {
|
|
||||||
suggestion: Suggestion {
|
|
||||||
value: column_name,
|
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: current_span,
|
|
||||||
append_whitespace: false,
|
|
||||||
},
|
|
||||||
kind: Some(kind.clone()),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
output
|
|
||||||
}
|
|
||||||
_ => output,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extracts the recursive value (e.g: $var.a.b.c)
|
|
||||||
fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
|
|
||||||
// Go to next sublevel
|
|
||||||
if let Some((sublevel, next_sublevels)) = sublevels.split_first() {
|
|
||||||
let span = val.span();
|
|
||||||
match val {
|
|
||||||
Value::Record { val, .. } => {
|
|
||||||
if let Some((_, value)) = val.iter().find(|(key, _)| key.as_bytes() == sublevel) {
|
|
||||||
// If matches try to fetch recursively the next
|
|
||||||
recursive_value(value, next_sublevels)
|
|
||||||
} else {
|
|
||||||
// Current sublevel value not found
|
|
||||||
Err(span)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Value::List { vals, .. } => {
|
|
||||||
for col in get_columns(vals.as_slice()) {
|
|
||||||
if col.as_bytes() == *sublevel {
|
|
||||||
let val = val.get_data_by_key(&col).ok_or(span)?;
|
|
||||||
return recursive_value(&val, next_sublevels);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Current sublevel value not found
|
|
||||||
Err(span)
|
|
||||||
}
|
|
||||||
_ => Ok(val.clone()),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ok(val.clone())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MatchAlgorithm {
|
|
||||||
pub fn matches_u8_insensitive(&self, sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
|
|
||||||
if sensitive {
|
|
||||||
self.matches_u8(haystack, needle)
|
|
||||||
} else {
|
|
||||||
self.matches_u8(&haystack.to_ascii_lowercase(), &needle.to_ascii_lowercase())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2,13 +2,13 @@ use crate::util::eval_source;
|
|||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
use nu_path::canonicalize_with;
|
use nu_path::canonicalize_with;
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
use nu_protocol::{engine::StateWorkingSet, report_error, ParseError, PluginRegistryFile, Spanned};
|
use nu_protocol::{engine::StateWorkingSet, ParseError, PluginRegistryFile, Spanned};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, Stack},
|
engine::{EngineState, Stack},
|
||||||
report_error_new, HistoryFileFormat, PipelineData,
|
report_shell_error, PipelineData,
|
||||||
};
|
};
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
use nu_utils::utils::perf;
|
use nu_utils::perf;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
@ -16,16 +16,9 @@ const PLUGIN_FILE: &str = "plugin.msgpackz";
|
|||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
const OLD_PLUGIN_FILE: &str = "plugin.nu";
|
const OLD_PLUGIN_FILE: &str = "plugin.nu";
|
||||||
|
|
||||||
const HISTORY_FILE_TXT: &str = "history.txt";
|
|
||||||
const HISTORY_FILE_SQLITE: &str = "history.sqlite3";
|
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
pub fn read_plugin_file(
|
pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
|
||||||
engine_state: &mut EngineState,
|
use nu_protocol::{shell_error::io::IoError, ShellError};
|
||||||
plugin_file: Option<Spanned<String>>,
|
|
||||||
storage_path: &str,
|
|
||||||
) {
|
|
||||||
use nu_protocol::ShellError;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
let span = plugin_file.as_ref().map(|s| s.span);
|
let span = plugin_file.as_ref().map(|s| s.span);
|
||||||
@ -36,7 +29,7 @@ pub fn read_plugin_file(
|
|||||||
.and_then(|p| Path::new(&p.item).extension())
|
.and_then(|p| Path::new(&p.item).extension())
|
||||||
.is_some_and(|ext| ext == "nu")
|
.is_some_and(|ext| ext == "nu")
|
||||||
{
|
{
|
||||||
report_error_new(
|
report_shell_error(
|
||||||
engine_state,
|
engine_state,
|
||||||
&ShellError::GenericError {
|
&ShellError::GenericError {
|
||||||
error: "Wrong plugin file format".into(),
|
error: "Wrong plugin file format".into(),
|
||||||
@ -52,14 +45,14 @@ pub fn read_plugin_file(
|
|||||||
let mut start_time = std::time::Instant::now();
|
let mut start_time = std::time::Instant::now();
|
||||||
// Reading signatures from plugin registry file
|
// Reading signatures from plugin registry file
|
||||||
// The plugin.msgpackz file stores the parsed signature collected from each registered plugin
|
// The plugin.msgpackz file stores the parsed signature collected from each registered plugin
|
||||||
add_plugin_file(engine_state, plugin_file.clone(), storage_path);
|
add_plugin_file(engine_state, plugin_file.clone());
|
||||||
perf(
|
perf!(
|
||||||
"add plugin file to engine_state",
|
"add plugin file to engine_state",
|
||||||
start_time,
|
start_time,
|
||||||
file!(),
|
engine_state
|
||||||
line!(),
|
.get_config()
|
||||||
column!(),
|
.use_ansi_coloring
|
||||||
engine_state.get_config().use_ansi_coloring,
|
.get(engine_state)
|
||||||
);
|
);
|
||||||
|
|
||||||
start_time = std::time::Instant::now();
|
start_time = std::time::Instant::now();
|
||||||
@ -73,8 +66,7 @@ pub fn read_plugin_file(
|
|||||||
log::warn!("Plugin file not found: {}", plugin_path.display());
|
log::warn!("Plugin file not found: {}", plugin_path.display());
|
||||||
|
|
||||||
// Try migration of an old plugin file if this wasn't a custom plugin file
|
// Try migration of an old plugin file if this wasn't a custom plugin file
|
||||||
if plugin_file.is_none() && migrate_old_plugin_file(engine_state, storage_path)
|
if plugin_file.is_none() && migrate_old_plugin_file(engine_state) {
|
||||||
{
|
|
||||||
let Ok(file) = std::fs::File::open(&plugin_path) else {
|
let Ok(file) = std::fs::File::open(&plugin_path) else {
|
||||||
log::warn!("Failed to load newly migrated plugin file");
|
log::warn!("Failed to load newly migrated plugin file");
|
||||||
return;
|
return;
|
||||||
@ -84,18 +76,14 @@ pub fn read_plugin_file(
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
report_error_new(
|
report_shell_error(
|
||||||
engine_state,
|
engine_state,
|
||||||
&ShellError::GenericError {
|
&ShellError::Io(IoError::new_internal_with_path(
|
||||||
error: format!(
|
err.kind(),
|
||||||
"Error while opening plugin registry file: {}",
|
"Could not open plugin registry file",
|
||||||
plugin_path.display()
|
nu_protocol::location!(),
|
||||||
),
|
plugin_path,
|
||||||
msg: "plugin path defined here".into(),
|
)),
|
||||||
span,
|
|
||||||
help: None,
|
|
||||||
inner: vec![err.into()],
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -116,7 +104,7 @@ pub fn read_plugin_file(
|
|||||||
Ok(contents) => contents,
|
Ok(contents) => contents,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
log::warn!("Failed to read plugin registry file: {err:?}");
|
log::warn!("Failed to read plugin registry file: {err:?}");
|
||||||
report_error_new(
|
report_shell_error(
|
||||||
engine_state,
|
engine_state,
|
||||||
&ShellError::GenericError {
|
&ShellError::GenericError {
|
||||||
error: format!(
|
error: format!(
|
||||||
@ -137,13 +125,13 @@ pub fn read_plugin_file(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
perf(
|
perf!(
|
||||||
&format!("read plugin file {}", plugin_path.display()),
|
&format!("read plugin file {}", plugin_path.display()),
|
||||||
start_time,
|
start_time,
|
||||||
file!(),
|
engine_state
|
||||||
line!(),
|
.get_config()
|
||||||
column!(),
|
.use_ansi_coloring
|
||||||
engine_state.get_config().use_ansi_coloring,
|
.get(engine_state)
|
||||||
);
|
);
|
||||||
start_time = std::time::Instant::now();
|
start_time = std::time::Instant::now();
|
||||||
|
|
||||||
@ -152,30 +140,26 @@ pub fn read_plugin_file(
|
|||||||
nu_plugin_engine::load_plugin_file(&mut working_set, &contents, span);
|
nu_plugin_engine::load_plugin_file(&mut working_set, &contents, span);
|
||||||
|
|
||||||
if let Err(err) = engine_state.merge_delta(working_set.render()) {
|
if let Err(err) = engine_state.merge_delta(working_set.render()) {
|
||||||
report_error_new(engine_state, &err);
|
report_shell_error(engine_state, &err);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
perf(
|
perf!(
|
||||||
&format!("load plugin file {}", plugin_path.display()),
|
&format!("load plugin file {}", plugin_path.display()),
|
||||||
start_time,
|
start_time,
|
||||||
file!(),
|
engine_state
|
||||||
line!(),
|
.get_config()
|
||||||
column!(),
|
.use_ansi_coloring
|
||||||
engine_state.get_config().use_ansi_coloring,
|
.get(engine_state)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
pub fn add_plugin_file(
|
pub fn add_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
|
||||||
engine_state: &mut EngineState,
|
|
||||||
plugin_file: Option<Spanned<String>>,
|
|
||||||
storage_path: &str,
|
|
||||||
) {
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
use nu_protocol::report_parse_error;
|
||||||
|
|
||||||
if let Ok(cwd) = engine_state.cwd_as_string(None) {
|
if let Ok(cwd) = engine_state.cwd_as_string(None) {
|
||||||
if let Some(plugin_file) = plugin_file {
|
if let Some(plugin_file) = plugin_file {
|
||||||
@ -190,18 +174,18 @@ pub fn add_plugin_file(
|
|||||||
engine_state.plugin_path = Some(path)
|
engine_state.plugin_path = Some(path)
|
||||||
} else {
|
} else {
|
||||||
// It's an error if the directory for the plugin file doesn't exist.
|
// It's an error if the directory for the plugin file doesn't exist.
|
||||||
report_error(
|
report_parse_error(
|
||||||
&working_set,
|
&StateWorkingSet::new(engine_state),
|
||||||
&ParseError::FileNotFound(
|
&ParseError::FileNotFound(
|
||||||
path_dir.to_string_lossy().into_owned(),
|
path_dir.to_string_lossy().into_owned(),
|
||||||
plugin_file.span,
|
plugin_file.span,
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} else if let Some(mut plugin_path) = nu_path::config_dir() {
|
} else if let Some(plugin_path) = nu_path::nu_config_dir() {
|
||||||
// Path to store plugins signatures
|
// Path to store plugins signatures
|
||||||
plugin_path.push(storage_path);
|
let mut plugin_path =
|
||||||
let mut plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
|
canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path.into());
|
||||||
plugin_path.push(PLUGIN_FILE);
|
plugin_path.push(PLUGIN_FILE);
|
||||||
let plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
|
let plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
|
||||||
engine_state.plugin_path = Some(plugin_path);
|
engine_state.plugin_path = Some(plugin_path);
|
||||||
@ -222,7 +206,8 @@ pub fn eval_config_contents(
|
|||||||
let prev_file = engine_state.file.take();
|
let prev_file = engine_state.file.take();
|
||||||
engine_state.file = Some(config_path.clone());
|
engine_state.file = Some(config_path.clone());
|
||||||
|
|
||||||
eval_source(
|
// TODO: ignore this error?
|
||||||
|
let _ = eval_source(
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
&contents,
|
&contents,
|
||||||
@ -235,36 +220,18 @@ pub fn eval_config_contents(
|
|||||||
engine_state.file = prev_file;
|
engine_state.file = prev_file;
|
||||||
|
|
||||||
// Merge the environment in case env vars changed in the config
|
// Merge the environment in case env vars changed in the config
|
||||||
match engine_state.cwd(Some(stack)) {
|
if let Err(e) = engine_state.merge_env(stack) {
|
||||||
Ok(cwd) => {
|
report_shell_error(engine_state, &e);
|
||||||
if let Err(e) = engine_state.merge_env(stack, cwd) {
|
|
||||||
report_error_new(engine_state, &e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
report_error_new(engine_state, &e);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> Option<PathBuf> {
|
|
||||||
nu_path::config_dir().map(|mut history_path| {
|
|
||||||
history_path.push(storage_path);
|
|
||||||
history_path.push(match mode {
|
|
||||||
HistoryFileFormat::PlainText => HISTORY_FILE_TXT,
|
|
||||||
HistoryFileFormat::Sqlite => HISTORY_FILE_SQLITE,
|
|
||||||
});
|
|
||||||
history_path
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -> bool {
|
pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
PluginExample, PluginIdentity, PluginRegistryItem, PluginRegistryItemData, PluginSignature,
|
shell_error::io::IoError, PluginExample, PluginIdentity, PluginRegistryItem,
|
||||||
ShellError,
|
PluginRegistryItemData, PluginSignature, ShellError,
|
||||||
};
|
};
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
|
|
||||||
@ -274,10 +241,9 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
|||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
|
|
||||||
let Some(config_dir) = nu_path::config_dir().and_then(|mut dir| {
|
let Some(config_dir) =
|
||||||
dir.push(storage_path);
|
nu_path::nu_config_dir().and_then(|dir| nu_path::canonicalize_with(dir, &cwd).ok())
|
||||||
nu_path::canonicalize_with(dir, &cwd).ok()
|
else {
|
||||||
}) else {
|
|
||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -288,7 +254,7 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
|||||||
let old_contents = match std::fs::read(&old_plugin_file_path) {
|
let old_contents = match std::fs::read(&old_plugin_file_path) {
|
||||||
Ok(old_contents) => old_contents,
|
Ok(old_contents) => old_contents,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
report_error_new(
|
report_shell_error(
|
||||||
engine_state,
|
engine_state,
|
||||||
&ShellError::GenericError {
|
&ShellError::GenericError {
|
||||||
error: "Can't read old plugin file to migrate".into(),
|
error: "Can't read old plugin file to migrate".into(),
|
||||||
@ -344,17 +310,28 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
|||||||
name: identity.name().to_owned(),
|
name: identity.name().to_owned(),
|
||||||
filename: identity.filename().to_owned(),
|
filename: identity.filename().to_owned(),
|
||||||
shell: identity.shell().map(|p| p.to_owned()),
|
shell: identity.shell().map(|p| p.to_owned()),
|
||||||
data: PluginRegistryItemData::Valid { commands },
|
data: PluginRegistryItemData::Valid {
|
||||||
|
metadata: Default::default(),
|
||||||
|
commands,
|
||||||
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write the new file
|
// Write the new file
|
||||||
let new_plugin_file_path = config_dir.join(PLUGIN_FILE);
|
let new_plugin_file_path = config_dir.join(PLUGIN_FILE);
|
||||||
if let Err(err) = std::fs::File::create(&new_plugin_file_path)
|
if let Err(err) = std::fs::File::create(&new_plugin_file_path)
|
||||||
.map_err(|e| e.into())
|
.map_err(|err| {
|
||||||
|
IoError::new_internal_with_path(
|
||||||
|
err.kind(),
|
||||||
|
"Could not create new plugin file",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
new_plugin_file_path.clone(),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.map_err(ShellError::from)
|
||||||
.and_then(|file| contents.write_to(file, None))
|
.and_then(|file| contents.write_to(file, None))
|
||||||
{
|
{
|
||||||
report_error_new(
|
report_shell_error(
|
||||||
&engine_state,
|
&engine_state,
|
||||||
&ShellError::GenericError {
|
&ShellError::GenericError {
|
||||||
error: "Failed to save migrated plugin file".into(),
|
error: "Failed to save migrated plugin file".into(),
|
||||||
@ -378,13 +355,13 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
perf(
|
perf!(
|
||||||
"migrate old plugin file",
|
"migrate old plugin file",
|
||||||
start_time,
|
start_time,
|
||||||
file!(),
|
engine_state
|
||||||
line!(),
|
.get_config()
|
||||||
column!(),
|
.use_ansi_coloring
|
||||||
engine_state.get_config().use_ansi_coloring,
|
.get(&engine_state)
|
||||||
);
|
);
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
@ -1,42 +1,76 @@
|
|||||||
use log::info;
|
use log::info;
|
||||||
use nu_engine::{convert_env_values, eval_block};
|
use nu_engine::eval_block;
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
|
cli_error::report_compile_error,
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
report_error, PipelineData, ShellError, Spanned, Value,
|
report_parse_error, report_parse_warning, PipelineData, ShellError, Spanned, Value,
|
||||||
};
|
};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use crate::util::print_pipeline;
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct EvaluateCommandsOpts {
|
||||||
|
pub table_mode: Option<Value>,
|
||||||
|
pub error_style: Option<Value>,
|
||||||
|
pub no_newline: bool,
|
||||||
|
}
|
||||||
|
|
||||||
/// Run a command (or commands) given to us by the user
|
/// Run a command (or commands) given to us by the user
|
||||||
pub fn evaluate_commands(
|
pub fn evaluate_commands(
|
||||||
commands: &Spanned<String>,
|
commands: &Spanned<String>,
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
table_mode: Option<Value>,
|
opts: EvaluateCommandsOpts,
|
||||||
no_newline: bool,
|
|
||||||
) -> Result<(), ShellError> {
|
) -> Result<(), ShellError> {
|
||||||
// Translate environment variables from Strings to Values
|
let EvaluateCommandsOpts {
|
||||||
convert_env_values(engine_state, stack)?;
|
table_mode,
|
||||||
|
error_style,
|
||||||
|
no_newline,
|
||||||
|
} = opts;
|
||||||
|
|
||||||
|
// Handle the configured error style early
|
||||||
|
if let Some(e_style) = error_style {
|
||||||
|
match e_style.coerce_str()?.parse() {
|
||||||
|
Ok(e_style) => {
|
||||||
|
Arc::make_mut(&mut engine_state.config).error_style = e_style;
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
return Err(ShellError::GenericError {
|
||||||
|
error: "Invalid value for `--error-style`".into(),
|
||||||
|
msg: err.into(),
|
||||||
|
span: Some(e_style.span()),
|
||||||
|
help: None,
|
||||||
|
inner: vec![],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Parse the source code
|
// Parse the source code
|
||||||
let (block, delta) = {
|
let (block, delta) = {
|
||||||
if let Some(ref t_mode) = table_mode {
|
if let Some(ref t_mode) = table_mode {
|
||||||
let mut config = engine_state.get_config().clone();
|
Arc::make_mut(&mut engine_state.config).table.mode =
|
||||||
config.table_mode = t_mode.coerce_str()?.parse().unwrap_or_default();
|
t_mode.coerce_str()?.parse().unwrap_or_default();
|
||||||
engine_state.set_config(config);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
|
let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
|
||||||
if let Some(warning) = working_set.parse_warnings.first() {
|
if let Some(warning) = working_set.parse_warnings.first() {
|
||||||
report_error(&working_set, warning);
|
report_parse_warning(&working_set, warning);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_error(&working_set, err);
|
report_parse_error(&working_set, err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(err) = working_set.compile_errors.first() {
|
||||||
|
report_compile_error(&working_set, err);
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -54,15 +88,11 @@ pub fn evaluate_commands(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if let Some(t_mode) = table_mode {
|
if let Some(t_mode) = table_mode {
|
||||||
Arc::make_mut(&mut engine_state.config).table_mode =
|
Arc::make_mut(&mut engine_state.config).table.mode =
|
||||||
t_mode.coerce_str()?.parse().unwrap_or_default();
|
t_mode.coerce_str()?.parse().unwrap_or_default();
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(status) = pipeline.print(engine_state, stack, no_newline, false)? {
|
print_pipeline(engine_state, stack, pipeline, no_newline)?;
|
||||||
if status.code() != 0 {
|
|
||||||
std::process::exit(status.code())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
||||||
|
|
||||||
|
@ -1,14 +1,17 @@
|
|||||||
use crate::util::eval_source;
|
use crate::util::{eval_source, print_pipeline};
|
||||||
use log::{info, trace};
|
use log::{info, trace};
|
||||||
use nu_engine::{convert_env_values, eval_block};
|
use nu_engine::eval_block;
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
use nu_path::canonicalize_with;
|
use nu_path::canonicalize_with;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
|
cli_error::report_compile_error,
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
report_error, PipelineData, ShellError, Span, Value,
|
report_parse_error, report_parse_warning,
|
||||||
|
shell_error::io::*,
|
||||||
|
PipelineData, ShellError, Span, Value,
|
||||||
};
|
};
|
||||||
use std::sync::Arc;
|
use std::{path::PathBuf, sync::Arc};
|
||||||
|
|
||||||
/// Entry point for evaluating a file.
|
/// Entry point for evaluating a file.
|
||||||
///
|
///
|
||||||
@ -21,15 +24,15 @@ pub fn evaluate_file(
|
|||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
) -> Result<(), ShellError> {
|
) -> Result<(), ShellError> {
|
||||||
// Convert environment variables from Strings to Values and store them in the engine state.
|
|
||||||
convert_env_values(engine_state, stack)?;
|
|
||||||
|
|
||||||
let cwd = engine_state.cwd_as_string(Some(stack))?;
|
let cwd = engine_state.cwd_as_string(Some(stack))?;
|
||||||
|
|
||||||
let file_path =
|
let file_path = canonicalize_with(&path, cwd).map_err(|err| {
|
||||||
canonicalize_with(&path, cwd).map_err(|err| ShellError::FileNotFoundCustom {
|
IoError::new_internal_with_path(
|
||||||
msg: format!("Could not access file '{path}': {err}"),
|
err.kind().not_found_as(NotFound::File),
|
||||||
span: Span::unknown(),
|
"Could not access file",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
PathBuf::from(&path),
|
||||||
|
)
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let file_path_str = file_path
|
let file_path_str = file_path
|
||||||
@ -42,17 +45,23 @@ pub fn evaluate_file(
|
|||||||
span: Span::unknown(),
|
span: Span::unknown(),
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let file = std::fs::read(&file_path).map_err(|err| ShellError::FileNotFoundCustom {
|
let file = std::fs::read(&file_path).map_err(|err| {
|
||||||
msg: format!("Could not read file '{file_path_str}': {err}"),
|
IoError::new_internal_with_path(
|
||||||
span: Span::unknown(),
|
err.kind().not_found_as(NotFound::File),
|
||||||
|
"Could not read file",
|
||||||
|
nu_protocol::location!(),
|
||||||
|
file_path.clone(),
|
||||||
|
)
|
||||||
})?;
|
})?;
|
||||||
engine_state.file = Some(file_path.clone());
|
engine_state.file = Some(file_path.clone());
|
||||||
|
|
||||||
let parent = file_path
|
let parent = file_path.parent().ok_or_else(|| {
|
||||||
.parent()
|
IoError::new_internal_with_path(
|
||||||
.ok_or_else(|| ShellError::FileNotFoundCustom {
|
ErrorKind::DirectoryNotFound,
|
||||||
msg: format!("The file path '{file_path_str}' does not have a parent"),
|
"The file path does not have a parent",
|
||||||
span: Span::unknown(),
|
nu_protocol::location!(),
|
||||||
|
file_path.clone(),
|
||||||
|
)
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
stack.add_env_var(
|
stack.add_env_var(
|
||||||
@ -76,9 +85,18 @@ pub fn evaluate_file(
|
|||||||
trace!("parsing file: {}", file_path_str);
|
trace!("parsing file: {}", file_path_str);
|
||||||
let block = parse(&mut working_set, Some(file_path_str), &file, false);
|
let block = parse(&mut working_set, Some(file_path_str), &file, false);
|
||||||
|
|
||||||
|
if let Some(warning) = working_set.parse_warnings.first() {
|
||||||
|
report_parse_warning(&working_set, warning);
|
||||||
|
}
|
||||||
|
|
||||||
// If any parse errors were found, report the first error and exit.
|
// If any parse errors were found, report the first error and exit.
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_error(&working_set, err);
|
report_parse_error(&working_set, err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(err) = working_set.compile_errors.first() {
|
||||||
|
report_compile_error(&working_set, err);
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -109,11 +127,7 @@ pub fn evaluate_file(
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Print the pipeline output of the last command of the file.
|
// Print the pipeline output of the last command of the file.
|
||||||
if let Some(status) = pipeline.print(engine_state, stack, true, false)? {
|
print_pipeline(engine_state, stack, pipeline, true)?;
|
||||||
if status.code() != 0 {
|
|
||||||
std::process::exit(status.code())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Invoke the main command with arguments.
|
// Invoke the main command with arguments.
|
||||||
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
|
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
|
||||||
@ -131,7 +145,7 @@ pub fn evaluate_file(
|
|||||||
};
|
};
|
||||||
|
|
||||||
if exit_code != 0 {
|
if exit_code != 0 {
|
||||||
std::process::exit(exit_code)
|
std::process::exit(exit_code);
|
||||||
}
|
}
|
||||||
|
|
||||||
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
#![doc = include_str!("../README.md")]
|
||||||
mod commands;
|
mod commands;
|
||||||
mod completions;
|
mod completions;
|
||||||
mod config_files;
|
mod config_files;
|
||||||
@ -17,10 +18,9 @@ mod validation;
|
|||||||
pub use commands::add_cli_context;
|
pub use commands::add_cli_context;
|
||||||
pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind};
|
pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind};
|
||||||
pub use config_files::eval_config_contents;
|
pub use config_files::eval_config_contents;
|
||||||
pub use eval_cmds::evaluate_commands;
|
pub use eval_cmds::{evaluate_commands, EvaluateCommandsOpts};
|
||||||
pub use eval_file::evaluate_file;
|
pub use eval_file::evaluate_file;
|
||||||
pub use menus::NuHelpCompleter;
|
pub use menus::NuHelpCompleter;
|
||||||
pub use nu_cmd_base::util::get_init_cwd;
|
|
||||||
pub use nu_highlight::NuHighlight;
|
pub use nu_highlight::NuHighlight;
|
||||||
pub use print::Print;
|
pub use print::Print;
|
||||||
pub use prompt::NushellPrompt;
|
pub use prompt::NushellPrompt;
|
||||||
|
@ -1,62 +1,73 @@
|
|||||||
use nu_engine::documentation::get_flags_section;
|
use nu_engine::documentation::{get_flags_section, HelpStyle};
|
||||||
use nu_protocol::{engine::EngineState, levenshtein_distance};
|
use nu_protocol::{engine::EngineState, levenshtein_distance, Config};
|
||||||
use nu_utils::IgnoreCaseExt;
|
use nu_utils::IgnoreCaseExt;
|
||||||
use reedline::{Completer, Suggestion};
|
use reedline::{Completer, Suggestion};
|
||||||
use std::{fmt::Write, sync::Arc};
|
use std::{fmt::Write, sync::Arc};
|
||||||
|
|
||||||
pub struct NuHelpCompleter(Arc<EngineState>);
|
pub struct NuHelpCompleter {
|
||||||
|
engine_state: Arc<EngineState>,
|
||||||
|
config: Arc<Config>,
|
||||||
|
}
|
||||||
|
|
||||||
impl NuHelpCompleter {
|
impl NuHelpCompleter {
|
||||||
pub fn new(engine_state: Arc<EngineState>) -> Self {
|
pub fn new(engine_state: Arc<EngineState>, config: Arc<Config>) -> Self {
|
||||||
Self(engine_state)
|
Self {
|
||||||
|
engine_state,
|
||||||
|
config,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn completion_helper(&self, line: &str, pos: usize) -> Vec<Suggestion> {
|
fn completion_helper(&self, line: &str, pos: usize) -> Vec<Suggestion> {
|
||||||
let full_commands = self.0.get_signatures_with_examples(false);
|
|
||||||
let folded_line = line.to_folded_case();
|
let folded_line = line.to_folded_case();
|
||||||
|
|
||||||
//Vec<(Signature, Vec<Example>, bool, bool)> {
|
let mut help_style = HelpStyle::default();
|
||||||
let mut commands = full_commands
|
help_style.update_from_config(&self.engine_state, &self.config);
|
||||||
.iter()
|
|
||||||
.filter(|(sig, _, _, _, _)| {
|
let mut commands = self
|
||||||
sig.name.to_folded_case().contains(&folded_line)
|
.engine_state
|
||||||
|| sig.usage.to_folded_case().contains(&folded_line)
|
.get_decls_sorted(false)
|
||||||
|| sig
|
.into_iter()
|
||||||
.search_terms
|
.filter_map(|(_, decl_id)| {
|
||||||
.iter()
|
let decl = self.engine_state.get_decl(decl_id);
|
||||||
|
(decl.name().to_folded_case().contains(&folded_line)
|
||||||
|
|| decl.description().to_folded_case().contains(&folded_line)
|
||||||
|
|| decl
|
||||||
|
.search_terms()
|
||||||
|
.into_iter()
|
||||||
.any(|term| term.to_folded_case().contains(&folded_line))
|
.any(|term| term.to_folded_case().contains(&folded_line))
|
||||||
|| sig.extra_usage.to_folded_case().contains(&folded_line)
|
|| decl
|
||||||
|
.extra_description()
|
||||||
|
.to_folded_case()
|
||||||
|
.contains(&folded_line))
|
||||||
|
.then_some(decl)
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
commands.sort_by(|(a, _, _, _, _), (b, _, _, _, _)| {
|
commands.sort_by_cached_key(|decl| levenshtein_distance(line, decl.name()));
|
||||||
let a_distance = levenshtein_distance(line, &a.name);
|
|
||||||
let b_distance = levenshtein_distance(line, &b.name);
|
|
||||||
a_distance.cmp(&b_distance)
|
|
||||||
});
|
|
||||||
|
|
||||||
commands
|
commands
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(sig, examples, _, _, _)| {
|
.map(|decl| {
|
||||||
let mut long_desc = String::new();
|
let mut long_desc = String::new();
|
||||||
|
|
||||||
let usage = &sig.usage;
|
let description = decl.description();
|
||||||
if !usage.is_empty() {
|
if !description.is_empty() {
|
||||||
long_desc.push_str(usage);
|
long_desc.push_str(description);
|
||||||
long_desc.push_str("\r\n\r\n");
|
long_desc.push_str("\r\n\r\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
let extra_usage = &sig.extra_usage;
|
let extra_desc = decl.extra_description();
|
||||||
if !extra_usage.is_empty() {
|
if !extra_desc.is_empty() {
|
||||||
long_desc.push_str(extra_usage);
|
long_desc.push_str(extra_desc);
|
||||||
long_desc.push_str("\r\n\r\n");
|
long_desc.push_str("\r\n\r\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let sig = decl.signature();
|
||||||
let _ = write!(long_desc, "Usage:\r\n > {}\r\n", sig.call_signature());
|
let _ = write!(long_desc, "Usage:\r\n > {}\r\n", sig.call_signature());
|
||||||
|
|
||||||
if !sig.named.is_empty() {
|
if !sig.named.is_empty() {
|
||||||
long_desc.push_str(&get_flags_section(Some(&*self.0.clone()), sig, |v| {
|
long_desc.push_str(&get_flags_section(&sig, &help_style, |v| {
|
||||||
v.to_parsable_string(", ", &self.0.config)
|
v.to_parsable_string(", ", &self.config)
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -72,7 +83,7 @@ impl NuHelpCompleter {
|
|||||||
let opt_suffix = if let Some(value) = &positional.default_value {
|
let opt_suffix = if let Some(value) = &positional.default_value {
|
||||||
format!(
|
format!(
|
||||||
" (optional, default: {})",
|
" (optional, default: {})",
|
||||||
&value.to_parsable_string(", ", &self.0.config),
|
&value.to_parsable_string(", ", &self.config),
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
(" (optional)").to_string()
|
(" (optional)").to_string()
|
||||||
@ -93,21 +104,21 @@ impl NuHelpCompleter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let extra: Vec<String> = examples
|
let extra: Vec<String> = decl
|
||||||
|
.examples()
|
||||||
.iter()
|
.iter()
|
||||||
.map(|example| example.example.replace('\n', "\r\n"))
|
.map(|example| example.example.replace('\n', "\r\n"))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
Suggestion {
|
Suggestion {
|
||||||
value: sig.name.clone(),
|
value: decl.name().into(),
|
||||||
description: Some(long_desc),
|
description: Some(long_desc),
|
||||||
style: None,
|
|
||||||
extra: Some(extra),
|
extra: Some(extra),
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: pos - line.len(),
|
start: pos - line.len(),
|
||||||
end: pos,
|
end: pos,
|
||||||
},
|
},
|
||||||
append_whitespace: false,
|
..Suggestion::default()
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
@ -138,7 +149,8 @@ mod test {
|
|||||||
) {
|
) {
|
||||||
let engine_state =
|
let engine_state =
|
||||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context());
|
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context());
|
||||||
let mut completer = NuHelpCompleter::new(engine_state.into());
|
let config = engine_state.get_config().clone();
|
||||||
|
let mut completer = NuHelpCompleter::new(engine_state.into(), config);
|
||||||
let suggestions = completer.complete(line, end);
|
let suggestions = completer.complete(line, end);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -2,7 +2,7 @@ use nu_engine::eval_block;
|
|||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{EngineState, Stack},
|
engine::{EngineState, Stack},
|
||||||
IntoPipelineData, Span, Value,
|
BlockId, IntoPipelineData, Span, Value,
|
||||||
};
|
};
|
||||||
use reedline::{menu_functions::parse_selection_char, Completer, Suggestion};
|
use reedline::{menu_functions::parse_selection_char, Completer, Suggestion};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
@ -10,7 +10,7 @@ use std::sync::Arc;
|
|||||||
const SELECTION_CHAR: char = '!';
|
const SELECTION_CHAR: char = '!';
|
||||||
|
|
||||||
pub struct NuMenuCompleter {
|
pub struct NuMenuCompleter {
|
||||||
block_id: usize,
|
block_id: BlockId,
|
||||||
span: Span,
|
span: Span,
|
||||||
stack: Stack,
|
stack: Stack,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
@ -19,7 +19,7 @@ pub struct NuMenuCompleter {
|
|||||||
|
|
||||||
impl NuMenuCompleter {
|
impl NuMenuCompleter {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
block_id: usize,
|
block_id: BlockId,
|
||||||
span: Span,
|
span: Span,
|
||||||
stack: Stack,
|
stack: Stack,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
@ -28,7 +28,7 @@ impl NuMenuCompleter {
|
|||||||
Self {
|
Self {
|
||||||
block_id,
|
block_id,
|
||||||
span,
|
span,
|
||||||
stack: stack.reset_out_dest().capture(),
|
stack: stack.reset_out_dest().collect_value(),
|
||||||
engine_state,
|
engine_state,
|
||||||
only_buffer_difference,
|
only_buffer_difference,
|
||||||
}
|
}
|
||||||
@ -142,10 +142,9 @@ fn convert_to_suggestions(
|
|||||||
vec![Suggestion {
|
vec![Suggestion {
|
||||||
value: text,
|
value: text,
|
||||||
description,
|
description,
|
||||||
style: None,
|
|
||||||
extra,
|
extra,
|
||||||
span,
|
span,
|
||||||
append_whitespace: false,
|
..Suggestion::default()
|
||||||
}]
|
}]
|
||||||
}
|
}
|
||||||
Value::List { vals, .. } => vals
|
Value::List { vals, .. } => vals
|
||||||
@ -154,9 +153,6 @@ fn convert_to_suggestions(
|
|||||||
.collect(),
|
.collect(),
|
||||||
_ => vec![Suggestion {
|
_ => vec![Suggestion {
|
||||||
value: format!("Not a record: {value:?}"),
|
value: format!("Not a record: {value:?}"),
|
||||||
description: None,
|
|
||||||
style: None,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span {
|
span: reedline::Span {
|
||||||
start: if only_buffer_difference {
|
start: if only_buffer_difference {
|
||||||
pos - line.len()
|
pos - line.len()
|
||||||
@ -169,7 +165,7 @@ fn convert_to_suggestions(
|
|||||||
line.len()
|
line.len()
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
append_whitespace: false,
|
..Suggestion::default()
|
||||||
}],
|
}],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use nu_engine::command_prelude::*;
|
use nu_engine::command_prelude::*;
|
||||||
use reedline::{Highlighter, StyledText};
|
use reedline::{Highlighter, StyledText};
|
||||||
|
|
||||||
@ -15,7 +17,7 @@ impl Command for NuHighlight {
|
|||||||
.input_output_types(vec![(Type::String, Type::String)])
|
.input_output_types(vec![(Type::String, Type::String)])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Syntax highlight the input string."
|
"Syntax highlight the input string."
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -32,14 +34,11 @@ impl Command for NuHighlight {
|
|||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let head = call.head;
|
let head = call.head;
|
||||||
|
|
||||||
let ctrlc = engine_state.ctrlc.clone();
|
let signals = engine_state.signals();
|
||||||
let engine_state = std::sync::Arc::new(engine_state.clone());
|
|
||||||
let config = engine_state.get_config().clone();
|
|
||||||
|
|
||||||
let highlighter = crate::NuHighlighter {
|
let highlighter = crate::NuHighlighter {
|
||||||
engine_state,
|
engine_state: Arc::new(engine_state.clone()),
|
||||||
stack: std::sync::Arc::new(stack.clone()),
|
stack: Arc::new(stack.clone()),
|
||||||
config,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
input.map(
|
input.map(
|
||||||
@ -50,7 +49,7 @@ impl Command for NuHighlight {
|
|||||||
}
|
}
|
||||||
Err(err) => Value::error(err, head),
|
Err(err) => Value::error(err, head),
|
||||||
},
|
},
|
||||||
ctrlc,
|
signals,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
use nu_engine::command_prelude::*;
|
use nu_engine::command_prelude::*;
|
||||||
|
use nu_protocol::ByteStreamSource;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Print;
|
pub struct Print;
|
||||||
@ -22,14 +23,19 @@ impl Command for Print {
|
|||||||
Some('n'),
|
Some('n'),
|
||||||
)
|
)
|
||||||
.switch("stderr", "print to stderr instead of stdout", Some('e'))
|
.switch("stderr", "print to stderr instead of stdout", Some('e'))
|
||||||
|
.switch(
|
||||||
|
"raw",
|
||||||
|
"print without formatting (including binary data)",
|
||||||
|
Some('r'),
|
||||||
|
)
|
||||||
.category(Category::Strings)
|
.category(Category::Strings)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn description(&self) -> &str {
|
||||||
"Print the given values to stdout."
|
"Print the given values to stdout."
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_description(&self) -> &str {
|
||||||
r#"Unlike `echo`, this command does not return any value (`print | describe` will return "nothing").
|
r#"Unlike `echo`, this command does not return any value (`print | describe` will return "nothing").
|
||||||
Since this command has no output, there is no point in piping it with other commands.
|
Since this command has no output, there is no point in piping it with other commands.
|
||||||
|
|
||||||
@ -45,20 +51,39 @@ Since this command has no output, there is no point in piping it with other comm
|
|||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
call: &Call,
|
call: &Call,
|
||||||
input: PipelineData,
|
mut input: PipelineData,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
||||||
let no_newline = call.has_flag(engine_state, stack, "no-newline")?;
|
let no_newline = call.has_flag(engine_state, stack, "no-newline")?;
|
||||||
let to_stderr = call.has_flag(engine_state, stack, "stderr")?;
|
let to_stderr = call.has_flag(engine_state, stack, "stderr")?;
|
||||||
|
let raw = call.has_flag(engine_state, stack, "raw")?;
|
||||||
|
|
||||||
// This will allow for easy printing of pipelines as well
|
// This will allow for easy printing of pipelines as well
|
||||||
if !args.is_empty() {
|
if !args.is_empty() {
|
||||||
for arg in args {
|
for arg in args {
|
||||||
|
if raw {
|
||||||
arg.into_pipeline_data()
|
arg.into_pipeline_data()
|
||||||
.print(engine_state, stack, no_newline, to_stderr)?;
|
.print_raw(engine_state, no_newline, to_stderr)?;
|
||||||
|
} else {
|
||||||
|
arg.into_pipeline_data().print_table(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
no_newline,
|
||||||
|
to_stderr,
|
||||||
|
)?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} else if !input.is_nothing() {
|
} else if !input.is_nothing() {
|
||||||
input.print(engine_state, stack, no_newline, to_stderr)?;
|
if let PipelineData::ByteStream(stream, _) = &mut input {
|
||||||
|
if let ByteStreamSource::Child(child) = stream.source_mut() {
|
||||||
|
child.ignore_error(true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if raw {
|
||||||
|
input.print_raw(engine_state, no_newline, to_stderr)?;
|
||||||
|
} else {
|
||||||
|
input.print_table(engine_state, stack, no_newline, to_stderr)?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(PipelineData::empty())
|
Ok(PipelineData::empty())
|
||||||
@ -76,6 +101,11 @@ Since this command has no output, there is no point in piping it with other comm
|
|||||||
example: r#"print (2 + 3)"#,
|
example: r#"print (2 + 3)"#,
|
||||||
result: None,
|
result: None,
|
||||||
},
|
},
|
||||||
|
Example {
|
||||||
|
description: "Print 'ABC' from binary data",
|
||||||
|
example: r#"0x[41 42 43] | print --raw"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,10 +1,7 @@
|
|||||||
use crate::prompt_update::{
|
use crate::prompt_update::{
|
||||||
POST_PROMPT_MARKER, PRE_PROMPT_MARKER, VSCODE_POST_PROMPT_MARKER, VSCODE_PRE_PROMPT_MARKER,
|
POST_PROMPT_MARKER, PRE_PROMPT_MARKER, VSCODE_POST_PROMPT_MARKER, VSCODE_PRE_PROMPT_MARKER,
|
||||||
};
|
};
|
||||||
use nu_protocol::{
|
use nu_protocol::engine::{EngineState, Stack};
|
||||||
engine::{EngineState, Stack},
|
|
||||||
Value,
|
|
||||||
};
|
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
use nu_utils::enable_vt_processing;
|
use nu_utils::enable_vt_processing;
|
||||||
use reedline::{
|
use reedline::{
|
||||||
@ -124,8 +121,11 @@ impl Prompt for NushellPrompt {
|
|||||||
.replace('\n', "\r\n");
|
.replace('\n', "\r\n");
|
||||||
|
|
||||||
if self.shell_integration_osc633 {
|
if self.shell_integration_osc633 {
|
||||||
if self.stack.get_env_var(&self.engine_state, "TERM_PROGRAM")
|
if self
|
||||||
== Some(Value::test_string("vscode"))
|
.stack
|
||||||
|
.get_env_var(&self.engine_state, "TERM_PROGRAM")
|
||||||
|
.and_then(|v| v.as_str().ok())
|
||||||
|
== Some("vscode")
|
||||||
{
|
{
|
||||||
// We're in vscode and we have osc633 enabled
|
// We're in vscode and we have osc633 enabled
|
||||||
format!("{VSCODE_PRE_PROMPT_MARKER}{prompt}{VSCODE_POST_PROMPT_MARKER}").into()
|
format!("{VSCODE_PRE_PROMPT_MARKER}{prompt}{VSCODE_POST_PROMPT_MARKER}").into()
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
use crate::NushellPrompt;
|
use crate::NushellPrompt;
|
||||||
use log::trace;
|
use log::{trace, warn};
|
||||||
use nu_engine::ClosureEvalOnce;
|
use nu_engine::ClosureEvalOnce;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, Stack},
|
engine::{EngineState, Stack},
|
||||||
report_error_new, Config, PipelineData, Value,
|
report_shell_error, Config, PipelineData, Value,
|
||||||
};
|
};
|
||||||
use reedline::Prompt;
|
use reedline::Prompt;
|
||||||
|
|
||||||
@ -30,27 +30,21 @@ pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
|
|||||||
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
|
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
|
||||||
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
|
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
|
||||||
pub(crate) const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\";
|
pub(crate) const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\";
|
||||||
#[allow(dead_code)]
|
|
||||||
pub(crate) const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;";
|
pub(crate) const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;";
|
||||||
#[allow(dead_code)]
|
|
||||||
pub(crate) const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
|
pub(crate) const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
|
||||||
|
|
||||||
// OSC633 is the same as OSC133 but specifically for VSCode
|
// OSC633 is the same as OSC133 but specifically for VSCode
|
||||||
pub(crate) const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\";
|
pub(crate) const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\";
|
||||||
pub(crate) const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\";
|
pub(crate) const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\";
|
||||||
#[allow(dead_code)]
|
|
||||||
pub(crate) const VSCODE_PRE_EXECUTION_MARKER: &str = "\x1b]633;C\x1b\\";
|
pub(crate) const VSCODE_PRE_EXECUTION_MARKER: &str = "\x1b]633;C\x1b\\";
|
||||||
#[allow(dead_code)]
|
|
||||||
//"\x1b]633;D;{}\x1b\\"
|
//"\x1b]633;D;{}\x1b\\"
|
||||||
pub(crate) const VSCODE_POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]633;D;";
|
pub(crate) const VSCODE_POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]633;D;";
|
||||||
#[allow(dead_code)]
|
|
||||||
pub(crate) const VSCODE_POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
|
pub(crate) const VSCODE_POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
|
||||||
#[allow(dead_code)]
|
//"\x1b]633;E;{}\x1b\\"
|
||||||
pub(crate) const VSCODE_COMMANDLINE_MARKER: &str = "\x1b]633;E\x1b\\";
|
pub(crate) const VSCODE_COMMANDLINE_MARKER_PREFIX: &str = "\x1b]633;E;";
|
||||||
#[allow(dead_code)]
|
pub(crate) const VSCODE_COMMANDLINE_MARKER_SUFFIX: &str = "\x1b\\";
|
||||||
// "\x1b]633;P;Cwd={}\x1b\\"
|
// "\x1b]633;P;Cwd={}\x1b\\"
|
||||||
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd=";
|
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd=";
|
||||||
#[allow(dead_code)]
|
|
||||||
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\";
|
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\";
|
||||||
|
|
||||||
pub(crate) const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
|
pub(crate) const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
|
||||||
@ -65,7 +59,7 @@ fn get_prompt_string(
|
|||||||
.get_env_var(engine_state, prompt)
|
.get_env_var(engine_state, prompt)
|
||||||
.and_then(|v| match v {
|
.and_then(|v| match v {
|
||||||
Value::Closure { val, .. } => {
|
Value::Closure { val, .. } => {
|
||||||
let result = ClosureEvalOnce::new(engine_state, stack, *val)
|
let result = ClosureEvalOnce::new(engine_state, stack, val.as_ref().clone())
|
||||||
.run_with_input(PipelineData::Empty);
|
.run_with_input(PipelineData::Empty);
|
||||||
|
|
||||||
trace!(
|
trace!(
|
||||||
@ -77,7 +71,7 @@ fn get_prompt_string(
|
|||||||
|
|
||||||
result
|
result
|
||||||
.map_err(|err| {
|
.map_err(|err| {
|
||||||
report_error_new(engine_state, &err);
|
report_shell_error(engine_state, &err);
|
||||||
})
|
})
|
||||||
.ok()
|
.ok()
|
||||||
}
|
}
|
||||||
@ -86,18 +80,19 @@ fn get_prompt_string(
|
|||||||
})
|
})
|
||||||
.and_then(|pipeline_data| {
|
.and_then(|pipeline_data| {
|
||||||
let output = pipeline_data.collect_string("", config).ok();
|
let output = pipeline_data.collect_string("", config).ok();
|
||||||
|
let ansi_output = output.map(|mut x| {
|
||||||
|
// Always reset the color at the start of the right prompt
|
||||||
|
// to ensure there is no ansi bleed over
|
||||||
|
if x.is_empty() && prompt == PROMPT_COMMAND_RIGHT {
|
||||||
|
x.insert_str(0, "\x1b[0m")
|
||||||
|
};
|
||||||
|
|
||||||
output.map(|mut x| {
|
|
||||||
// Just remove the very last newline.
|
|
||||||
if x.ends_with('\n') {
|
|
||||||
x.pop();
|
|
||||||
}
|
|
||||||
|
|
||||||
if x.ends_with('\r') {
|
|
||||||
x.pop();
|
|
||||||
}
|
|
||||||
x
|
x
|
||||||
})
|
});
|
||||||
|
// Let's keep this for debugging purposes with nu --log-level warn
|
||||||
|
warn!("{}:{}:{} {:?}", file!(), line!(), column!(), ansi_output);
|
||||||
|
|
||||||
|
ansi_output
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -115,13 +110,17 @@ pub(crate) fn update_prompt(
|
|||||||
|
|
||||||
// Now that we have the prompt string lets ansify it.
|
// Now that we have the prompt string lets ansify it.
|
||||||
// <133 A><prompt><133 B><command><133 C><command output>
|
// <133 A><prompt><133 B><command><133 C><command output>
|
||||||
let left_prompt_string = if config.shell_integration_osc633 {
|
let left_prompt_string = if config.shell_integration.osc633 {
|
||||||
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
if stack
|
||||||
|
.get_env_var(engine_state, "TERM_PROGRAM")
|
||||||
|
.and_then(|v| v.as_str().ok())
|
||||||
|
== Some("vscode")
|
||||||
|
{
|
||||||
// We're in vscode and we have osc633 enabled
|
// We're in vscode and we have osc633 enabled
|
||||||
Some(format!(
|
Some(format!(
|
||||||
"{VSCODE_PRE_PROMPT_MARKER}{configured_left_prompt_string}{VSCODE_POST_PROMPT_MARKER}"
|
"{VSCODE_PRE_PROMPT_MARKER}{configured_left_prompt_string}{VSCODE_POST_PROMPT_MARKER}"
|
||||||
))
|
))
|
||||||
} else if config.shell_integration_osc133 {
|
} else if config.shell_integration.osc133 {
|
||||||
// If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
|
// If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
|
||||||
Some(format!(
|
Some(format!(
|
||||||
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
|
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
|
||||||
@ -129,7 +128,7 @@ pub(crate) fn update_prompt(
|
|||||||
} else {
|
} else {
|
||||||
configured_left_prompt_string.into()
|
configured_left_prompt_string.into()
|
||||||
}
|
}
|
||||||
} else if config.shell_integration_osc133 {
|
} else if config.shell_integration.osc133 {
|
||||||
Some(format!(
|
Some(format!(
|
||||||
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
|
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
|
||||||
))
|
))
|
||||||
|
@ -1,16 +1,14 @@
|
|||||||
use crate::{menus::NuMenuCompleter, NuHelpCompleter};
|
use crate::{menus::NuMenuCompleter, NuHelpCompleter};
|
||||||
use crossterm::event::{KeyCode, KeyModifiers};
|
use crossterm::event::{KeyCode, KeyModifiers};
|
||||||
use log::trace;
|
|
||||||
use nu_ansi_term::Style;
|
use nu_ansi_term::Style;
|
||||||
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
|
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
|
||||||
use nu_engine::eval_block;
|
use nu_engine::eval_block;
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
create_menus,
|
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
extract_value, Config, EditBindings, ParsedKeybinding, ParsedMenu, PipelineData, Record,
|
extract_value, Config, EditBindings, FromValue, ParsedKeybinding, ParsedMenu, PipelineData,
|
||||||
ShellError, Span, Value,
|
Record, ShellError, Span, Type, Value,
|
||||||
};
|
};
|
||||||
use reedline::{
|
use reedline::{
|
||||||
default_emacs_keybindings, default_vi_insert_keybindings, default_vi_normal_keybindings,
|
default_emacs_keybindings, default_vi_insert_keybindings, default_vi_normal_keybindings,
|
||||||
@ -37,6 +35,41 @@ const DEFAULT_COMPLETION_MENU: &str = r#"
|
|||||||
}
|
}
|
||||||
}"#;
|
}"#;
|
||||||
|
|
||||||
|
const DEFAULT_IDE_COMPLETION_MENU: &str = r#"
|
||||||
|
{
|
||||||
|
name: ide_completion_menu
|
||||||
|
only_buffer_difference: false
|
||||||
|
marker: "| "
|
||||||
|
type: {
|
||||||
|
layout: ide
|
||||||
|
min_completion_width: 0,
|
||||||
|
max_completion_width: 50,
|
||||||
|
max_completion_height: 10, # will be limited by the available lines in the terminal
|
||||||
|
padding: 0,
|
||||||
|
border: true,
|
||||||
|
cursor_offset: 0,
|
||||||
|
description_mode: "prefer_right"
|
||||||
|
min_description_width: 0
|
||||||
|
max_description_width: 50
|
||||||
|
max_description_height: 10
|
||||||
|
description_offset: 1
|
||||||
|
# If true, the cursor pos will be corrected, so the suggestions match up with the typed text
|
||||||
|
#
|
||||||
|
# C:\> str
|
||||||
|
# str join
|
||||||
|
# str trim
|
||||||
|
# str split
|
||||||
|
correct_cursor_pos: false
|
||||||
|
}
|
||||||
|
style: {
|
||||||
|
text: green
|
||||||
|
selected_text: { attr: r }
|
||||||
|
description_text: yellow
|
||||||
|
match_text: { attr: u }
|
||||||
|
selected_match_text: { attr: ur }
|
||||||
|
}
|
||||||
|
}"#;
|
||||||
|
|
||||||
const DEFAULT_HISTORY_MENU: &str = r#"
|
const DEFAULT_HISTORY_MENU: &str = r#"
|
||||||
{
|
{
|
||||||
name: history_menu
|
name: history_menu
|
||||||
@ -76,31 +109,41 @@ const DEFAULT_HELP_MENU: &str = r#"
|
|||||||
// Adds all menus to line editor
|
// Adds all menus to line editor
|
||||||
pub(crate) fn add_menus(
|
pub(crate) fn add_menus(
|
||||||
mut line_editor: Reedline,
|
mut line_editor: Reedline,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state_ref: Arc<EngineState>,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
config: &Config,
|
config: Arc<Config>,
|
||||||
) -> Result<Reedline, ShellError> {
|
) -> Result<Reedline, ShellError> {
|
||||||
trace!("add_menus: config: {:#?}", &config);
|
//log::trace!("add_menus: config: {:#?}", &config);
|
||||||
line_editor = line_editor.clear_menus();
|
line_editor = line_editor.clear_menus();
|
||||||
|
|
||||||
for menu in &config.menus {
|
for menu in &config.menus {
|
||||||
line_editor = add_menu(line_editor, menu, engine_state.clone(), stack, config)?
|
line_editor = add_menu(
|
||||||
|
line_editor,
|
||||||
|
menu,
|
||||||
|
engine_state_ref.clone(),
|
||||||
|
stack,
|
||||||
|
config.clone(),
|
||||||
|
)?
|
||||||
}
|
}
|
||||||
|
|
||||||
// Checking if the default menus have been added from the config file
|
// Checking if the default menus have been added from the config file
|
||||||
let default_menus = [
|
let default_menus = [
|
||||||
("completion_menu", DEFAULT_COMPLETION_MENU),
|
("completion_menu", DEFAULT_COMPLETION_MENU),
|
||||||
|
("ide_completion_menu", DEFAULT_IDE_COMPLETION_MENU),
|
||||||
("history_menu", DEFAULT_HISTORY_MENU),
|
("history_menu", DEFAULT_HISTORY_MENU),
|
||||||
("help_menu", DEFAULT_HELP_MENU),
|
("help_menu", DEFAULT_HELP_MENU),
|
||||||
];
|
];
|
||||||
|
|
||||||
|
let mut engine_state = (*engine_state_ref).clone();
|
||||||
|
let mut menu_eval_results = vec![];
|
||||||
|
|
||||||
for (name, definition) in default_menus {
|
for (name, definition) in default_menus {
|
||||||
if !config
|
if !config
|
||||||
.menus
|
.menus
|
||||||
.iter()
|
.iter()
|
||||||
.any(|menu| menu.name.to_expanded_string("", config) == name)
|
.any(|menu| menu.name.to_expanded_string("", &config) == name)
|
||||||
{
|
{
|
||||||
let (block, _) = {
|
let (block, delta) = {
|
||||||
let mut working_set = StateWorkingSet::new(&engine_state);
|
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||||
let output = parse(
|
let output = parse(
|
||||||
&mut working_set,
|
&mut working_set,
|
||||||
@ -112,16 +155,30 @@ pub(crate) fn add_menus(
|
|||||||
(output, working_set.render())
|
(output, working_set.render())
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut temp_stack = Stack::new().capture();
|
engine_state.merge_delta(delta)?;
|
||||||
let input = PipelineData::Empty;
|
|
||||||
let res = eval_block::<WithoutDebug>(&engine_state, &mut temp_stack, &block, input)?;
|
|
||||||
|
|
||||||
|
let mut temp_stack = Stack::new().collect_value();
|
||||||
|
let input = PipelineData::Empty;
|
||||||
|
menu_eval_results.push(eval_block::<WithoutDebug>(
|
||||||
|
&engine_state,
|
||||||
|
&mut temp_stack,
|
||||||
|
&block,
|
||||||
|
input,
|
||||||
|
)?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let new_engine_state_ref = Arc::new(engine_state);
|
||||||
|
|
||||||
|
for res in menu_eval_results.into_iter() {
|
||||||
if let PipelineData::Value(value, None) = res {
|
if let PipelineData::Value(value, None) = res {
|
||||||
for menu in create_menus(&value)? {
|
line_editor = add_menu(
|
||||||
line_editor =
|
line_editor,
|
||||||
add_menu(line_editor, &menu, engine_state.clone(), stack, config)?;
|
&ParsedMenu::from_value(value)?,
|
||||||
}
|
new_engine_state_ref.clone(),
|
||||||
}
|
stack,
|
||||||
|
config.clone(),
|
||||||
|
)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -133,33 +190,33 @@ fn add_menu(
|
|||||||
menu: &ParsedMenu,
|
menu: &ParsedMenu,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
config: &Config,
|
config: Arc<Config>,
|
||||||
) -> Result<Reedline, ShellError> {
|
) -> Result<Reedline, ShellError> {
|
||||||
let span = menu.menu_type.span();
|
let span = menu.r#type.span();
|
||||||
if let Value::Record { val, .. } = &menu.menu_type {
|
if let Value::Record { val, .. } = &menu.r#type {
|
||||||
let layout = extract_value("layout", val, span)?.to_expanded_string("", config);
|
let layout = extract_value("layout", val, span)?.to_expanded_string("", &config);
|
||||||
|
|
||||||
match layout.as_str() {
|
match layout.as_str() {
|
||||||
"columnar" => add_columnar_menu(line_editor, menu, engine_state, stack, config),
|
"columnar" => add_columnar_menu(line_editor, menu, engine_state, stack, &config),
|
||||||
"list" => add_list_menu(line_editor, menu, engine_state, stack, config),
|
"list" => add_list_menu(line_editor, menu, engine_state, stack, config),
|
||||||
"ide" => add_ide_menu(line_editor, menu, engine_state, stack, config),
|
"ide" => add_ide_menu(line_editor, menu, engine_state, stack, config),
|
||||||
"description" => add_description_menu(line_editor, menu, engine_state, stack, config),
|
"description" => add_description_menu(line_editor, menu, engine_state, stack, config),
|
||||||
_ => Err(ShellError::UnsupportedConfigValue {
|
str => Err(ShellError::InvalidValue {
|
||||||
expected: "columnar, list, ide or description".to_string(),
|
valid: "'columnar', 'list', 'ide', or 'description'".into(),
|
||||||
value: menu.menu_type.to_abbreviated_string(config),
|
actual: format!("'{str}'"),
|
||||||
span: menu.menu_type.span(),
|
span,
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
Err(ShellError::UnsupportedConfigValue {
|
Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "only record type".to_string(),
|
expected: Type::record(),
|
||||||
value: menu.menu_type.to_abbreviated_string(config),
|
actual: menu.r#type.get_type(),
|
||||||
span: menu.menu_type.span(),
|
span,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_style(record: &Record, name: &str, span: Span) -> Option<Style> {
|
fn get_style(record: &Record, name: &'static str, span: Span) -> Option<Style> {
|
||||||
extract_value(name, record, span)
|
extract_value(name, record, span)
|
||||||
.ok()
|
.ok()
|
||||||
.map(|text| match text {
|
.map(|text| match text {
|
||||||
@ -169,6 +226,29 @@ fn get_style(record: &Record, name: &str, span: Span) -> Option<Style> {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn set_menu_style<M: MenuBuilder>(mut menu: M, style: &Value) -> M {
|
||||||
|
let span = style.span();
|
||||||
|
let Value::Record { val, .. } = &style else {
|
||||||
|
return menu;
|
||||||
|
};
|
||||||
|
if let Some(style) = get_style(val, "text", span) {
|
||||||
|
menu = menu.with_text_style(style);
|
||||||
|
}
|
||||||
|
if let Some(style) = get_style(val, "selected_text", span) {
|
||||||
|
menu = menu.with_selected_text_style(style);
|
||||||
|
}
|
||||||
|
if let Some(style) = get_style(val, "description_text", span) {
|
||||||
|
menu = menu.with_description_text_style(style);
|
||||||
|
}
|
||||||
|
if let Some(style) = get_style(val, "match_text", span) {
|
||||||
|
menu = menu.with_match_text_style(style);
|
||||||
|
}
|
||||||
|
if let Some(style) = get_style(val, "selected_match_text", span) {
|
||||||
|
menu = menu.with_selected_match_text_style(style);
|
||||||
|
}
|
||||||
|
menu
|
||||||
|
}
|
||||||
|
|
||||||
// Adds a columnar menu to the editor engine
|
// Adds a columnar menu to the editor engine
|
||||||
pub(crate) fn add_columnar_menu(
|
pub(crate) fn add_columnar_menu(
|
||||||
line_editor: Reedline,
|
line_editor: Reedline,
|
||||||
@ -177,11 +257,11 @@ pub(crate) fn add_columnar_menu(
|
|||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
config: &Config,
|
config: &Config,
|
||||||
) -> Result<Reedline, ShellError> {
|
) -> Result<Reedline, ShellError> {
|
||||||
let span = menu.menu_type.span();
|
let span = menu.r#type.span();
|
||||||
let name = menu.name.to_expanded_string("", config);
|
let name = menu.name.to_expanded_string("", config);
|
||||||
let mut columnar_menu = ColumnarMenu::default().with_name(&name);
|
let mut columnar_menu = ColumnarMenu::default().with_name(&name);
|
||||||
|
|
||||||
if let Value::Record { val, .. } = &menu.menu_type {
|
if let Value::Record { val, .. } = &menu.r#type {
|
||||||
columnar_menu = match extract_value("columns", val, span) {
|
columnar_menu = match extract_value("columns", val, span) {
|
||||||
Ok(columns) => {
|
Ok(columns) => {
|
||||||
let columns = columns.as_int()?;
|
let columns = columns.as_int()?;
|
||||||
@ -207,24 +287,7 @@ pub(crate) fn add_columnar_menu(
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
let span = menu.style.span();
|
columnar_menu = set_menu_style(columnar_menu, &menu.style);
|
||||||
if let Value::Record { val, .. } = &menu.style {
|
|
||||||
if let Some(style) = get_style(val, "text", span) {
|
|
||||||
columnar_menu = columnar_menu.with_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "selected_text", span) {
|
|
||||||
columnar_menu = columnar_menu.with_selected_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "description_text", span) {
|
|
||||||
columnar_menu = columnar_menu.with_description_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "match_text", span) {
|
|
||||||
columnar_menu = columnar_menu.with_match_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "selected_match_text", span) {
|
|
||||||
columnar_menu = columnar_menu.with_selected_match_text_style(style);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let marker = menu.marker.to_expanded_string("", config);
|
let marker = menu.marker.to_expanded_string("", config);
|
||||||
columnar_menu = columnar_menu.with_marker(&marker);
|
columnar_menu = columnar_menu.with_marker(&marker);
|
||||||
@ -232,30 +295,23 @@ pub(crate) fn add_columnar_menu(
|
|||||||
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
||||||
columnar_menu = columnar_menu.with_only_buffer_difference(only_buffer_difference);
|
columnar_menu = columnar_menu.with_only_buffer_difference(only_buffer_difference);
|
||||||
|
|
||||||
let span = menu.source.span();
|
let completer = if let Some(closure) = &menu.source {
|
||||||
match &menu.source {
|
|
||||||
Value::Nothing { .. } => {
|
|
||||||
Ok(line_editor.with_menu(ReedlineMenu::EngineCompleter(Box::new(columnar_menu))))
|
|
||||||
}
|
|
||||||
Value::Closure { val, .. } => {
|
|
||||||
let menu_completer = NuMenuCompleter::new(
|
let menu_completer = NuMenuCompleter::new(
|
||||||
val.block_id,
|
closure.block_id,
|
||||||
span,
|
span,
|
||||||
stack.captures_to_stack(val.captures.clone()),
|
stack.captures_to_stack(closure.captures.clone()),
|
||||||
engine_state,
|
engine_state,
|
||||||
only_buffer_difference,
|
only_buffer_difference,
|
||||||
);
|
);
|
||||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
ReedlineMenu::WithCompleter {
|
||||||
menu: Box::new(columnar_menu),
|
menu: Box::new(columnar_menu),
|
||||||
completer: Box::new(menu_completer),
|
completer: Box::new(menu_completer),
|
||||||
}))
|
|
||||||
}
|
|
||||||
_ => Err(ShellError::UnsupportedConfigValue {
|
|
||||||
expected: "block or omitted value".to_string(),
|
|
||||||
value: menu.source.to_abbreviated_string(config),
|
|
||||||
span,
|
|
||||||
}),
|
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
ReedlineMenu::EngineCompleter(Box::new(columnar_menu))
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(line_editor.with_menu(completer))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Adds a search menu to the line editor
|
// Adds a search menu to the line editor
|
||||||
@ -264,13 +320,13 @@ pub(crate) fn add_list_menu(
|
|||||||
menu: &ParsedMenu,
|
menu: &ParsedMenu,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
config: &Config,
|
config: Arc<Config>,
|
||||||
) -> Result<Reedline, ShellError> {
|
) -> Result<Reedline, ShellError> {
|
||||||
let name = menu.name.to_expanded_string("", config);
|
let name = menu.name.to_expanded_string("", &config);
|
||||||
let mut list_menu = ListMenu::default().with_name(&name);
|
let mut list_menu = ListMenu::default().with_name(&name);
|
||||||
|
|
||||||
let span = menu.menu_type.span();
|
let span = menu.r#type.span();
|
||||||
if let Value::Record { val, .. } = &menu.menu_type {
|
if let Value::Record { val, .. } = &menu.r#type {
|
||||||
list_menu = match extract_value("page_size", val, span) {
|
list_menu = match extract_value("page_size", val, span) {
|
||||||
Ok(page_size) => {
|
Ok(page_size) => {
|
||||||
let page_size = page_size.as_int()?;
|
let page_size = page_size.as_int()?;
|
||||||
@ -280,49 +336,31 @@ pub(crate) fn add_list_menu(
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
let span = menu.style.span();
|
list_menu = set_menu_style(list_menu, &menu.style);
|
||||||
if let Value::Record { val, .. } = &menu.style {
|
|
||||||
if let Some(style) = get_style(val, "text", span) {
|
|
||||||
list_menu = list_menu.with_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "selected_text", span) {
|
|
||||||
list_menu = list_menu.with_selected_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "description_text", span) {
|
|
||||||
list_menu = list_menu.with_description_text_style(style);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let marker = menu.marker.to_expanded_string("", config);
|
let marker = menu.marker.to_expanded_string("", &config);
|
||||||
list_menu = list_menu.with_marker(&marker);
|
list_menu = list_menu.with_marker(&marker);
|
||||||
|
|
||||||
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
||||||
list_menu = list_menu.with_only_buffer_difference(only_buffer_difference);
|
list_menu = list_menu.with_only_buffer_difference(only_buffer_difference);
|
||||||
|
|
||||||
let span = menu.source.span();
|
let completer = if let Some(closure) = &menu.source {
|
||||||
match &menu.source {
|
|
||||||
Value::Nothing { .. } => {
|
|
||||||
Ok(line_editor.with_menu(ReedlineMenu::HistoryMenu(Box::new(list_menu))))
|
|
||||||
}
|
|
||||||
Value::Closure { val, .. } => {
|
|
||||||
let menu_completer = NuMenuCompleter::new(
|
let menu_completer = NuMenuCompleter::new(
|
||||||
val.block_id,
|
closure.block_id,
|
||||||
span,
|
span,
|
||||||
stack.captures_to_stack(val.captures.clone()),
|
stack.captures_to_stack(closure.captures.clone()),
|
||||||
engine_state,
|
engine_state,
|
||||||
only_buffer_difference,
|
only_buffer_difference,
|
||||||
);
|
);
|
||||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
ReedlineMenu::WithCompleter {
|
||||||
menu: Box::new(list_menu),
|
menu: Box::new(list_menu),
|
||||||
completer: Box::new(menu_completer),
|
completer: Box::new(menu_completer),
|
||||||
}))
|
|
||||||
}
|
|
||||||
_ => Err(ShellError::UnsupportedConfigValue {
|
|
||||||
expected: "block or omitted value".to_string(),
|
|
||||||
value: menu.source.to_abbreviated_string(config),
|
|
||||||
span: menu.source.span(),
|
|
||||||
}),
|
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
ReedlineMenu::HistoryMenu(Box::new(list_menu))
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(line_editor.with_menu(completer))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Adds an IDE menu to the line editor
|
// Adds an IDE menu to the line editor
|
||||||
@ -331,13 +369,13 @@ pub(crate) fn add_ide_menu(
|
|||||||
menu: &ParsedMenu,
|
menu: &ParsedMenu,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
config: &Config,
|
config: Arc<Config>,
|
||||||
) -> Result<Reedline, ShellError> {
|
) -> Result<Reedline, ShellError> {
|
||||||
let span = menu.menu_type.span();
|
let span = menu.r#type.span();
|
||||||
let name = menu.name.to_expanded_string("", config);
|
let name = menu.name.to_expanded_string("", &config);
|
||||||
let mut ide_menu = IdeMenu::default().with_name(&name);
|
let mut ide_menu = IdeMenu::default().with_name(&name);
|
||||||
|
|
||||||
if let Value::Record { val, .. } = &menu.menu_type {
|
if let Value::Record { val, .. } = &menu.r#type {
|
||||||
ide_menu = match extract_value("min_completion_width", val, span) {
|
ide_menu = match extract_value("min_completion_width", val, span) {
|
||||||
Ok(min_completion_width) => {
|
Ok(min_completion_width) => {
|
||||||
let min_completion_width = min_completion_width.as_int()?;
|
let min_completion_width = min_completion_width.as_int()?;
|
||||||
@ -397,9 +435,9 @@ pub(crate) fn add_ide_menu(
|
|||||||
vertical,
|
vertical,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "bool or record".to_string(),
|
expected: Type::custom("bool or record"),
|
||||||
value: border.to_abbreviated_string(config),
|
actual: border.get_type(),
|
||||||
span: border.span(),
|
span: border.span(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -420,10 +458,10 @@ pub(crate) fn add_ide_menu(
|
|||||||
"left" => ide_menu.with_description_mode(DescriptionMode::Left),
|
"left" => ide_menu.with_description_mode(DescriptionMode::Left),
|
||||||
"right" => ide_menu.with_description_mode(DescriptionMode::Right),
|
"right" => ide_menu.with_description_mode(DescriptionMode::Right),
|
||||||
"prefer_right" => ide_menu.with_description_mode(DescriptionMode::PreferRight),
|
"prefer_right" => ide_menu.with_description_mode(DescriptionMode::PreferRight),
|
||||||
_ => {
|
str => {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::InvalidValue {
|
||||||
expected: "\"left\", \"right\" or \"prefer_right\"".to_string(),
|
valid: "'left', 'right', or 'prefer_right'".into(),
|
||||||
value: description_mode.to_abbreviated_string(config),
|
actual: format!("'{str}'"),
|
||||||
span: description_mode.span(),
|
span: description_mode.span(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -472,55 +510,31 @@ pub(crate) fn add_ide_menu(
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
let span = menu.style.span();
|
ide_menu = set_menu_style(ide_menu, &menu.style);
|
||||||
if let Value::Record { val, .. } = &menu.style {
|
|
||||||
if let Some(style) = get_style(val, "text", span) {
|
|
||||||
ide_menu = ide_menu.with_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "selected_text", span) {
|
|
||||||
ide_menu = ide_menu.with_selected_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "description_text", span) {
|
|
||||||
ide_menu = ide_menu.with_description_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "match_text", span) {
|
|
||||||
ide_menu = ide_menu.with_match_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "selected_match_text", span) {
|
|
||||||
ide_menu = ide_menu.with_selected_match_text_style(style);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let marker = menu.marker.to_expanded_string("", config);
|
let marker = menu.marker.to_expanded_string("", &config);
|
||||||
ide_menu = ide_menu.with_marker(&marker);
|
ide_menu = ide_menu.with_marker(&marker);
|
||||||
|
|
||||||
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
||||||
ide_menu = ide_menu.with_only_buffer_difference(only_buffer_difference);
|
ide_menu = ide_menu.with_only_buffer_difference(only_buffer_difference);
|
||||||
|
|
||||||
let span = menu.source.span();
|
let completer = if let Some(closure) = &menu.source {
|
||||||
match &menu.source {
|
|
||||||
Value::Nothing { .. } => {
|
|
||||||
Ok(line_editor.with_menu(ReedlineMenu::EngineCompleter(Box::new(ide_menu))))
|
|
||||||
}
|
|
||||||
Value::Closure { val, .. } => {
|
|
||||||
let menu_completer = NuMenuCompleter::new(
|
let menu_completer = NuMenuCompleter::new(
|
||||||
val.block_id,
|
closure.block_id,
|
||||||
span,
|
span,
|
||||||
stack.captures_to_stack(val.captures.clone()),
|
stack.captures_to_stack(closure.captures.clone()),
|
||||||
engine_state,
|
engine_state,
|
||||||
only_buffer_difference,
|
only_buffer_difference,
|
||||||
);
|
);
|
||||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
ReedlineMenu::WithCompleter {
|
||||||
menu: Box::new(ide_menu),
|
menu: Box::new(ide_menu),
|
||||||
completer: Box::new(menu_completer),
|
completer: Box::new(menu_completer),
|
||||||
}))
|
|
||||||
}
|
|
||||||
_ => Err(ShellError::UnsupportedConfigValue {
|
|
||||||
expected: "block or omitted value".to_string(),
|
|
||||||
value: menu.source.to_abbreviated_string(config),
|
|
||||||
span,
|
|
||||||
}),
|
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
ReedlineMenu::EngineCompleter(Box::new(ide_menu))
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(line_editor.with_menu(completer))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Adds a description menu to the line editor
|
// Adds a description menu to the line editor
|
||||||
@ -529,13 +543,13 @@ pub(crate) fn add_description_menu(
|
|||||||
menu: &ParsedMenu,
|
menu: &ParsedMenu,
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
stack: &Stack,
|
stack: &Stack,
|
||||||
config: &Config,
|
config: Arc<Config>,
|
||||||
) -> Result<Reedline, ShellError> {
|
) -> Result<Reedline, ShellError> {
|
||||||
let name = menu.name.to_expanded_string("", config);
|
let name = menu.name.to_expanded_string("", &config);
|
||||||
let mut description_menu = DescriptionMenu::default().with_name(&name);
|
let mut description_menu = DescriptionMenu::default().with_name(&name);
|
||||||
|
|
||||||
let span = menu.menu_type.span();
|
let span = menu.r#type.span();
|
||||||
if let Value::Record { val, .. } = &menu.menu_type {
|
if let Value::Record { val, .. } = &menu.r#type {
|
||||||
description_menu = match extract_value("columns", val, span) {
|
description_menu = match extract_value("columns", val, span) {
|
||||||
Ok(columns) => {
|
Ok(columns) => {
|
||||||
let columns = columns.as_int()?;
|
let columns = columns.as_int()?;
|
||||||
@ -577,53 +591,35 @@ pub(crate) fn add_description_menu(
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
let span = menu.style.span();
|
description_menu = set_menu_style(description_menu, &menu.style);
|
||||||
if let Value::Record { val, .. } = &menu.style {
|
|
||||||
if let Some(style) = get_style(val, "text", span) {
|
|
||||||
description_menu = description_menu.with_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "selected_text", span) {
|
|
||||||
description_menu = description_menu.with_selected_text_style(style);
|
|
||||||
}
|
|
||||||
if let Some(style) = get_style(val, "description_text", span) {
|
|
||||||
description_menu = description_menu.with_description_text_style(style);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let marker = menu.marker.to_expanded_string("", config);
|
let marker = menu.marker.to_expanded_string("", &config);
|
||||||
description_menu = description_menu.with_marker(&marker);
|
description_menu = description_menu.with_marker(&marker);
|
||||||
|
|
||||||
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
||||||
description_menu = description_menu.with_only_buffer_difference(only_buffer_difference);
|
description_menu = description_menu.with_only_buffer_difference(only_buffer_difference);
|
||||||
|
|
||||||
let span = menu.source.span();
|
let completer = if let Some(closure) = &menu.source {
|
||||||
match &menu.source {
|
|
||||||
Value::Nothing { .. } => {
|
|
||||||
let completer = Box::new(NuHelpCompleter::new(engine_state));
|
|
||||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
|
||||||
menu: Box::new(description_menu),
|
|
||||||
completer,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
Value::Closure { val, .. } => {
|
|
||||||
let menu_completer = NuMenuCompleter::new(
|
let menu_completer = NuMenuCompleter::new(
|
||||||
val.block_id,
|
closure.block_id,
|
||||||
span,
|
span,
|
||||||
stack.captures_to_stack(val.captures.clone()),
|
stack.captures_to_stack(closure.captures.clone()),
|
||||||
engine_state,
|
engine_state,
|
||||||
only_buffer_difference,
|
only_buffer_difference,
|
||||||
);
|
);
|
||||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
ReedlineMenu::WithCompleter {
|
||||||
menu: Box::new(description_menu),
|
menu: Box::new(description_menu),
|
||||||
completer: Box::new(menu_completer),
|
completer: Box::new(menu_completer),
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
_ => Err(ShellError::UnsupportedConfigValue {
|
} else {
|
||||||
expected: "closure or omitted value".to_string(),
|
let menu_completer = NuHelpCompleter::new(engine_state, config);
|
||||||
value: menu.source.to_abbreviated_string(config),
|
ReedlineMenu::WithCompleter {
|
||||||
span: menu.source.span(),
|
menu: Box::new(description_menu),
|
||||||
}),
|
completer: Box::new(menu_completer),
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(line_editor.with_menu(completer))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
||||||
@ -638,6 +634,16 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
|||||||
]),
|
]),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
keybindings.add_binding(
|
||||||
|
KeyModifiers::CONTROL,
|
||||||
|
KeyCode::Char(' '),
|
||||||
|
ReedlineEvent::UntilFound(vec![
|
||||||
|
ReedlineEvent::Menu("ide_completion_menu".to_string()),
|
||||||
|
ReedlineEvent::MenuNext,
|
||||||
|
ReedlineEvent::Edit(vec![EditCommand::Complete]),
|
||||||
|
]),
|
||||||
|
);
|
||||||
|
|
||||||
keybindings.add_binding(
|
keybindings.add_binding(
|
||||||
KeyModifiers::SHIFT,
|
KeyModifiers::SHIFT,
|
||||||
KeyCode::BackTab,
|
KeyCode::BackTab,
|
||||||
@ -734,12 +740,18 @@ fn add_keybinding(
|
|||||||
let span = mode.span();
|
let span = mode.span();
|
||||||
match &mode {
|
match &mode {
|
||||||
Value::String { val, .. } => match val.as_str() {
|
Value::String { val, .. } => match val.as_str() {
|
||||||
"emacs" => add_parsed_keybinding(emacs_keybindings, keybinding, config),
|
str if str.eq_ignore_ascii_case("emacs") => {
|
||||||
"vi_insert" => add_parsed_keybinding(insert_keybindings, keybinding, config),
|
add_parsed_keybinding(emacs_keybindings, keybinding, config)
|
||||||
"vi_normal" => add_parsed_keybinding(normal_keybindings, keybinding, config),
|
}
|
||||||
m => Err(ShellError::UnsupportedConfigValue {
|
str if str.eq_ignore_ascii_case("vi_insert") => {
|
||||||
expected: "emacs, vi_insert or vi_normal".to_string(),
|
add_parsed_keybinding(insert_keybindings, keybinding, config)
|
||||||
value: m.to_string(),
|
}
|
||||||
|
str if str.eq_ignore_ascii_case("vi_normal") => {
|
||||||
|
add_parsed_keybinding(normal_keybindings, keybinding, config)
|
||||||
|
}
|
||||||
|
str => Err(ShellError::InvalidValue {
|
||||||
|
valid: "'emacs', 'vi_insert', or 'vi_normal'".into(),
|
||||||
|
actual: format!("'{str}'"),
|
||||||
span,
|
span,
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
@ -757,9 +769,9 @@ fn add_keybinding(
|
|||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
v => Err(ShellError::UnsupportedConfigValue {
|
v => Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "string or list of strings".to_string(),
|
expected: Type::custom("string or list<string>"),
|
||||||
value: v.to_abbreviated_string(config),
|
actual: v.get_type(),
|
||||||
span: v.span(),
|
span: v.span(),
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
@ -770,56 +782,72 @@ fn add_parsed_keybinding(
|
|||||||
keybinding: &ParsedKeybinding,
|
keybinding: &ParsedKeybinding,
|
||||||
config: &Config,
|
config: &Config,
|
||||||
) -> Result<(), ShellError> {
|
) -> Result<(), ShellError> {
|
||||||
let modifier = match keybinding
|
let Ok(modifier_str) = keybinding.modifier.as_str() else {
|
||||||
.modifier
|
return Err(ShellError::RuntimeTypeMismatch {
|
||||||
.to_expanded_string("", config)
|
expected: Type::String,
|
||||||
.to_ascii_lowercase()
|
actual: keybinding.modifier.get_type(),
|
||||||
.as_str()
|
|
||||||
{
|
|
||||||
"control" => KeyModifiers::CONTROL,
|
|
||||||
"shift" => KeyModifiers::SHIFT,
|
|
||||||
"alt" => KeyModifiers::ALT,
|
|
||||||
"none" => KeyModifiers::NONE,
|
|
||||||
"shift_alt" | "alt_shift" => KeyModifiers::SHIFT | KeyModifiers::ALT,
|
|
||||||
"control_shift" | "shift_control" => KeyModifiers::CONTROL | KeyModifiers::SHIFT,
|
|
||||||
"control_alt" | "alt_control" => KeyModifiers::CONTROL | KeyModifiers::ALT,
|
|
||||||
"control_alt_shift" | "control_shift_alt" => {
|
|
||||||
KeyModifiers::CONTROL | KeyModifiers::ALT | KeyModifiers::SHIFT
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
|
||||||
expected: "CONTROL, SHIFT, ALT or NONE".to_string(),
|
|
||||||
value: keybinding.modifier.to_abbreviated_string(config),
|
|
||||||
span: keybinding.modifier.span(),
|
span: keybinding.modifier.span(),
|
||||||
})
|
});
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let keycode = match keybinding
|
let mut modifier = KeyModifiers::NONE;
|
||||||
.keycode
|
if !str::eq_ignore_ascii_case(modifier_str, "none") {
|
||||||
.to_expanded_string("", config)
|
for part in modifier_str.split('_') {
|
||||||
.to_ascii_lowercase()
|
match part.to_ascii_lowercase().as_str() {
|
||||||
.as_str()
|
"control" => modifier |= KeyModifiers::CONTROL,
|
||||||
{
|
"shift" => modifier |= KeyModifiers::SHIFT,
|
||||||
"backspace" => KeyCode::Backspace,
|
"alt" => modifier |= KeyModifiers::ALT,
|
||||||
"enter" => KeyCode::Enter,
|
"super" => modifier |= KeyModifiers::SUPER,
|
||||||
c if c.starts_with("char_") => {
|
"hyper" => modifier |= KeyModifiers::HYPER,
|
||||||
let mut char_iter = c.chars().skip(5);
|
"meta" => modifier |= KeyModifiers::META,
|
||||||
let pos1 = char_iter.next();
|
_ => {
|
||||||
let pos2 = char_iter.next();
|
return Err(ShellError::InvalidValue {
|
||||||
|
valid: "'control', 'shift', 'alt', 'super', 'hyper', 'meta', or 'none'"
|
||||||
|
.into(),
|
||||||
|
actual: format!("'{part}'"),
|
||||||
|
span: keybinding.modifier.span(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let char = if let (Some(char), None) = (pos1, pos2) {
|
let Ok(keycode) = keybinding.keycode.as_str() else {
|
||||||
char
|
return Err(ShellError::RuntimeTypeMismatch {
|
||||||
} else {
|
expected: Type::String,
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
actual: keybinding.keycode.get_type(),
|
||||||
expected: "char_<CHAR: unicode codepoint>".to_string(),
|
|
||||||
value: c.to_string(),
|
|
||||||
span: keybinding.keycode.span(),
|
span: keybinding.keycode.span(),
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
KeyCode::Char(char)
|
let keycode_lower = keycode.to_ascii_lowercase();
|
||||||
|
|
||||||
|
let keycode = if let Some(rest) = keycode_lower.strip_prefix("char_") {
|
||||||
|
let error = |valid: &str, actual: &str| ShellError::InvalidValue {
|
||||||
|
valid: valid.into(),
|
||||||
|
actual: actual.into(),
|
||||||
|
span: keybinding.keycode.span(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut char_iter = rest.chars();
|
||||||
|
let char = match (char_iter.next(), char_iter.next()) {
|
||||||
|
(Some(char), None) => char,
|
||||||
|
(Some('u'), Some(_)) => {
|
||||||
|
// This will never panic as we know there are at least two symbols
|
||||||
|
let Ok(code_point) = u32::from_str_radix(&rest[1..], 16) else {
|
||||||
|
return Err(error("a valid hex code", keycode));
|
||||||
|
};
|
||||||
|
|
||||||
|
char::from_u32(code_point).ok_or(error("a valid Unicode code point", keycode))?
|
||||||
}
|
}
|
||||||
|
_ => return Err(error("'char_<char>' or 'char_u<hex code>'", keycode)),
|
||||||
|
};
|
||||||
|
|
||||||
|
KeyCode::Char(char)
|
||||||
|
} else {
|
||||||
|
match keycode_lower.as_str() {
|
||||||
|
"backspace" => KeyCode::Backspace,
|
||||||
|
"enter" => KeyCode::Enter,
|
||||||
"space" => KeyCode::Char(' '),
|
"space" => KeyCode::Char(' '),
|
||||||
"down" => KeyCode::Down,
|
"down" => KeyCode::Down,
|
||||||
"up" => KeyCode::Up,
|
"up" => KeyCode::Up,
|
||||||
@ -833,28 +861,28 @@ fn add_parsed_keybinding(
|
|||||||
"backtab" => KeyCode::BackTab,
|
"backtab" => KeyCode::BackTab,
|
||||||
"delete" => KeyCode::Delete,
|
"delete" => KeyCode::Delete,
|
||||||
"insert" => KeyCode::Insert,
|
"insert" => KeyCode::Insert,
|
||||||
c if c.starts_with('f') => {
|
c if c.starts_with('f') => c[1..]
|
||||||
let fn_num: u8 = c[1..]
|
|
||||||
.parse()
|
.parse()
|
||||||
.ok()
|
.ok()
|
||||||
.filter(|num| matches!(num, 1..=20))
|
.filter(|num| (1..=35).contains(num))
|
||||||
.ok_or(ShellError::UnsupportedConfigValue {
|
.map(KeyCode::F)
|
||||||
expected: "(f1|f2|...|f20)".to_string(),
|
.ok_or(ShellError::InvalidValue {
|
||||||
value: format!("unknown function key: {c}"),
|
valid: "'f1', 'f2', ..., or 'f35'".into(),
|
||||||
|
actual: format!("'{keycode}'"),
|
||||||
span: keybinding.keycode.span(),
|
span: keybinding.keycode.span(),
|
||||||
})?;
|
})?,
|
||||||
KeyCode::F(fn_num)
|
|
||||||
}
|
|
||||||
"null" => KeyCode::Null,
|
"null" => KeyCode::Null,
|
||||||
"esc" | "escape" => KeyCode::Esc,
|
"esc" | "escape" => KeyCode::Esc,
|
||||||
_ => {
|
_ => {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::InvalidValue {
|
||||||
expected: "crossterm KeyCode".to_string(),
|
valid: "a crossterm KeyCode".into(),
|
||||||
value: keybinding.keycode.to_abbreviated_string(config),
|
actual: format!("'{keycode}'"),
|
||||||
span: keybinding.keycode.span(),
|
span: keybinding.keycode.span(),
|
||||||
})
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(event) = parse_event(&keybinding.event, config)? {
|
if let Some(event) = parse_event(&keybinding.event, config)? {
|
||||||
keybindings.add_binding(modifier, keycode, event);
|
keybindings.add_binding(modifier, keycode, event);
|
||||||
} else {
|
} else {
|
||||||
@ -876,8 +904,8 @@ impl<'config> EventType<'config> {
|
|||||||
.map(Self::Send)
|
.map(Self::Send)
|
||||||
.or_else(|_| extract_value("edit", record, span).map(Self::Edit))
|
.or_else(|_| extract_value("edit", record, span).map(Self::Edit))
|
||||||
.or_else(|_| extract_value("until", record, span).map(Self::Until))
|
.or_else(|_| extract_value("until", record, span).map(Self::Until))
|
||||||
.map_err(|_| ShellError::MissingConfigValue {
|
.map_err(|_| ShellError::MissingRequiredColumn {
|
||||||
missing_value: "send, edit or until".to_string(),
|
column: "'send', 'edit', or 'until'",
|
||||||
span,
|
span,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -915,9 +943,9 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
|
|||||||
.iter()
|
.iter()
|
||||||
.map(|value| match parse_event(value, config) {
|
.map(|value| match parse_event(value, config) {
|
||||||
Ok(inner) => match inner {
|
Ok(inner) => match inner {
|
||||||
None => Err(ShellError::UnsupportedConfigValue {
|
None => Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "List containing valid events".to_string(),
|
expected: Type::custom("record or table"),
|
||||||
value: "Nothing value (null)".to_string(),
|
actual: value.get_type(),
|
||||||
span: value.span(),
|
span: value.span(),
|
||||||
}),
|
}),
|
||||||
Some(event) => Ok(event),
|
Some(event) => Ok(event),
|
||||||
@ -928,9 +956,9 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
|
|||||||
|
|
||||||
Ok(Some(ReedlineEvent::UntilFound(events)))
|
Ok(Some(ReedlineEvent::UntilFound(events)))
|
||||||
}
|
}
|
||||||
v => Err(ShellError::UnsupportedConfigValue {
|
v => Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "list of events".to_string(),
|
expected: Type::list(Type::Any),
|
||||||
value: v.to_abbreviated_string(config),
|
actual: v.get_type(),
|
||||||
span: v.span(),
|
span: v.span(),
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
@ -940,9 +968,9 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
|
|||||||
.iter()
|
.iter()
|
||||||
.map(|value| match parse_event(value, config) {
|
.map(|value| match parse_event(value, config) {
|
||||||
Ok(inner) => match inner {
|
Ok(inner) => match inner {
|
||||||
None => Err(ShellError::UnsupportedConfigValue {
|
None => Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "List containing valid events".to_string(),
|
expected: Type::custom("record or table"),
|
||||||
value: "Nothing value (null)".to_string(),
|
actual: value.get_type(),
|
||||||
span: value.span(),
|
span: value.span(),
|
||||||
}),
|
}),
|
||||||
Some(event) => Ok(event),
|
Some(event) => Ok(event),
|
||||||
@ -954,9 +982,9 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
|
|||||||
Ok(Some(ReedlineEvent::Multiple(events)))
|
Ok(Some(ReedlineEvent::Multiple(events)))
|
||||||
}
|
}
|
||||||
Value::Nothing { .. } => Ok(None),
|
Value::Nothing { .. } => Ok(None),
|
||||||
v => Err(ShellError::UnsupportedConfigValue {
|
v => Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "record or list of records, null to unbind key".to_string(),
|
expected: Type::custom("record, table, or nothing"),
|
||||||
value: v.to_abbreviated_string(config),
|
actual: v.get_type(),
|
||||||
span: v.span(),
|
span: v.span(),
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
@ -1005,12 +1033,12 @@ fn event_from_record(
|
|||||||
let cmd = extract_value("cmd", record, span)?;
|
let cmd = extract_value("cmd", record, span)?;
|
||||||
ReedlineEvent::ExecuteHostCommand(cmd.to_expanded_string("", config))
|
ReedlineEvent::ExecuteHostCommand(cmd.to_expanded_string("", config))
|
||||||
}
|
}
|
||||||
v => {
|
str => {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::InvalidValue {
|
||||||
expected: "Reedline event".to_string(),
|
valid: "a reedline event".into(),
|
||||||
value: v.to_string(),
|
actual: format!("'{str}'"),
|
||||||
span,
|
span,
|
||||||
})
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -1103,7 +1131,7 @@ fn edit_from_record(
|
|||||||
}
|
}
|
||||||
"insertchar" => {
|
"insertchar" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
EditCommand::InsertChar(char)
|
EditCommand::InsertChar(char)
|
||||||
}
|
}
|
||||||
"insertstring" => {
|
"insertstring" => {
|
||||||
@ -1140,17 +1168,17 @@ fn edit_from_record(
|
|||||||
"redo" => EditCommand::Redo,
|
"redo" => EditCommand::Redo,
|
||||||
"cutrightuntil" => {
|
"cutrightuntil" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
EditCommand::CutRightUntil(char)
|
EditCommand::CutRightUntil(char)
|
||||||
}
|
}
|
||||||
"cutrightbefore" => {
|
"cutrightbefore" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
EditCommand::CutRightBefore(char)
|
EditCommand::CutRightBefore(char)
|
||||||
}
|
}
|
||||||
"moverightuntil" => {
|
"moverightuntil" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
let select = extract_value("select", record, span)
|
let select = extract_value("select", record, span)
|
||||||
.and_then(|value| value.as_bool())
|
.and_then(|value| value.as_bool())
|
||||||
.unwrap_or(false);
|
.unwrap_or(false);
|
||||||
@ -1158,7 +1186,7 @@ fn edit_from_record(
|
|||||||
}
|
}
|
||||||
"moverightbefore" => {
|
"moverightbefore" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
let select = extract_value("select", record, span)
|
let select = extract_value("select", record, span)
|
||||||
.and_then(|value| value.as_bool())
|
.and_then(|value| value.as_bool())
|
||||||
.unwrap_or(false);
|
.unwrap_or(false);
|
||||||
@ -1166,17 +1194,17 @@ fn edit_from_record(
|
|||||||
}
|
}
|
||||||
"cutleftuntil" => {
|
"cutleftuntil" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
EditCommand::CutLeftUntil(char)
|
EditCommand::CutLeftUntil(char)
|
||||||
}
|
}
|
||||||
"cutleftbefore" => {
|
"cutleftbefore" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
EditCommand::CutLeftBefore(char)
|
EditCommand::CutLeftBefore(char)
|
||||||
}
|
}
|
||||||
"moveleftuntil" => {
|
"moveleftuntil" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
let select = extract_value("select", record, span)
|
let select = extract_value("select", record, span)
|
||||||
.and_then(|value| value.as_bool())
|
.and_then(|value| value.as_bool())
|
||||||
.unwrap_or(false);
|
.unwrap_or(false);
|
||||||
@ -1184,7 +1212,7 @@ fn edit_from_record(
|
|||||||
}
|
}
|
||||||
"moveleftbefore" => {
|
"moveleftbefore" => {
|
||||||
let value = extract_value("value", record, span)?;
|
let value = extract_value("value", record, span)?;
|
||||||
let char = extract_char(value, config)?;
|
let char = extract_char(value)?;
|
||||||
let select = extract_value("select", record, span)
|
let select = extract_value("select", record, span)
|
||||||
.and_then(|value| value.as_bool())
|
.and_then(|value| value.as_bool())
|
||||||
.unwrap_or(false);
|
.unwrap_or(false);
|
||||||
@ -1201,29 +1229,37 @@ fn edit_from_record(
|
|||||||
#[cfg(feature = "system-clipboard")]
|
#[cfg(feature = "system-clipboard")]
|
||||||
"pastesystem" => EditCommand::PasteSystem,
|
"pastesystem" => EditCommand::PasteSystem,
|
||||||
"selectall" => EditCommand::SelectAll,
|
"selectall" => EditCommand::SelectAll,
|
||||||
e => {
|
str => {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::InvalidValue {
|
||||||
expected: "reedline EditCommand".to_string(),
|
valid: "a reedline EditCommand".into(),
|
||||||
value: e.to_string(),
|
actual: format!("'{str}'"),
|
||||||
span,
|
span,
|
||||||
})
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(edit)
|
Ok(edit)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extract_char(value: &Value, config: &Config) -> Result<char, ShellError> {
|
fn extract_char(value: &Value) -> Result<char, ShellError> {
|
||||||
let span = value.span();
|
if let Ok(str) = value.as_str() {
|
||||||
value
|
let mut chars = str.chars();
|
||||||
.to_expanded_string("", config)
|
match (chars.next(), chars.next()) {
|
||||||
.chars()
|
(Some(c), None) => Ok(c),
|
||||||
.next()
|
_ => Err(ShellError::InvalidValue {
|
||||||
.ok_or_else(|| ShellError::MissingConfigValue {
|
valid: "a single character".into(),
|
||||||
missing_value: "char to insert".to_string(),
|
actual: format!("'{str}'"),
|
||||||
span,
|
span: value.span(),
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Err(ShellError::RuntimeTypeMismatch {
|
||||||
|
expected: Type::String,
|
||||||
|
actual: value.get_type(),
|
||||||
|
span: value.span(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod test {
|
mod test {
|
||||||
@ -1351,7 +1387,7 @@ mod test {
|
|||||||
|
|
||||||
let span = Span::test_data();
|
let span = Span::test_data();
|
||||||
let b = EventType::try_from_record(&event, span);
|
let b = EventType::try_from_record(&event, span);
|
||||||
assert!(matches!(b, Err(ShellError::MissingConfigValue { .. })));
|
assert!(matches!(b, Err(ShellError::MissingRequiredColumn { .. })));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -6,7 +6,7 @@ use nu_parser::{flatten_block, parse, FlatShape};
|
|||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
ast::{Block, Expr, Expression, PipelineRedirection, RecordItem},
|
ast::{Block, Expr, Expression, PipelineRedirection, RecordItem},
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
Config, Span,
|
Span,
|
||||||
};
|
};
|
||||||
use reedline::{Highlighter, StyledText};
|
use reedline::{Highlighter, StyledText};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
@ -14,15 +14,14 @@ use std::sync::Arc;
|
|||||||
pub struct NuHighlighter {
|
pub struct NuHighlighter {
|
||||||
pub engine_state: Arc<EngineState>,
|
pub engine_state: Arc<EngineState>,
|
||||||
pub stack: Arc<Stack>,
|
pub stack: Arc<Stack>,
|
||||||
pub config: Config,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Highlighter for NuHighlighter {
|
impl Highlighter for NuHighlighter {
|
||||||
fn highlight(&self, line: &str, _cursor: usize) -> StyledText {
|
fn highlight(&self, line: &str, _cursor: usize) -> StyledText {
|
||||||
trace!("highlighting: {}", line);
|
trace!("highlighting: {}", line);
|
||||||
|
|
||||||
let highlight_resolved_externals =
|
let config = self.stack.get_config(&self.engine_state);
|
||||||
self.engine_state.get_config().highlight_resolved_externals;
|
let highlight_resolved_externals = config.highlight_resolved_externals;
|
||||||
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
||||||
let block = parse(&mut working_set, None, line.as_bytes(), false);
|
let block = parse(&mut working_set, None, line.as_bytes(), false);
|
||||||
let (shapes, global_span_offset) = {
|
let (shapes, global_span_offset) = {
|
||||||
@ -88,7 +87,7 @@ impl Highlighter for NuHighlighter {
|
|||||||
.to_string();
|
.to_string();
|
||||||
|
|
||||||
let mut add_colored_token = |shape: &FlatShape, text: String| {
|
let mut add_colored_token = |shape: &FlatShape, text: String| {
|
||||||
output.push((get_shape_color(shape.as_str(), &self.config), text));
|
output.push((get_shape_color(shape.as_str(), &config), text));
|
||||||
};
|
};
|
||||||
|
|
||||||
match shape.1 {
|
match shape.1 {
|
||||||
@ -128,9 +127,9 @@ impl Highlighter for NuHighlighter {
|
|||||||
let start = part.start - span.start;
|
let start = part.start - span.start;
|
||||||
let end = part.end - span.start;
|
let end = part.end - span.start;
|
||||||
let text = next_token[start..end].to_string();
|
let text = next_token[start..end].to_string();
|
||||||
let mut style = get_shape_color(shape.as_str(), &self.config);
|
let mut style = get_shape_color(shape.as_str(), &config);
|
||||||
if highlight {
|
if highlight {
|
||||||
style = get_matching_brackets_style(style, &self.config);
|
style = get_matching_brackets_style(style, &config);
|
||||||
}
|
}
|
||||||
output.push((style, text));
|
output.push((style, text));
|
||||||
}
|
}
|
||||||
@ -138,14 +137,13 @@ impl Highlighter for NuHighlighter {
|
|||||||
|
|
||||||
FlatShape::Filepath => add_colored_token(&shape.1, next_token),
|
FlatShape::Filepath => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Directory => add_colored_token(&shape.1, next_token),
|
FlatShape::Directory => add_colored_token(&shape.1, next_token),
|
||||||
|
FlatShape::GlobInterpolation => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::GlobPattern => add_colored_token(&shape.1, next_token),
|
FlatShape::GlobPattern => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Variable(_) | FlatShape::VarDecl(_) => {
|
FlatShape::Variable(_) | FlatShape::VarDecl(_) => {
|
||||||
add_colored_token(&shape.1, next_token)
|
add_colored_token(&shape.1, next_token)
|
||||||
}
|
}
|
||||||
FlatShape::Flag => add_colored_token(&shape.1, next_token),
|
FlatShape::Flag => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Pipe => add_colored_token(&shape.1, next_token),
|
FlatShape::Pipe => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::And => add_colored_token(&shape.1, next_token),
|
|
||||||
FlatShape::Or => add_colored_token(&shape.1, next_token),
|
|
||||||
FlatShape::Redirection => add_colored_token(&shape.1, next_token),
|
FlatShape::Redirection => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
|
FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
|
||||||
FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
|
FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
|
||||||
@ -311,6 +309,7 @@ fn find_matching_block_end_in_expr(
|
|||||||
.unwrap_or(expression.span.start);
|
.unwrap_or(expression.span.start);
|
||||||
|
|
||||||
return match &expression.expr {
|
return match &expression.expr {
|
||||||
|
// TODO: Can't these be handled with an `_ => None` branch? Refactor
|
||||||
Expr::Bool(_) => None,
|
Expr::Bool(_) => None,
|
||||||
Expr::Int(_) => None,
|
Expr::Int(_) => None,
|
||||||
Expr::Float(_) => None,
|
Expr::Float(_) => None,
|
||||||
@ -337,6 +336,28 @@ fn find_matching_block_end_in_expr(
|
|||||||
Expr::Nothing => None,
|
Expr::Nothing => None,
|
||||||
Expr::Garbage => None,
|
Expr::Garbage => None,
|
||||||
|
|
||||||
|
Expr::AttributeBlock(ab) => ab
|
||||||
|
.attributes
|
||||||
|
.iter()
|
||||||
|
.find_map(|attr| {
|
||||||
|
find_matching_block_end_in_expr(
|
||||||
|
line,
|
||||||
|
working_set,
|
||||||
|
&attr.expr,
|
||||||
|
global_span_offset,
|
||||||
|
global_cursor_offset,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.or_else(|| {
|
||||||
|
find_matching_block_end_in_expr(
|
||||||
|
line,
|
||||||
|
working_set,
|
||||||
|
&ab.item,
|
||||||
|
global_span_offset,
|
||||||
|
global_cursor_offset,
|
||||||
|
)
|
||||||
|
}),
|
||||||
|
|
||||||
Expr::Table(table) => {
|
Expr::Table(table) => {
|
||||||
if expr_last == global_cursor_offset {
|
if expr_last == global_cursor_offset {
|
||||||
// cursor is at table end
|
// cursor is at table end
|
||||||
@ -429,6 +450,14 @@ fn find_matching_block_end_in_expr(
|
|||||||
)
|
)
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
Expr::Collect(_, expr) => find_matching_block_end_in_expr(
|
||||||
|
line,
|
||||||
|
working_set,
|
||||||
|
expr,
|
||||||
|
global_span_offset,
|
||||||
|
global_cursor_offset,
|
||||||
|
),
|
||||||
|
|
||||||
Expr::Block(block_id)
|
Expr::Block(block_id)
|
||||||
| Expr::Closure(block_id)
|
| Expr::Closure(block_id)
|
||||||
| Expr::RowCondition(block_id)
|
| Expr::RowCondition(block_id)
|
||||||
@ -452,7 +481,8 @@ fn find_matching_block_end_in_expr(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Expr::StringInterpolation(exprs) => exprs.iter().find_map(|expr| {
|
Expr::StringInterpolation(exprs) | Expr::GlobInterpolation(exprs, _) => {
|
||||||
|
exprs.iter().find_map(|expr| {
|
||||||
find_matching_block_end_in_expr(
|
find_matching_block_end_in_expr(
|
||||||
line,
|
line,
|
||||||
working_set,
|
working_set,
|
||||||
@ -460,7 +490,8 @@ fn find_matching_block_end_in_expr(
|
|||||||
global_span_offset,
|
global_span_offset,
|
||||||
global_cursor_offset,
|
global_cursor_offset,
|
||||||
)
|
)
|
||||||
}),
|
})
|
||||||
|
}
|
||||||
|
|
||||||
Expr::List(list) => {
|
Expr::List(list) => {
|
||||||
if expr_last == global_cursor_offset {
|
if expr_last == global_cursor_offset {
|
||||||
|
@ -1,14 +1,18 @@
|
|||||||
|
#![allow(clippy::byte_char_slices)]
|
||||||
|
|
||||||
use nu_cmd_base::hook::eval_hook;
|
use nu_cmd_base::hook::eval_hook;
|
||||||
use nu_engine::{eval_block, eval_block_with_early_return};
|
use nu_engine::{eval_block, eval_block_with_early_return};
|
||||||
use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
|
use nu_parser::{lex, parse, unescape_unquote_string, Token, TokenContents};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
|
cli_error::report_compile_error,
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
report_error, report_error_new, PipelineData, ShellError, Span, Value,
|
report_parse_error, report_parse_warning, report_shell_error, PipelineData, ShellError, Span,
|
||||||
|
Value,
|
||||||
};
|
};
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
use nu_utils::enable_vt_processing;
|
use nu_utils::enable_vt_processing;
|
||||||
use nu_utils::utils::perf;
|
use nu_utils::{escape_quote_string, perf};
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
// This will collect environment variables from std::env and adds them to a stack.
|
// This will collect environment variables from std::env and adds them to a stack.
|
||||||
@ -39,7 +43,7 @@ fn gather_env_vars(
|
|||||||
init_cwd: &Path,
|
init_cwd: &Path,
|
||||||
) {
|
) {
|
||||||
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
||||||
report_error_new(
|
report_shell_error(
|
||||||
engine_state,
|
engine_state,
|
||||||
&ShellError::GenericError {
|
&ShellError::GenericError {
|
||||||
error: format!("Environment variable was not captured: {env_str}"),
|
error: format!("Environment variable was not captured: {env_str}"),
|
||||||
@ -70,7 +74,7 @@ fn gather_env_vars(
|
|||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
// Could not capture current working directory
|
// Could not capture current working directory
|
||||||
report_error_new(
|
report_shell_error(
|
||||||
engine_state,
|
engine_state,
|
||||||
&ShellError::GenericError {
|
&ShellError::GenericError {
|
||||||
error: "Current directory is not a valid utf-8 path".into(),
|
error: "Current directory is not a valid utf-8 path".into(),
|
||||||
@ -128,7 +132,7 @@ fn gather_env_vars(
|
|||||||
working_set.error(err);
|
working_set.error(err);
|
||||||
}
|
}
|
||||||
|
|
||||||
if working_set.parse_errors.first().is_some() {
|
if !working_set.parse_errors.is_empty() {
|
||||||
report_capture_error(
|
report_capture_error(
|
||||||
engine_state,
|
engine_state,
|
||||||
&String::from_utf8_lossy(contents),
|
&String::from_utf8_lossy(contents),
|
||||||
@ -172,7 +176,7 @@ fn gather_env_vars(
|
|||||||
working_set.error(err);
|
working_set.error(err);
|
||||||
}
|
}
|
||||||
|
|
||||||
if working_set.parse_errors.first().is_some() {
|
if !working_set.parse_errors.is_empty() {
|
||||||
report_capture_error(
|
report_capture_error(
|
||||||
engine_state,
|
engine_state,
|
||||||
&String::from_utf8_lossy(contents),
|
&String::from_utf8_lossy(contents),
|
||||||
@ -199,6 +203,35 @@ fn gather_env_vars(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Print a pipeline with formatting applied based on display_output hook.
|
||||||
|
///
|
||||||
|
/// This function should be preferred when printing values resulting from a completed evaluation.
|
||||||
|
/// For values printed as part of a command's execution, such as values printed by the `print` command,
|
||||||
|
/// the `PipelineData::print_table` function should be preferred instead as it is not config-dependent.
|
||||||
|
///
|
||||||
|
/// `no_newline` controls if we need to attach newline character to output.
|
||||||
|
pub fn print_pipeline(
|
||||||
|
engine_state: &mut EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
pipeline: PipelineData,
|
||||||
|
no_newline: bool,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
|
||||||
|
let pipeline = eval_hook(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
Some(pipeline),
|
||||||
|
vec![],
|
||||||
|
&hook,
|
||||||
|
"display_output",
|
||||||
|
)?;
|
||||||
|
pipeline.print_raw(engine_state, no_newline, false)
|
||||||
|
} else {
|
||||||
|
// if display_output isn't set, we should still prefer to print with some formatting
|
||||||
|
pipeline.print_table(engine_state, stack, no_newline, false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn eval_source(
|
pub fn eval_source(
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
@ -210,31 +243,32 @@ pub fn eval_source(
|
|||||||
let start_time = std::time::Instant::now();
|
let start_time = std::time::Instant::now();
|
||||||
|
|
||||||
let exit_code = match evaluate_source(engine_state, stack, source, fname, input, allow_return) {
|
let exit_code = match evaluate_source(engine_state, stack, source, fname, input, allow_return) {
|
||||||
Ok(code) => code.unwrap_or(0),
|
Ok(failed) => {
|
||||||
|
let code = failed.into();
|
||||||
|
stack.set_last_exit_code(code, Span::unknown());
|
||||||
|
code
|
||||||
|
}
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
report_error_new(engine_state, &err);
|
report_shell_error(engine_state, &err);
|
||||||
1
|
let code = err.exit_code();
|
||||||
|
stack.set_last_error(&err);
|
||||||
|
code.unwrap_or(0)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
stack.add_env_var(
|
|
||||||
"LAST_EXIT_CODE".to_string(),
|
|
||||||
Value::int(exit_code.into(), Span::unknown()),
|
|
||||||
);
|
|
||||||
|
|
||||||
// reset vt processing, aka ansi because illbehaved externals can break it
|
// reset vt processing, aka ansi because illbehaved externals can break it
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
{
|
{
|
||||||
let _ = enable_vt_processing();
|
let _ = enable_vt_processing();
|
||||||
}
|
}
|
||||||
|
|
||||||
perf(
|
perf!(
|
||||||
&format!("eval_source {}", &fname),
|
&format!("eval_source {}", &fname),
|
||||||
start_time,
|
start_time,
|
||||||
file!(),
|
engine_state
|
||||||
line!(),
|
.get_config()
|
||||||
column!(),
|
.use_ansi_coloring
|
||||||
engine_state.get_config().use_ansi_coloring,
|
.get(engine_state)
|
||||||
);
|
);
|
||||||
|
|
||||||
exit_code
|
exit_code
|
||||||
@ -247,7 +281,7 @@ fn evaluate_source(
|
|||||||
fname: &str,
|
fname: &str,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
allow_return: bool,
|
allow_return: bool,
|
||||||
) -> Result<Option<i32>, ShellError> {
|
) -> Result<bool, ShellError> {
|
||||||
let (block, delta) = {
|
let (block, delta) = {
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
let output = parse(
|
let output = parse(
|
||||||
@ -257,12 +291,17 @@ fn evaluate_source(
|
|||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
if let Some(warning) = working_set.parse_warnings.first() {
|
if let Some(warning) = working_set.parse_warnings.first() {
|
||||||
report_error(&working_set, warning);
|
report_parse_warning(&working_set, warning);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_error(&working_set, err);
|
report_parse_error(&working_set, err);
|
||||||
return Ok(Some(1));
|
return Ok(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(err) = working_set.compile_errors.first() {
|
||||||
|
report_compile_error(&working_set, err);
|
||||||
|
return Ok(true);
|
||||||
}
|
}
|
||||||
|
|
||||||
(output, working_set.render())
|
(output, working_set.render())
|
||||||
@ -276,25 +315,10 @@ fn evaluate_source(
|
|||||||
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
|
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
|
||||||
}?;
|
}?;
|
||||||
|
|
||||||
let status = if let PipelineData::ByteStream(stream, ..) = pipeline {
|
let no_newline = matches!(&pipeline, &PipelineData::ByteStream(..));
|
||||||
stream.print(false)?
|
print_pipeline(engine_state, stack, pipeline, no_newline)?;
|
||||||
} else {
|
|
||||||
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
|
|
||||||
let pipeline = eval_hook(
|
|
||||||
engine_state,
|
|
||||||
stack,
|
|
||||||
Some(pipeline),
|
|
||||||
vec![],
|
|
||||||
&hook,
|
|
||||||
"display_output",
|
|
||||||
)?;
|
|
||||||
pipeline.print(engine_state, stack, false, false)
|
|
||||||
} else {
|
|
||||||
pipeline.print(engine_state, stack, true, false)
|
|
||||||
}?
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(status.map(|status| status.code()))
|
Ok(false)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
@ -319,16 +343,10 @@ mod test {
|
|||||||
|
|
||||||
let env = engine_state.render_env_vars();
|
let env = engine_state.render_env_vars();
|
||||||
|
|
||||||
assert!(
|
assert!(matches!(env.get("FOO"), Some(&Value::String { val, .. }) if val == "foo"));
|
||||||
matches!(env.get(&"FOO".to_string()), Some(&Value::String { val, .. }) if val == "foo")
|
assert!(matches!(env.get("SYMBOLS"), Some(&Value::String { val, .. }) if val == symbols));
|
||||||
);
|
assert!(matches!(env.get(symbols), Some(&Value::String { val, .. }) if val == "symbols"));
|
||||||
assert!(
|
assert!(env.contains_key("PWD"));
|
||||||
matches!(env.get(&"SYMBOLS".to_string()), Some(&Value::String { val, .. }) if val == symbols)
|
|
||||||
);
|
|
||||||
assert!(
|
|
||||||
matches!(env.get(&symbols.to_string()), Some(&Value::String { val, .. }) if val == "symbols")
|
|
||||||
);
|
|
||||||
assert!(env.get(&"PWD".to_string()).is_some());
|
|
||||||
assert_eq!(env.len(), 4);
|
assert_eq!(env.len(), 4);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
296
crates/nu-cli/tests/commands/history_import.rs
Normal file
296
crates/nu-cli/tests/commands/history_import.rs
Normal file
@ -0,0 +1,296 @@
|
|||||||
|
use nu_protocol::HistoryFileFormat;
|
||||||
|
use nu_test_support::{nu, Outcome};
|
||||||
|
use reedline::{
|
||||||
|
FileBackedHistory, History, HistoryItem, HistoryItemId, ReedlineError, SearchQuery,
|
||||||
|
SqliteBackedHistory,
|
||||||
|
};
|
||||||
|
use rstest::rstest;
|
||||||
|
use tempfile::TempDir;
|
||||||
|
|
||||||
|
struct Test {
|
||||||
|
cfg_dir: TempDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Test {
|
||||||
|
fn new(history_format: &'static str) -> Self {
|
||||||
|
let cfg_dir = tempfile::Builder::new()
|
||||||
|
.prefix("history_import_test")
|
||||||
|
.tempdir()
|
||||||
|
.unwrap();
|
||||||
|
// Assigning to $env.config.history.file_format seems to work only in startup
|
||||||
|
// configuration.
|
||||||
|
std::fs::write(
|
||||||
|
cfg_dir.path().join("env.nu"),
|
||||||
|
format!("$env.config.history.file_format = {history_format:?}"),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
Self { cfg_dir }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nu(&self, cmd: impl AsRef<str>) -> Outcome {
|
||||||
|
let env = [(
|
||||||
|
"XDG_CONFIG_HOME".to_string(),
|
||||||
|
self.cfg_dir.path().to_str().unwrap().to_string(),
|
||||||
|
)];
|
||||||
|
let env_config = self.cfg_dir.path().join("env.nu");
|
||||||
|
nu!(envs: env, env_config: env_config, cmd.as_ref())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_plaintext(&self) -> Result<FileBackedHistory, ReedlineError> {
|
||||||
|
FileBackedHistory::with_file(
|
||||||
|
100,
|
||||||
|
self.cfg_dir
|
||||||
|
.path()
|
||||||
|
.join("nushell")
|
||||||
|
.join(HistoryFileFormat::Plaintext.default_file_name()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_sqlite(&self) -> Result<SqliteBackedHistory, ReedlineError> {
|
||||||
|
SqliteBackedHistory::with_file(
|
||||||
|
self.cfg_dir
|
||||||
|
.path()
|
||||||
|
.join("nushell")
|
||||||
|
.join(HistoryFileFormat::Sqlite.default_file_name()),
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_backend(&self, format: HistoryFileFormat) -> Result<Box<dyn History>, ReedlineError> {
|
||||||
|
fn boxed(be: impl History + 'static) -> Box<dyn History> {
|
||||||
|
Box::new(be)
|
||||||
|
}
|
||||||
|
use HistoryFileFormat::*;
|
||||||
|
match format {
|
||||||
|
Plaintext => self.open_plaintext().map(boxed),
|
||||||
|
Sqlite => self.open_sqlite().map(boxed),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum HistorySource {
|
||||||
|
Vec(Vec<HistoryItem>),
|
||||||
|
Command(&'static str),
|
||||||
|
}
|
||||||
|
|
||||||
|
struct TestCase {
|
||||||
|
dst_format: HistoryFileFormat,
|
||||||
|
dst_history: Vec<HistoryItem>,
|
||||||
|
src_history: HistorySource,
|
||||||
|
want_history: Vec<HistoryItem>,
|
||||||
|
}
|
||||||
|
|
||||||
|
const EMPTY_TEST_CASE: TestCase = TestCase {
|
||||||
|
dst_format: HistoryFileFormat::Plaintext,
|
||||||
|
dst_history: Vec::new(),
|
||||||
|
src_history: HistorySource::Vec(Vec::new()),
|
||||||
|
want_history: Vec::new(),
|
||||||
|
};
|
||||||
|
|
||||||
|
impl TestCase {
|
||||||
|
fn run(self) {
|
||||||
|
use HistoryFileFormat::*;
|
||||||
|
let test = Test::new(match self.dst_format {
|
||||||
|
Plaintext => "plaintext",
|
||||||
|
Sqlite => "sqlite",
|
||||||
|
});
|
||||||
|
save_all(
|
||||||
|
&mut *test.open_backend(self.dst_format).unwrap(),
|
||||||
|
self.dst_history,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let outcome = match self.src_history {
|
||||||
|
HistorySource::Vec(src_history) => {
|
||||||
|
let src_format = match self.dst_format {
|
||||||
|
Plaintext => Sqlite,
|
||||||
|
Sqlite => Plaintext,
|
||||||
|
};
|
||||||
|
save_all(&mut *test.open_backend(src_format).unwrap(), src_history).unwrap();
|
||||||
|
test.nu("history import")
|
||||||
|
}
|
||||||
|
HistorySource::Command(cmd) => {
|
||||||
|
let mut cmd = cmd.to_string();
|
||||||
|
cmd.push_str(" | history import");
|
||||||
|
test.nu(cmd)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
assert!(outcome.status.success());
|
||||||
|
let got = query_all(&*test.open_backend(self.dst_format).unwrap()).unwrap();
|
||||||
|
|
||||||
|
// Compare just the commands first, for readability.
|
||||||
|
fn commands_only(items: &[HistoryItem]) -> Vec<&str> {
|
||||||
|
items
|
||||||
|
.iter()
|
||||||
|
.map(|item| item.command_line.as_str())
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
assert_eq!(commands_only(&got), commands_only(&self.want_history));
|
||||||
|
// If commands match, compare full items.
|
||||||
|
assert_eq!(got, self.want_history);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn query_all(history: &dyn History) -> Result<Vec<HistoryItem>, ReedlineError> {
|
||||||
|
history.search(SearchQuery::everything(
|
||||||
|
reedline::SearchDirection::Forward,
|
||||||
|
None,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn save_all(history: &mut dyn History, items: Vec<HistoryItem>) -> Result<(), ReedlineError> {
|
||||||
|
for item in items {
|
||||||
|
history.save(item)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
const EMPTY_ITEM: HistoryItem = HistoryItem {
|
||||||
|
command_line: String::new(),
|
||||||
|
id: None,
|
||||||
|
start_timestamp: None,
|
||||||
|
session_id: None,
|
||||||
|
hostname: None,
|
||||||
|
cwd: None,
|
||||||
|
duration: None,
|
||||||
|
exit_status: None,
|
||||||
|
more_info: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn history_import_pipe_string() {
|
||||||
|
TestCase {
|
||||||
|
dst_format: HistoryFileFormat::Plaintext,
|
||||||
|
src_history: HistorySource::Command("echo bar"),
|
||||||
|
want_history: vec![HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(0)),
|
||||||
|
command_line: "bar".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
}],
|
||||||
|
..EMPTY_TEST_CASE
|
||||||
|
}
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn history_import_pipe_record() {
|
||||||
|
TestCase {
|
||||||
|
dst_format: HistoryFileFormat::Sqlite,
|
||||||
|
src_history: HistorySource::Command("[[cwd command]; [/tmp some_command]]"),
|
||||||
|
want_history: vec![HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "some_command".to_string(),
|
||||||
|
cwd: Some("/tmp".to_string()),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
}],
|
||||||
|
..EMPTY_TEST_CASE
|
||||||
|
}
|
||||||
|
.run();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn to_empty_plaintext() {
|
||||||
|
TestCase {
|
||||||
|
dst_format: HistoryFileFormat::Plaintext,
|
||||||
|
src_history: HistorySource::Vec(vec![
|
||||||
|
HistoryItem {
|
||||||
|
command_line: "foo".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
command_line: "bar".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
]),
|
||||||
|
want_history: vec![
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(0)),
|
||||||
|
command_line: "foo".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "bar".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
],
|
||||||
|
..EMPTY_TEST_CASE
|
||||||
|
}
|
||||||
|
.run()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn to_empty_sqlite() {
|
||||||
|
TestCase {
|
||||||
|
dst_format: HistoryFileFormat::Sqlite,
|
||||||
|
src_history: HistorySource::Vec(vec![
|
||||||
|
HistoryItem {
|
||||||
|
command_line: "foo".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
command_line: "bar".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
]),
|
||||||
|
want_history: vec![
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "foo".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(2)),
|
||||||
|
command_line: "bar".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
],
|
||||||
|
..EMPTY_TEST_CASE
|
||||||
|
}
|
||||||
|
.run()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[case::plaintext(HistoryFileFormat::Plaintext)]
|
||||||
|
#[case::sqlite(HistoryFileFormat::Sqlite)]
|
||||||
|
fn to_existing(#[case] dst_format: HistoryFileFormat) {
|
||||||
|
TestCase {
|
||||||
|
dst_format,
|
||||||
|
dst_history: vec![
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(0)),
|
||||||
|
command_line: "original-1".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "original-2".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
],
|
||||||
|
src_history: HistorySource::Vec(vec![HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "new".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
}]),
|
||||||
|
want_history: vec![
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(0)),
|
||||||
|
command_line: "original-1".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "original-2".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(2)),
|
||||||
|
command_line: "new".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
.run()
|
||||||
|
}
|
7
crates/nu-cli/tests/commands/keybindings_list.rs
Normal file
7
crates/nu-cli/tests/commands/keybindings_list.rs
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
use nu_test_support::nu;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn not_empty() {
|
||||||
|
let result = nu!("keybindings list | is-not-empty");
|
||||||
|
assert_eq!(result.out, "true");
|
||||||
|
}
|
@ -1 +1,3 @@
|
|||||||
|
mod history_import;
|
||||||
|
mod keybindings_list;
|
||||||
mod nu_highlight;
|
mod nu_highlight;
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -1,5 +1,6 @@
|
|||||||
use nu_engine::eval_block;
|
use nu_engine::eval_block;
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
|
use nu_path::{AbsolutePathBuf, PathBuf};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
@ -7,14 +8,14 @@ use nu_protocol::{
|
|||||||
};
|
};
|
||||||
use nu_test_support::fs;
|
use nu_test_support::fs;
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
use std::path::{PathBuf, MAIN_SEPARATOR};
|
use std::path::MAIN_SEPARATOR;
|
||||||
|
|
||||||
fn create_default_context() -> EngineState {
|
fn create_default_context() -> EngineState {
|
||||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||||
}
|
}
|
||||||
|
|
||||||
// creates a new engine with the current path into the completions fixtures folder
|
// creates a new engine with the current path into the completions fixtures folder
|
||||||
pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
|
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||||
// Target folder inside assets
|
// Target folder inside assets
|
||||||
let dir = fs::fixtures().join("completions");
|
let dir = fs::fixtures().join("completions");
|
||||||
let dir_str = dir
|
let dir_str = dir
|
||||||
@ -62,13 +63,71 @@ pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
// Merge environment into the permanent state
|
||||||
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
let merge_result = engine_state.merge_env(&mut stack);
|
||||||
assert!(merge_result.is_ok());
|
assert!(merge_result.is_ok());
|
||||||
|
|
||||||
(dir, dir_str, engine_state, stack)
|
(dir, dir_str, engine_state, stack)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new_quote_engine() -> (PathBuf, String, EngineState, Stack) {
|
// creates a new engine with the current path into the completions fixtures folder
|
||||||
|
pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||||
|
// Target folder inside assets
|
||||||
|
let dir = fs::fixtures().join("dotnu_completions");
|
||||||
|
let dir_str = dir
|
||||||
|
.clone()
|
||||||
|
.into_os_string()
|
||||||
|
.into_string()
|
||||||
|
.unwrap_or_default();
|
||||||
|
let dir_span = nu_protocol::Span::new(0, dir_str.len());
|
||||||
|
|
||||||
|
// Create a new engine with default context
|
||||||
|
let mut engine_state = create_default_context();
|
||||||
|
|
||||||
|
// Add $nu
|
||||||
|
engine_state.generate_nu_constant();
|
||||||
|
|
||||||
|
// const $NU_LIB_DIRS
|
||||||
|
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||||
|
let var_id = working_set.add_variable(
|
||||||
|
b"$NU_LIB_DIRS".into(),
|
||||||
|
Span::unknown(),
|
||||||
|
nu_protocol::Type::List(Box::new(nu_protocol::Type::String)),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
working_set.set_variable_const_val(
|
||||||
|
var_id,
|
||||||
|
Value::test_list(vec![
|
||||||
|
Value::string(file(dir.join("lib-dir1")), dir_span),
|
||||||
|
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||||
|
]),
|
||||||
|
);
|
||||||
|
let _ = engine_state.merge_delta(working_set.render());
|
||||||
|
|
||||||
|
// New stack
|
||||||
|
let mut stack = Stack::new();
|
||||||
|
|
||||||
|
// Add pwd as env var
|
||||||
|
stack.add_env_var("PWD".to_string(), Value::string(dir_str.clone(), dir_span));
|
||||||
|
stack.add_env_var(
|
||||||
|
"TEST".to_string(),
|
||||||
|
Value::string("NUSHELL".to_string(), dir_span),
|
||||||
|
);
|
||||||
|
stack.add_env_var(
|
||||||
|
"NU_LIB_DIRS".into(),
|
||||||
|
Value::test_list(vec![
|
||||||
|
Value::string(file(dir.join("lib-dir2")), dir_span),
|
||||||
|
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||||
|
]),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Merge environment into the permanent state
|
||||||
|
let merge_result = engine_state.merge_env(&mut stack);
|
||||||
|
assert!(merge_result.is_ok());
|
||||||
|
|
||||||
|
(dir, dir_str, engine_state, stack)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new_quote_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||||
// Target folder inside assets
|
// Target folder inside assets
|
||||||
let dir = fs::fixtures().join("quoted_completions");
|
let dir = fs::fixtures().join("quoted_completions");
|
||||||
let dir_str = dir
|
let dir_str = dir
|
||||||
@ -97,13 +156,13 @@ pub fn new_quote_engine() -> (PathBuf, String, EngineState, Stack) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
// Merge environment into the permanent state
|
||||||
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
let merge_result = engine_state.merge_env(&mut stack);
|
||||||
assert!(merge_result.is_ok());
|
assert!(merge_result.is_ok());
|
||||||
|
|
||||||
(dir, dir_str, engine_state, stack)
|
(dir, dir_str, engine_state, stack)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new_partial_engine() -> (PathBuf, String, EngineState, Stack) {
|
pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||||
// Target folder inside assets
|
// Target folder inside assets
|
||||||
let dir = fs::fixtures().join("partial_completions");
|
let dir = fs::fixtures().join("partial_completions");
|
||||||
let dir_str = dir
|
let dir_str = dir
|
||||||
@ -132,14 +191,14 @@ pub fn new_partial_engine() -> (PathBuf, String, EngineState, Stack) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
// Merge environment into the permanent state
|
||||||
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
let merge_result = engine_state.merge_env(&mut stack);
|
||||||
assert!(merge_result.is_ok());
|
assert!(merge_result.is_ok());
|
||||||
|
|
||||||
(dir, dir_str, engine_state, stack)
|
(dir, dir_str, engine_state, stack)
|
||||||
}
|
}
|
||||||
|
|
||||||
// match a list of suggestions with the expected values
|
// match a list of suggestions with the expected values
|
||||||
pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
|
pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>) {
|
||||||
let expected_len = expected.len();
|
let expected_len = expected.len();
|
||||||
let suggestions_len = suggestions.len();
|
let suggestions_len = suggestions.len();
|
||||||
if expected_len != suggestions_len {
|
if expected_len != suggestions_len {
|
||||||
@ -149,22 +208,25 @@ pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
|
|||||||
Expected: {expected:#?}\n"
|
Expected: {expected:#?}\n"
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
expected.iter().zip(suggestions).for_each(|it| {
|
|
||||||
assert_eq!(it.0, &it.1.value);
|
let suggestoins_str = suggestions
|
||||||
});
|
.iter()
|
||||||
|
.map(|it| it.value.clone())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
assert_eq!(expected, &suggestoins_str);
|
||||||
}
|
}
|
||||||
|
|
||||||
// append the separator to the converted path
|
// append the separator to the converted path
|
||||||
pub fn folder(path: PathBuf) -> String {
|
pub fn folder(path: impl Into<PathBuf>) -> String {
|
||||||
let mut converted_path = file(path);
|
let mut converted_path = file(path);
|
||||||
converted_path.push(MAIN_SEPARATOR);
|
converted_path.push(MAIN_SEPARATOR);
|
||||||
|
|
||||||
converted_path
|
converted_path
|
||||||
}
|
}
|
||||||
|
|
||||||
// convert a given path to string
|
// convert a given path to string
|
||||||
pub fn file(path: PathBuf) -> String {
|
pub fn file(path: impl Into<PathBuf>) -> String {
|
||||||
path.into_os_string().into_string().unwrap_or_default()
|
path.into().into_os_string().into_string().unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
// merge_input executes the given input into the engine
|
// merge_input executes the given input into the engine
|
||||||
@ -173,7 +235,6 @@ pub fn merge_input(
|
|||||||
input: &[u8],
|
input: &[u8],
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
dir: PathBuf,
|
|
||||||
) -> Result<(), ShellError> {
|
) -> Result<(), ShellError> {
|
||||||
let (block, delta) = {
|
let (block, delta) = {
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
@ -196,5 +257,5 @@ pub fn merge_input(
|
|||||||
.is_ok());
|
.is_ok());
|
||||||
|
|
||||||
// Merge environment into the permanent state
|
// Merge environment into the permanent state
|
||||||
engine_state.merge_env(stack, &dir)
|
engine_state.merge_env(stack)
|
||||||
}
|
}
|
||||||
|
@ -5,15 +5,18 @@ edition = "2021"
|
|||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-cmd-base"
|
name = "nu-cmd-base"
|
||||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
|
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
|
||||||
version = "0.93.1"
|
version = "0.102.1"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-engine = { path = "../nu-engine", version = "0.93.1" }
|
nu-engine = { path = "../nu-engine", version = "0.102.1", default-features = false }
|
||||||
nu-parser = { path = "../nu-parser", version = "0.93.1" }
|
nu-parser = { path = "../nu-parser", version = "0.102.1" }
|
||||||
nu-path = { path = "../nu-path", version = "0.93.1" }
|
nu-path = { path = "../nu-path", version = "0.102.1" }
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.93.1" }
|
nu-protocol = { path = "../nu-protocol", version = "0.102.1", default-features = false }
|
||||||
|
|
||||||
indexmap = { workspace = true }
|
indexmap = { workspace = true }
|
||||||
miette = { workspace = true }
|
miette = { workspace = true }
|
||||||
|
5
crates/nu-cmd-base/README.md
Normal file
5
crates/nu-cmd-base/README.md
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
Utilities used by the different `nu-command`/`nu-cmd-*` crates, should not contain any full `Command` implementations.
|
||||||
|
|
||||||
|
## Internal Nushell crate
|
||||||
|
|
||||||
|
This crate implements components of Nushell and is not designed to support plugin authors or other users directly.
|
@ -1,58 +1,58 @@
|
|||||||
use crate::util::get_guaranteed_cwd;
|
|
||||||
use miette::Result;
|
use miette::Result;
|
||||||
use nu_engine::{eval_block, eval_block_with_early_return};
|
use nu_engine::{eval_block, eval_block_with_early_return};
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
cli_error::{report_error, report_error_new},
|
cli_error::{report_parse_error, report_shell_error},
|
||||||
debugger::WithoutDebug,
|
debugger::WithoutDebug,
|
||||||
engine::{Closure, EngineState, Stack, StateWorkingSet},
|
engine::{Closure, EngineState, Stack, StateWorkingSet},
|
||||||
PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
|
PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
|
||||||
};
|
};
|
||||||
use std::sync::Arc;
|
use std::{collections::HashMap, sync::Arc};
|
||||||
|
|
||||||
pub fn eval_env_change_hook(
|
pub fn eval_env_change_hook(
|
||||||
env_change_hook: Option<Value>,
|
env_change_hook: &HashMap<String, Vec<Value>>,
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
) -> Result<(), ShellError> {
|
) -> Result<(), ShellError> {
|
||||||
if let Some(hook) = env_change_hook {
|
for (env, hooks) in env_change_hook {
|
||||||
match hook {
|
let before = engine_state.previous_env_vars.get(env);
|
||||||
Value::Record { val, .. } => {
|
let after = stack.get_env_var(engine_state, env);
|
||||||
for (env_name, hook_value) in &val {
|
|
||||||
let before = engine_state
|
|
||||||
.previous_env_vars
|
|
||||||
.get(env_name)
|
|
||||||
.cloned()
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
let after = stack
|
|
||||||
.get_env_var(engine_state, env_name)
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
if before != after {
|
if before != after {
|
||||||
|
let before = before.cloned().unwrap_or_default();
|
||||||
|
let after = after.cloned().unwrap_or_default();
|
||||||
|
|
||||||
|
eval_hooks(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
vec![("$before".into(), before), ("$after".into(), after.clone())],
|
||||||
|
hooks,
|
||||||
|
"env_change",
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Arc::make_mut(&mut engine_state.previous_env_vars).insert(env.clone(), after);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn eval_hooks(
|
||||||
|
engine_state: &mut EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
arguments: Vec<(String, Value)>,
|
||||||
|
hooks: &[Value],
|
||||||
|
hook_name: &str,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
for hook in hooks {
|
||||||
eval_hook(
|
eval_hook(
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
None,
|
None,
|
||||||
vec![("$before".into(), before), ("$after".into(), after.clone())],
|
arguments.clone(),
|
||||||
hook_value,
|
hook,
|
||||||
"env_change",
|
&format!("{hook_name} list, recursive"),
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
Arc::make_mut(&mut engine_state.previous_env_vars)
|
|
||||||
.insert(env_name.to_string(), after);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
|
||||||
x => {
|
|
||||||
return Err(ShellError::TypeMismatch {
|
|
||||||
err_message: "record for the 'env_change' hook".to_string(),
|
|
||||||
span: x.span(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -91,12 +91,13 @@ pub fn eval_hook(
|
|||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_error(&working_set, err);
|
report_parse_error(&working_set, err);
|
||||||
|
return Err(ShellError::GenericError {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
error: format!("Failed to run {hook_name} hook"),
|
||||||
expected: "valid source code".into(),
|
msg: "source code has errors".into(),
|
||||||
value: "source code with syntax errors".into(),
|
span: Some(span),
|
||||||
span,
|
help: None,
|
||||||
|
inner: Vec::new(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -123,7 +124,7 @@ pub fn eval_hook(
|
|||||||
output = pipeline_data;
|
output = pipeline_data;
|
||||||
}
|
}
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
report_error_new(engine_state, &err);
|
report_shell_error(engine_state, &err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -132,16 +133,7 @@ pub fn eval_hook(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
Value::List { vals, .. } => {
|
Value::List { vals, .. } => {
|
||||||
for val in vals {
|
eval_hooks(engine_state, stack, arguments, vals, hook_name)?;
|
||||||
eval_hook(
|
|
||||||
engine_state,
|
|
||||||
stack,
|
|
||||||
None,
|
|
||||||
arguments.clone(),
|
|
||||||
val,
|
|
||||||
&format!("{hook_name} list, recursive"),
|
|
||||||
)?;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
Value::Record { val, .. } => {
|
Value::Record { val, .. } => {
|
||||||
// Hooks can optionally be a record in this form:
|
// Hooks can optionally be a record in this form:
|
||||||
@ -167,10 +159,10 @@ pub fn eval_hook(
|
|||||||
{
|
{
|
||||||
val
|
val
|
||||||
} else {
|
} else {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "boolean output".to_string(),
|
expected: Type::Bool,
|
||||||
value: "other PipelineData variant".to_string(),
|
actual: pipeline_data.get_type(),
|
||||||
span: other_span,
|
span: pipeline_data.span().unwrap_or(other_span),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -179,9 +171,9 @@ pub fn eval_hook(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "block".to_string(),
|
expected: Type::Closure,
|
||||||
value: format!("{}", condition.get_type()),
|
actual: condition.get_type(),
|
||||||
span: other_span,
|
span: other_span,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -194,7 +186,7 @@ pub fn eval_hook(
|
|||||||
let Some(follow) = val.get("code") else {
|
let Some(follow) = val.get("code") else {
|
||||||
return Err(ShellError::CantFindColumn {
|
return Err(ShellError::CantFindColumn {
|
||||||
col_name: "code".into(),
|
col_name: "code".into(),
|
||||||
span,
|
span: Some(span),
|
||||||
src_span: span,
|
src_span: span,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
@ -223,12 +215,13 @@ pub fn eval_hook(
|
|||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_error(&working_set, err);
|
report_parse_error(&working_set, err);
|
||||||
|
return Err(ShellError::GenericError {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
error: format!("Failed to run {hook_name} hook"),
|
||||||
expected: "valid source code".into(),
|
msg: "source code has errors".into(),
|
||||||
value: "source code with syntax errors".into(),
|
span: Some(span),
|
||||||
span: source_span,
|
help: None,
|
||||||
|
inner: Vec::new(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -251,7 +244,7 @@ pub fn eval_hook(
|
|||||||
output = pipeline_data;
|
output = pipeline_data;
|
||||||
}
|
}
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
report_error_new(engine_state, &err);
|
report_shell_error(engine_state, &err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -263,9 +256,9 @@ pub fn eval_hook(
|
|||||||
run_hook(engine_state, stack, val, input, arguments, source_span)?;
|
run_hook(engine_state, stack, val, input, arguments, source_span)?;
|
||||||
}
|
}
|
||||||
other => {
|
other => {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "block or string".to_string(),
|
expected: Type::custom("string or closure"),
|
||||||
value: format!("{}", other.get_type()),
|
actual: other.get_type(),
|
||||||
span: source_span,
|
span: source_span,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -276,16 +269,15 @@ pub fn eval_hook(
|
|||||||
output = run_hook(engine_state, stack, val, input, arguments, span)?;
|
output = run_hook(engine_state, stack, val, input, arguments, span)?;
|
||||||
}
|
}
|
||||||
other => {
|
other => {
|
||||||
return Err(ShellError::UnsupportedConfigValue {
|
return Err(ShellError::RuntimeTypeMismatch {
|
||||||
expected: "string, block, record, or list of commands".into(),
|
expected: Type::custom("string, closure, record, or list"),
|
||||||
value: format!("{}", other.get_type()),
|
actual: other.get_type(),
|
||||||
span: other.span(),
|
span: other.span(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let cwd = get_guaranteed_cwd(engine_state, stack);
|
engine_state.merge_env(stack)?;
|
||||||
engine_state.merge_env(stack, cwd)?;
|
|
||||||
|
|
||||||
Ok(output)
|
Ok(output)
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
use nu_protocol::{ast::CellPath, PipelineData, ShellError, Span, Value};
|
use nu_protocol::{ast::CellPath, PipelineData, ShellError, Signals, Span, Value};
|
||||||
use std::sync::{atomic::AtomicBool, Arc};
|
use std::sync::Arc;
|
||||||
|
|
||||||
pub trait CmdArgument {
|
pub trait CmdArgument {
|
||||||
fn take_cell_paths(&mut self) -> Option<Vec<CellPath>>;
|
fn take_cell_paths(&mut self) -> Option<Vec<CellPath>>;
|
||||||
@ -40,7 +40,7 @@ pub fn operate<C, A>(
|
|||||||
mut arg: A,
|
mut arg: A,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
span: Span,
|
span: Span,
|
||||||
ctrlc: Option<Arc<AtomicBool>>,
|
signals: &Signals,
|
||||||
) -> Result<PipelineData, ShellError>
|
) -> Result<PipelineData, ShellError>
|
||||||
where
|
where
|
||||||
A: CmdArgument + Send + Sync + 'static,
|
A: CmdArgument + Send + Sync + 'static,
|
||||||
@ -55,7 +55,7 @@ where
|
|||||||
_ => cmd(&v, &arg, span),
|
_ => cmd(&v, &arg, span),
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
ctrlc,
|
signals,
|
||||||
),
|
),
|
||||||
Some(column_paths) => {
|
Some(column_paths) => {
|
||||||
let arg = Arc::new(arg);
|
let arg = Arc::new(arg);
|
||||||
@ -79,7 +79,7 @@ where
|
|||||||
}
|
}
|
||||||
v
|
v
|
||||||
},
|
},
|
||||||
ctrlc,
|
signals,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
#![doc = include_str!("../README.md")]
|
||||||
pub mod formats;
|
pub mod formats;
|
||||||
pub mod hook;
|
pub mod hook;
|
||||||
pub mod input_handler;
|
pub mod input_handler;
|
||||||
|
@ -2,24 +2,11 @@ use nu_protocol::{
|
|||||||
engine::{EngineState, Stack},
|
engine::{EngineState, Stack},
|
||||||
Range, ShellError, Span, Value,
|
Range, ShellError, Span, Value,
|
||||||
};
|
};
|
||||||
use std::{ops::Bound, path::PathBuf};
|
use std::ops::Bound;
|
||||||
|
|
||||||
pub fn get_init_cwd() -> PathBuf {
|
|
||||||
std::env::current_dir().unwrap_or_else(|_| {
|
|
||||||
std::env::var("PWD")
|
|
||||||
.map(Into::into)
|
|
||||||
.unwrap_or_else(|_| nu_path::home_dir().unwrap_or_default())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf {
|
|
||||||
engine_state
|
|
||||||
.cwd(Some(stack))
|
|
||||||
.unwrap_or(crate::util::get_init_cwd())
|
|
||||||
}
|
|
||||||
|
|
||||||
type MakeRangeError = fn(&str, Span) -> ShellError;
|
type MakeRangeError = fn(&str, Span) -> ShellError;
|
||||||
|
|
||||||
|
/// Returns a inclusive pair of boundary in given `range`.
|
||||||
pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> {
|
pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> {
|
||||||
match range {
|
match range {
|
||||||
Range::IntRange(range) => {
|
Range::IntRange(range) => {
|
||||||
@ -91,10 +78,10 @@ pub fn get_editor(
|
|||||||
get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor")
|
get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor")
|
||||||
{
|
{
|
||||||
Ok(buff_editor)
|
Ok(buff_editor)
|
||||||
} else if let Some(value) = env_vars.get("EDITOR") {
|
|
||||||
get_editor_commandline(value, "$env.EDITOR")
|
|
||||||
} else if let Some(value) = env_vars.get("VISUAL") {
|
} else if let Some(value) = env_vars.get("VISUAL") {
|
||||||
get_editor_commandline(value, "$env.VISUAL")
|
get_editor_commandline(value, "$env.VISUAL")
|
||||||
|
} else if let Some(value) = env_vars.get("EDITOR") {
|
||||||
|
get_editor_commandline(value, "$env.EDITOR")
|
||||||
} else {
|
} else {
|
||||||
Err(ShellError::GenericError {
|
Err(ShellError::GenericError {
|
||||||
error: "No editor configured".into(),
|
error: "No editor configured".into(),
|
||||||
|
@ -1,75 +0,0 @@
|
|||||||
[package]
|
|
||||||
authors = ["The Nushell Project Developers"]
|
|
||||||
description = "Nushell's dataframe commands based on polars."
|
|
||||||
edition = "2021"
|
|
||||||
license = "MIT"
|
|
||||||
name = "nu-cmd-dataframe"
|
|
||||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe"
|
|
||||||
version = "0.93.1"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
bench = false
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
nu-engine = { path = "../nu-engine", version = "0.93.1" }
|
|
||||||
nu-parser = { path = "../nu-parser", version = "0.93.1" }
|
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.93.1" }
|
|
||||||
|
|
||||||
# Potential dependencies for extras
|
|
||||||
chrono = { workspace = true, features = ["std", "unstable-locales"], default-features = false }
|
|
||||||
chrono-tz = { workspace = true }
|
|
||||||
fancy-regex = { workspace = true }
|
|
||||||
indexmap = { workspace = true }
|
|
||||||
num = { version = "0.4", optional = true }
|
|
||||||
serde = { workspace = true, features = ["derive"] }
|
|
||||||
# keep sqlparser at 0.39.0 until we can update polars
|
|
||||||
sqlparser = { version = "0.45", optional = true }
|
|
||||||
polars-io = { version = "0.39", features = ["avro"], optional = true }
|
|
||||||
polars-arrow = { version = "0.39", optional = true }
|
|
||||||
polars-ops = { version = "0.39", optional = true }
|
|
||||||
polars-plan = { version = "0.39", features = ["regex"], optional = true }
|
|
||||||
polars-utils = { version = "0.39", optional = true }
|
|
||||||
|
|
||||||
[dependencies.polars]
|
|
||||||
features = [
|
|
||||||
"arg_where",
|
|
||||||
"checked_arithmetic",
|
|
||||||
"concat_str",
|
|
||||||
"cross_join",
|
|
||||||
"csv",
|
|
||||||
"cum_agg",
|
|
||||||
"dtype-categorical",
|
|
||||||
"dtype-datetime",
|
|
||||||
"dtype-struct",
|
|
||||||
"dtype-i8",
|
|
||||||
"dtype-i16",
|
|
||||||
"dtype-u8",
|
|
||||||
"dtype-u16",
|
|
||||||
"dynamic_group_by",
|
|
||||||
"ipc",
|
|
||||||
"is_in",
|
|
||||||
"json",
|
|
||||||
"lazy",
|
|
||||||
"object",
|
|
||||||
"parquet",
|
|
||||||
"random",
|
|
||||||
"rolling_window",
|
|
||||||
"rows",
|
|
||||||
"serde",
|
|
||||||
"serde-lazy",
|
|
||||||
"strings",
|
|
||||||
"temporal",
|
|
||||||
"to_dummies",
|
|
||||||
]
|
|
||||||
default-features = false
|
|
||||||
optional = true
|
|
||||||
version = "0.39"
|
|
||||||
|
|
||||||
[features]
|
|
||||||
dataframe = ["num", "polars", "polars-io", "polars-arrow", "polars-ops", "polars-plan", "polars-utils", "sqlparser"]
|
|
||||||
default = []
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.93.1" }
|
|
@ -1,12 +0,0 @@
|
|||||||
# Dataframe
|
|
||||||
|
|
||||||
This dataframe directory holds all of the definitions of the dataframe data structures and commands.
|
|
||||||
|
|
||||||
There are three sections of commands:
|
|
||||||
|
|
||||||
* [eager](./eager)
|
|
||||||
* [series](./series)
|
|
||||||
* [values](./values)
|
|
||||||
|
|
||||||
For more details see the
|
|
||||||
[Nushell book section on dataframes](https://www.nushell.sh/book/dataframes.html)
|
|
@ -1,134 +0,0 @@
|
|||||||
use crate::dataframe::values::{Axis, Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct AppendDF;
|
|
||||||
|
|
||||||
impl Command for AppendDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr append"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Appends a new dataframe."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.required("other", SyntaxShape::Any, "dataframe to be appended")
|
|
||||||
.switch("col", "appends in col orientation", Some('c'))
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
description: "Appends a dataframe as new columns",
|
|
||||||
example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
|
|
||||||
$a | dfr append $a"#,
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new(
|
|
||||||
"a".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(3)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"b".to_string(),
|
|
||||||
vec![Value::test_int(2), Value::test_int(4)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"a_x".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(3)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"b_x".to_string(),
|
|
||||||
vec![Value::test_int(2), Value::test_int(4)],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Appends a dataframe merging at the end of columns",
|
|
||||||
example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
|
|
||||||
$a | dfr append $a --col"#,
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new(
|
|
||||||
"a".to_string(),
|
|
||||||
vec![
|
|
||||||
Value::test_int(1),
|
|
||||||
Value::test_int(3),
|
|
||||||
Value::test_int(1),
|
|
||||||
Value::test_int(3),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"b".to_string(),
|
|
||||||
vec![
|
|
||||||
Value::test_int(2),
|
|
||||||
Value::test_int(4),
|
|
||||||
Value::test_int(2),
|
|
||||||
Value::test_int(4),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let other: Value = call.req(engine_state, stack, 0)?;
|
|
||||||
|
|
||||||
let axis = if call.has_flag(engine_state, stack, "col")? {
|
|
||||||
Axis::Column
|
|
||||||
} else {
|
|
||||||
Axis::Row
|
|
||||||
};
|
|
||||||
let df_other = NuDataFrame::try_from_value(other)?;
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
df.append_df(&df_other, axis, call.head)
|
|
||||||
.map(|df| PipelineData::Value(NuDataFrame::into_value(df, call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(AppendDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,195 +0,0 @@
|
|||||||
use crate::dataframe::values::{str_to_dtype, NuDataFrame, NuExpression, NuLazyFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
use polars::prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct CastDF;
|
|
||||||
|
|
||||||
impl Command for CastDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr cast"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Cast a column to a different dtype."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.input_output_types(vec![
|
|
||||||
(
|
|
||||||
Type::Custom("expression".into()),
|
|
||||||
Type::Custom("expression".into()),
|
|
||||||
),
|
|
||||||
(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
),
|
|
||||||
])
|
|
||||||
.required(
|
|
||||||
"dtype",
|
|
||||||
SyntaxShape::String,
|
|
||||||
"The dtype to cast the column to",
|
|
||||||
)
|
|
||||||
.optional(
|
|
||||||
"column",
|
|
||||||
SyntaxShape::String,
|
|
||||||
"The column to cast. Required when used with a dataframe.",
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
description: "Cast a column in a dataframe to a different dtype",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr cast u8 a | dfr schema",
|
|
||||||
result: Some(Value::record(
|
|
||||||
record! {
|
|
||||||
"a" => Value::string("u8", Span::test_data()),
|
|
||||||
"b" => Value::string("i64", Span::test_data()),
|
|
||||||
},
|
|
||||||
Span::test_data(),
|
|
||||||
)),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Cast a column in a lazy dataframe to a different dtype",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr into-lazy | dfr cast u8 a | dfr schema",
|
|
||||||
result: Some(Value::record(
|
|
||||||
record! {
|
|
||||||
"a" => Value::string("u8", Span::test_data()),
|
|
||||||
"b" => Value::string("i64", Span::test_data()),
|
|
||||||
},
|
|
||||||
Span::test_data(),
|
|
||||||
)),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Cast a column in a expression to a different dtype",
|
|
||||||
example: r#"[[a b]; [1 2] [1 4]] | dfr into-df | dfr group-by a | dfr agg [ (dfr col b | dfr cast u8 | dfr min | dfr as "b_min") ] | dfr schema"#,
|
|
||||||
result: None
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let value = input.into_value(call.head)?;
|
|
||||||
if NuLazyFrame::can_downcast(&value) {
|
|
||||||
let (dtype, column_nm) = df_args(engine_state, stack, call)?;
|
|
||||||
let df = NuLazyFrame::try_from_value(value)?;
|
|
||||||
command_lazy(call, column_nm, dtype, df)
|
|
||||||
} else if NuDataFrame::can_downcast(&value) {
|
|
||||||
let (dtype, column_nm) = df_args(engine_state, stack, call)?;
|
|
||||||
let df = NuDataFrame::try_from_value(value)?;
|
|
||||||
command_eager(call, column_nm, dtype, df)
|
|
||||||
} else {
|
|
||||||
let dtype: String = call.req(engine_state, stack, 0)?;
|
|
||||||
let dtype = str_to_dtype(&dtype, call.head)?;
|
|
||||||
|
|
||||||
let expr = NuExpression::try_from_value(value)?;
|
|
||||||
let expr: NuExpression = expr.into_polars().cast(dtype).into();
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(
|
|
||||||
NuExpression::into_value(expr, call.head),
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn df_args(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<(DataType, String), ShellError> {
|
|
||||||
let dtype = dtype_arg(engine_state, stack, call)?;
|
|
||||||
let column_nm: String =
|
|
||||||
call.opt(engine_state, stack, 1)?
|
|
||||||
.ok_or(ShellError::MissingParameter {
|
|
||||||
param_name: "column_name".into(),
|
|
||||||
span: call.head,
|
|
||||||
})?;
|
|
||||||
Ok((dtype, column_nm))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn dtype_arg(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<DataType, ShellError> {
|
|
||||||
let dtype: String = call.req(engine_state, stack, 0)?;
|
|
||||||
str_to_dtype(&dtype, call.head)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command_lazy(
|
|
||||||
call: &Call,
|
|
||||||
column_nm: String,
|
|
||||||
dtype: DataType,
|
|
||||||
lazy: NuLazyFrame,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let column = col(&column_nm).cast(dtype);
|
|
||||||
let lazy = lazy.into_polars().with_columns(&[column]);
|
|
||||||
let lazy = NuLazyFrame::new(false, lazy);
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(
|
|
||||||
NuLazyFrame::into_value(lazy, call.head)?,
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command_eager(
|
|
||||||
call: &Call,
|
|
||||||
column_nm: String,
|
|
||||||
dtype: DataType,
|
|
||||||
nu_df: NuDataFrame,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let mut df = nu_df.df;
|
|
||||||
let column = df
|
|
||||||
.column(&column_nm)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: format!("{e}"),
|
|
||||||
msg: "".into(),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let casted = column.cast(&dtype).map_err(|e| ShellError::GenericError {
|
|
||||||
error: format!("{e}"),
|
|
||||||
msg: "".into(),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let _ = df
|
|
||||||
.with_column(casted)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: format!("{e}"),
|
|
||||||
msg: "".into(),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let df = NuDataFrame::new(false, df);
|
|
||||||
Ok(PipelineData::Value(df.into_value(call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(CastDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,73 +0,0 @@
|
|||||||
use crate::dataframe::values::NuDataFrame;
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct ColumnsDF;
|
|
||||||
|
|
||||||
impl Command for ColumnsDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr columns"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Show dataframe columns."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.input_output_type(Type::Custom("dataframe".into()), Type::Any)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Dataframe columns",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr columns",
|
|
||||||
result: Some(Value::list(
|
|
||||||
vec![Value::test_string("a"), Value::test_string("b")],
|
|
||||||
Span::test_data(),
|
|
||||||
)),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
_engine_state: &EngineState,
|
|
||||||
_stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
let names: Vec<Value> = df
|
|
||||||
.as_ref()
|
|
||||||
.get_column_names()
|
|
||||||
.iter()
|
|
||||||
.map(|v| Value::string(*v, call.head))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let names = Value::list(names, call.head);
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(names, None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(ColumnsDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,115 +0,0 @@
|
|||||||
use crate::dataframe::values::{utils::convert_columns, Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct DropDF;
|
|
||||||
|
|
||||||
impl Command for DropDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr drop"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Creates a new dataframe by dropping the selected columns."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.rest("rest", SyntaxShape::Any, "column names to be dropped")
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "drop column a",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr drop a",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![Column::new(
|
|
||||||
"b".to_string(),
|
|
||||||
vec![Value::test_int(2), Value::test_int(4)],
|
|
||||||
)],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let columns: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
|
||||||
let (col_string, col_span) = convert_columns(columns, call.head)?;
|
|
||||||
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
let new_df = col_string
|
|
||||||
.first()
|
|
||||||
.ok_or_else(|| ShellError::GenericError {
|
|
||||||
error: "Empty names list".into(),
|
|
||||||
msg: "No column names were found".into(),
|
|
||||||
span: Some(col_span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})
|
|
||||||
.and_then(|col| {
|
|
||||||
df.as_ref()
|
|
||||||
.drop(&col.item)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error dropping column".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(col.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// If there are more columns in the drop selection list, these
|
|
||||||
// are added from the resulting dataframe
|
|
||||||
col_string
|
|
||||||
.iter()
|
|
||||||
.skip(1)
|
|
||||||
.try_fold(new_df, |new_df, col| {
|
|
||||||
new_df
|
|
||||||
.drop(&col.item)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error dropping column".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(col.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(DropDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,119 +0,0 @@
|
|||||||
use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
use polars::prelude::UniqueKeepStrategy;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct DropDuplicates;
|
|
||||||
|
|
||||||
impl Command for DropDuplicates {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr drop-duplicates"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Drops duplicate values in dataframe."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.optional(
|
|
||||||
"subset",
|
|
||||||
SyntaxShape::Table(vec![]),
|
|
||||||
"subset of columns to drop duplicates",
|
|
||||||
)
|
|
||||||
.switch("maintain", "maintain order", Some('m'))
|
|
||||||
.switch(
|
|
||||||
"last",
|
|
||||||
"keeps last duplicate value (by default keeps first)",
|
|
||||||
Some('l'),
|
|
||||||
)
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "drop duplicates",
|
|
||||||
example: "[[a b]; [1 2] [3 4] [1 2]] | dfr into-df | dfr drop-duplicates",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new(
|
|
||||||
"a".to_string(),
|
|
||||||
vec![Value::test_int(3), Value::test_int(1)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"b".to_string(),
|
|
||||||
vec![Value::test_int(4), Value::test_int(2)],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let columns: Option<Vec<Value>> = call.opt(engine_state, stack, 0)?;
|
|
||||||
let (subset, col_span) = match columns {
|
|
||||||
Some(cols) => {
|
|
||||||
let (agg_string, col_span) = convert_columns_string(cols, call.head)?;
|
|
||||||
(Some(agg_string), col_span)
|
|
||||||
}
|
|
||||||
None => (None, call.head),
|
|
||||||
};
|
|
||||||
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
let subset_slice = subset.as_ref().map(|cols| &cols[..]);
|
|
||||||
|
|
||||||
let keep_strategy = if call.has_flag(engine_state, stack, "last")? {
|
|
||||||
UniqueKeepStrategy::Last
|
|
||||||
} else {
|
|
||||||
UniqueKeepStrategy::First
|
|
||||||
};
|
|
||||||
|
|
||||||
df.as_ref()
|
|
||||||
.unique(subset_slice, keep_strategy, None)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error dropping duplicates".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(col_span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})
|
|
||||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(DropDuplicates {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,137 +0,0 @@
|
|||||||
use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct DropNulls;
|
|
||||||
|
|
||||||
impl Command for DropNulls {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr drop-nulls"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Drops null values in dataframe."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.optional(
|
|
||||||
"subset",
|
|
||||||
SyntaxShape::Table(vec![]),
|
|
||||||
"subset of columns to drop nulls",
|
|
||||||
)
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
description: "drop null values in dataframe",
|
|
||||||
example: r#"let df = ([[a b]; [1 2] [3 0] [1 2]] | dfr into-df);
|
|
||||||
let res = ($df.b / $df.b);
|
|
||||||
let a = ($df | dfr with-column $res --name res);
|
|
||||||
$a | dfr drop-nulls"#,
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new(
|
|
||||||
"a".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(1)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"b".to_string(),
|
|
||||||
vec![Value::test_int(2), Value::test_int(2)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"res".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(1)],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "drop null values in dataframe",
|
|
||||||
example: r#"let s = ([1 2 0 0 3 4] | dfr into-df);
|
|
||||||
($s / $s) | dfr drop-nulls"#,
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![Column::new(
|
|
||||||
"div_0_0".to_string(),
|
|
||||||
vec![
|
|
||||||
Value::test_int(1),
|
|
||||||
Value::test_int(1),
|
|
||||||
Value::test_int(1),
|
|
||||||
Value::test_int(1),
|
|
||||||
],
|
|
||||||
)],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
let columns: Option<Vec<Value>> = call.opt(engine_state, stack, 0)?;
|
|
||||||
|
|
||||||
let (subset, col_span) = match columns {
|
|
||||||
Some(cols) => {
|
|
||||||
let (agg_string, col_span) = convert_columns_string(cols, call.head)?;
|
|
||||||
(Some(agg_string), col_span)
|
|
||||||
}
|
|
||||||
None => (None, call.head),
|
|
||||||
};
|
|
||||||
|
|
||||||
let subset_slice = subset.as_ref().map(|cols| &cols[..]);
|
|
||||||
|
|
||||||
df.as_ref()
|
|
||||||
.drop_nulls(subset_slice)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error dropping nulls".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(col_span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})
|
|
||||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::super::WithColumn;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(DropNulls {}), Box::new(WithColumn {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,104 +0,0 @@
|
|||||||
use crate::dataframe::values::{Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct DataTypes;
|
|
||||||
|
|
||||||
impl Command for DataTypes {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr dtypes"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Show dataframe data types."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Dataframe dtypes",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dtypes",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new(
|
|
||||||
"column".to_string(),
|
|
||||||
vec![Value::test_string("a"), Value::test_string("b")],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"dtype".to_string(),
|
|
||||||
vec![Value::test_string("i64"), Value::test_string("i64")],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
_engine_state: &EngineState,
|
|
||||||
_stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
let mut dtypes: Vec<Value> = Vec::new();
|
|
||||||
let names: Vec<Value> = df
|
|
||||||
.as_ref()
|
|
||||||
.get_column_names()
|
|
||||||
.iter()
|
|
||||||
.map(|v| {
|
|
||||||
let dtype = df
|
|
||||||
.as_ref()
|
|
||||||
.column(v)
|
|
||||||
.expect("using name from list of names from dataframe")
|
|
||||||
.dtype();
|
|
||||||
|
|
||||||
let dtype_str = dtype.to_string();
|
|
||||||
|
|
||||||
dtypes.push(Value::string(dtype_str, call.head));
|
|
||||||
|
|
||||||
Value::string(*v, call.head)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let names_col = Column::new("column".to_string(), names);
|
|
||||||
let dtypes_col = Column::new("dtype".to_string(), dtypes);
|
|
||||||
|
|
||||||
NuDataFrame::try_from_columns(vec![names_col, dtypes_col], None)
|
|
||||||
.map(|df| PipelineData::Value(df.into_value(call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(DataTypes {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,107 +0,0 @@
|
|||||||
use crate::dataframe::values::NuDataFrame;
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
use polars::{prelude::*, series::Series};
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct Dummies;
|
|
||||||
|
|
||||||
impl Command for Dummies {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr dummies"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Creates a new dataframe with dummy variables."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.switch("drop-first", "Drop first row", Some('d'))
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
description: "Create new dataframe with dummy variables from a dataframe",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dummies",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_series(
|
|
||||||
vec![
|
|
||||||
Series::new("a_1", &[1_u8, 0]),
|
|
||||||
Series::new("a_3", &[0_u8, 1]),
|
|
||||||
Series::new("b_2", &[1_u8, 0]),
|
|
||||||
Series::new("b_4", &[0_u8, 1]),
|
|
||||||
],
|
|
||||||
Span::test_data(),
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Create new dataframe with dummy variables from a series",
|
|
||||||
example: "[1 2 2 3 3] | dfr into-df | dfr dummies",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_series(
|
|
||||||
vec![
|
|
||||||
Series::new("0_1", &[1_u8, 0, 0, 0, 0]),
|
|
||||||
Series::new("0_2", &[0_u8, 1, 1, 0, 0]),
|
|
||||||
Series::new("0_3", &[0_u8, 0, 0, 1, 1]),
|
|
||||||
],
|
|
||||||
Span::test_data(),
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let drop_first: bool = call.has_flag(engine_state, stack, "drop-first")?;
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
df.as_ref()
|
|
||||||
.to_dummies(None, drop_first)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error calculating dummies".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: Some("The only allowed column types for dummies are String or Int".into()),
|
|
||||||
inner: vec![],
|
|
||||||
})
|
|
||||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(Dummies {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,144 +0,0 @@
|
|||||||
use crate::dataframe::values::{Column, NuDataFrame, NuExpression};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct FirstDF;
|
|
||||||
|
|
||||||
impl Command for FirstDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr first"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Show only the first number of rows or create a first expression"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.optional(
|
|
||||||
"rows",
|
|
||||||
SyntaxShape::Int,
|
|
||||||
"starting from the front, the number of rows to return",
|
|
||||||
)
|
|
||||||
.input_output_types(vec![
|
|
||||||
(
|
|
||||||
Type::Custom("expression".into()),
|
|
||||||
Type::Custom("expression".into()),
|
|
||||||
),
|
|
||||||
(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
),
|
|
||||||
])
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
description: "Return the first row of a dataframe",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new("a".to_string(), vec![Value::test_int(1)]),
|
|
||||||
Column::new("b".to_string(), vec![Value::test_int(2)]),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Return the first two rows of a dataframe",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first 2",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new(
|
|
||||||
"a".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(3)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"b".to_string(),
|
|
||||||
vec![Value::test_int(2), Value::test_int(4)],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Creates a first expression from a column",
|
|
||||||
example: "dfr col a | dfr first",
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let value = input.into_value(call.head)?;
|
|
||||||
if NuDataFrame::can_downcast(&value) {
|
|
||||||
let df = NuDataFrame::try_from_value(value)?;
|
|
||||||
command(engine_state, stack, call, df)
|
|
||||||
} else {
|
|
||||||
let expr = NuExpression::try_from_value(value)?;
|
|
||||||
let expr: NuExpression = expr.into_polars().first().into();
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(
|
|
||||||
NuExpression::into_value(expr, call.head),
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
df: NuDataFrame,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let rows: Option<usize> = call.opt(engine_state, stack, 0)?;
|
|
||||||
let rows = rows.unwrap_or(1);
|
|
||||||
|
|
||||||
let res = df.as_ref().head(Some(rows));
|
|
||||||
Ok(PipelineData::Value(
|
|
||||||
NuDataFrame::dataframe_into_value(res, call.head),
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
|
|
||||||
use super::*;
|
|
||||||
use crate::dataframe::lazy::aggregate::LazyAggregate;
|
|
||||||
use crate::dataframe::lazy::groupby::ToLazyGroupBy;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples_dataframe() {
|
|
||||||
let mut engine_state = build_test_engine_state(vec![Box::new(FirstDF {})]);
|
|
||||||
test_dataframe_example(&mut engine_state, &FirstDF.examples()[0]);
|
|
||||||
test_dataframe_example(&mut engine_state, &FirstDF.examples()[1]);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples_expression() {
|
|
||||||
let mut engine_state = build_test_engine_state(vec![
|
|
||||||
Box::new(FirstDF {}),
|
|
||||||
Box::new(LazyAggregate {}),
|
|
||||||
Box::new(ToLazyGroupBy {}),
|
|
||||||
]);
|
|
||||||
test_dataframe_example(&mut engine_state, &FirstDF.examples()[2]);
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,87 +0,0 @@
|
|||||||
use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct GetDF;
|
|
||||||
|
|
||||||
impl Command for GetDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr get"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Creates dataframe with the selected columns."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.rest("rest", SyntaxShape::Any, "column names to sort dataframe")
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Returns the selected column",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr get a",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![Column::new(
|
|
||||||
"a".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(3)],
|
|
||||||
)],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let columns: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
|
||||||
let (col_string, col_span) = convert_columns_string(columns, call.head)?;
|
|
||||||
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
df.as_ref()
|
|
||||||
.select(col_string)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error selecting columns".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(col_span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})
|
|
||||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(GetDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,118 +0,0 @@
|
|||||||
use crate::dataframe::values::{utils::DEFAULT_ROWS, Column, NuDataFrame, NuExpression};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct LastDF;
|
|
||||||
|
|
||||||
impl Command for LastDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr last"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Creates new dataframe with tail rows or creates a last expression."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.optional("rows", SyntaxShape::Int, "Number of rows for tail")
|
|
||||||
.input_output_types(vec![
|
|
||||||
(
|
|
||||||
Type::Custom("expression".into()),
|
|
||||||
Type::Custom("expression".into()),
|
|
||||||
),
|
|
||||||
(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
),
|
|
||||||
])
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
description: "Create new dataframe with last rows",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr last 1",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new("a".to_string(), vec![Value::test_int(3)]),
|
|
||||||
Column::new("b".to_string(), vec![Value::test_int(4)]),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Creates a last expression from a column",
|
|
||||||
example: "dfr col a | dfr last",
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let value = input.into_value(call.head)?;
|
|
||||||
if NuDataFrame::can_downcast(&value) {
|
|
||||||
let df = NuDataFrame::try_from_value(value)?;
|
|
||||||
command(engine_state, stack, call, df)
|
|
||||||
} else {
|
|
||||||
let expr = NuExpression::try_from_value(value)?;
|
|
||||||
let expr: NuExpression = expr.into_polars().last().into();
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(
|
|
||||||
NuExpression::into_value(expr, call.head),
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
df: NuDataFrame,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let rows: Option<usize> = call.opt(engine_state, stack, 0)?;
|
|
||||||
let rows = rows.unwrap_or(DEFAULT_ROWS);
|
|
||||||
|
|
||||||
let res = df.as_ref().tail(Some(rows));
|
|
||||||
Ok(PipelineData::Value(
|
|
||||||
NuDataFrame::dataframe_into_value(res, call.head),
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
    use super::*;
    use crate::dataframe::lazy::aggregate::LazyAggregate;
    use crate::dataframe::lazy::groupby::ToLazyGroupBy;

    // Example 0 exercises the eager (dataframe) path.
    #[test]
    fn test_examples_dataframe() {
        let mut engine_state = build_test_engine_state(vec![Box::new(LastDF {})]);
        test_dataframe_example(&mut engine_state, &LastDF.examples()[0]);
    }

    // Example 1 exercises the expression path, which additionally needs the
    // lazy group-by/aggregate commands registered in the test engine state.
    #[test]
    fn test_examples_expression() {
        let mut engine_state = build_test_engine_state(vec![
            Box::new(LastDF {}),
            Box::new(LazyAggregate {}),
            Box::new(ToLazyGroupBy {}),
        ]);
        test_dataframe_example(&mut engine_state, &LastDF.examples()[1]);
    }
}
|
|
@ -1,68 +0,0 @@
|
|||||||
use crate::dataframe::values::NuDataFrame;
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct ListDF;
|
|
||||||
|
|
||||||
impl Command for ListDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr ls"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Lists stored dataframes."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name()).category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Creates a new dataframe and shows it in the dataframe list",
|
|
||||||
example: r#"let test = ([[a b];[1 2] [3 4]] | dfr into-df);
|
|
||||||
ls"#,
|
|
||||||
result: None,
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
_input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let mut vals: Vec<(String, Value)> = vec![];
|
|
||||||
|
|
||||||
for overlay_frame in engine_state.active_overlays(&[]) {
|
|
||||||
for var in &overlay_frame.vars {
|
|
||||||
if let Ok(value) = stack.get_var(*var.1, call.head) {
|
|
||||||
let name = String::from_utf8_lossy(var.0).to_string();
|
|
||||||
vals.push((name, value));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let vals = vals
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|(name, value)| {
|
|
||||||
NuDataFrame::try_from_value(value).ok().map(|df| (name, df))
|
|
||||||
})
|
|
||||||
.map(|(name, df)| {
|
|
||||||
Value::record(
|
|
||||||
record! {
|
|
||||||
"name" => Value::string(name, call.head),
|
|
||||||
"columns" => Value::int(df.as_ref().width() as i64, call.head),
|
|
||||||
"rows" => Value::int(df.as_ref().height() as i64, call.head),
|
|
||||||
},
|
|
||||||
call.head,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect::<Vec<Value>>();
|
|
||||||
|
|
||||||
let list = Value::list(vals, call.head);
|
|
||||||
|
|
||||||
Ok(list.into_pipeline_data())
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,248 +0,0 @@
|
|||||||
use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct MeltDF;
|
|
||||||
|
|
||||||
impl Command for MeltDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr melt"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Unpivot a DataFrame from wide to long format."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.required_named(
|
|
||||||
"columns",
|
|
||||||
SyntaxShape::Table(vec![]),
|
|
||||||
"column names for melting",
|
|
||||||
Some('c'),
|
|
||||||
)
|
|
||||||
.required_named(
|
|
||||||
"values",
|
|
||||||
SyntaxShape::Table(vec![]),
|
|
||||||
"column names used as value columns",
|
|
||||||
Some('v'),
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"variable-name",
|
|
||||||
SyntaxShape::String,
|
|
||||||
"optional name for variable column",
|
|
||||||
Some('r'),
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"value-name",
|
|
||||||
SyntaxShape::String,
|
|
||||||
"optional name for value column",
|
|
||||||
Some('l'),
|
|
||||||
)
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "melt dataframe",
|
|
||||||
example:
|
|
||||||
"[[a b c d]; [x 1 4 a] [y 2 5 b] [z 3 6 c]] | dfr into-df | dfr melt -c [b c] -v [a d]",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(vec![
|
|
||||||
Column::new(
|
|
||||||
"b".to_string(),
|
|
||||||
vec![
|
|
||||||
Value::test_int(1),
|
|
||||||
Value::test_int(2),
|
|
||||||
Value::test_int(3),
|
|
||||||
Value::test_int(1),
|
|
||||||
Value::test_int(2),
|
|
||||||
Value::test_int(3),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"c".to_string(),
|
|
||||||
vec![
|
|
||||||
Value::test_int(4),
|
|
||||||
Value::test_int(5),
|
|
||||||
Value::test_int(6),
|
|
||||||
Value::test_int(4),
|
|
||||||
Value::test_int(5),
|
|
||||||
Value::test_int(6),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"variable".to_string(),
|
|
||||||
vec![
|
|
||||||
Value::test_string("a"),
|
|
||||||
Value::test_string("a"),
|
|
||||||
Value::test_string("a"),
|
|
||||||
Value::test_string("d"),
|
|
||||||
Value::test_string("d"),
|
|
||||||
Value::test_string("d"),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"value".to_string(),
|
|
||||||
vec![
|
|
||||||
Value::test_string("x"),
|
|
||||||
Value::test_string("y"),
|
|
||||||
Value::test_string("z"),
|
|
||||||
Value::test_string("a"),
|
|
||||||
Value::test_string("b"),
|
|
||||||
Value::test_string("c"),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
], None)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let id_col: Vec<Value> = call
|
|
||||||
.get_flag(engine_state, stack, "columns")?
|
|
||||||
.expect("required value");
|
|
||||||
let val_col: Vec<Value> = call
|
|
||||||
.get_flag(engine_state, stack, "values")?
|
|
||||||
.expect("required value");
|
|
||||||
|
|
||||||
let value_name: Option<Spanned<String>> = call.get_flag(engine_state, stack, "value-name")?;
|
|
||||||
let variable_name: Option<Spanned<String>> =
|
|
||||||
call.get_flag(engine_state, stack, "variable-name")?;
|
|
||||||
|
|
||||||
let (id_col_string, id_col_span) = convert_columns_string(id_col, call.head)?;
|
|
||||||
let (val_col_string, val_col_span) = convert_columns_string(val_col, call.head)?;
|
|
||||||
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
check_column_datatypes(df.as_ref(), &id_col_string, id_col_span)?;
|
|
||||||
check_column_datatypes(df.as_ref(), &val_col_string, val_col_span)?;
|
|
||||||
|
|
||||||
let mut res = df
|
|
||||||
.as_ref()
|
|
||||||
.melt(&id_col_string, &val_col_string)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error calculating melt".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if let Some(name) = &variable_name {
|
|
||||||
res.rename("variable", &name.item)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error renaming column".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(name.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(name) = &value_name {
|
|
||||||
res.rename("value", &name.item)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error renaming column".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(name.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(
|
|
||||||
NuDataFrame::dataframe_into_value(res, call.head),
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn check_column_datatypes<T: AsRef<str>>(
|
|
||||||
df: &polars::prelude::DataFrame,
|
|
||||||
cols: &[T],
|
|
||||||
col_span: Span,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
if cols.is_empty() {
|
|
||||||
return Err(ShellError::GenericError {
|
|
||||||
error: "Merge error".into(),
|
|
||||||
msg: "empty column list".into(),
|
|
||||||
span: Some(col_span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Checking if they are same type
|
|
||||||
if cols.len() > 1 {
|
|
||||||
for w in cols.windows(2) {
|
|
||||||
let l_series = df
|
|
||||||
.column(w[0].as_ref())
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error selecting columns".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(col_span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let r_series = df
|
|
||||||
.column(w[1].as_ref())
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error selecting columns".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(col_span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if l_series.dtype() != r_series.dtype() {
|
|
||||||
return Err(ShellError::GenericError {
|
|
||||||
error: "Merge error".into(),
|
|
||||||
msg: "found different column types in list".into(),
|
|
||||||
span: Some(col_span),
|
|
||||||
help: Some(format!(
|
|
||||||
"datatypes {} and {} are incompatible",
|
|
||||||
l_series.dtype(),
|
|
||||||
r_series.dtype()
|
|
||||||
)),
|
|
||||||
inner: vec![],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;

    // Run the documented melt example through the shared dataframe harness.
    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(MeltDF {})])
    }
}
|
|
@ -1,114 +0,0 @@
|
|||||||
mod append;
|
|
||||||
mod cast;
|
|
||||||
mod columns;
|
|
||||||
mod drop;
|
|
||||||
mod drop_duplicates;
|
|
||||||
mod drop_nulls;
|
|
||||||
mod dtypes;
|
|
||||||
mod dummies;
|
|
||||||
mod filter_with;
|
|
||||||
mod first;
|
|
||||||
mod get;
|
|
||||||
mod last;
|
|
||||||
mod list;
|
|
||||||
mod melt;
|
|
||||||
mod open;
|
|
||||||
mod query_df;
|
|
||||||
mod rename;
|
|
||||||
mod sample;
|
|
||||||
mod schema;
|
|
||||||
mod shape;
|
|
||||||
mod slice;
|
|
||||||
mod sql_context;
|
|
||||||
mod sql_expr;
|
|
||||||
mod summary;
|
|
||||||
mod take;
|
|
||||||
mod to_arrow;
|
|
||||||
mod to_avro;
|
|
||||||
mod to_csv;
|
|
||||||
mod to_df;
|
|
||||||
mod to_json_lines;
|
|
||||||
mod to_nu;
|
|
||||||
mod to_parquet;
|
|
||||||
mod with_column;
|
|
||||||
|
|
||||||
use nu_protocol::engine::StateWorkingSet;
|
|
||||||
|
|
||||||
pub use self::open::OpenDataFrame;
|
|
||||||
pub use append::AppendDF;
|
|
||||||
pub use cast::CastDF;
|
|
||||||
pub use columns::ColumnsDF;
|
|
||||||
pub use drop::DropDF;
|
|
||||||
pub use drop_duplicates::DropDuplicates;
|
|
||||||
pub use drop_nulls::DropNulls;
|
|
||||||
pub use dtypes::DataTypes;
|
|
||||||
pub use dummies::Dummies;
|
|
||||||
pub use filter_with::FilterWith;
|
|
||||||
pub use first::FirstDF;
|
|
||||||
pub use get::GetDF;
|
|
||||||
pub use last::LastDF;
|
|
||||||
pub use list::ListDF;
|
|
||||||
pub use melt::MeltDF;
|
|
||||||
pub use query_df::QueryDf;
|
|
||||||
pub use rename::RenameDF;
|
|
||||||
pub use sample::SampleDF;
|
|
||||||
pub use schema::SchemaDF;
|
|
||||||
pub use shape::ShapeDF;
|
|
||||||
pub use slice::SliceDF;
|
|
||||||
pub use sql_context::SQLContext;
|
|
||||||
pub use summary::Summary;
|
|
||||||
pub use take::TakeDF;
|
|
||||||
pub use to_arrow::ToArrow;
|
|
||||||
pub use to_avro::ToAvro;
|
|
||||||
pub use to_csv::ToCSV;
|
|
||||||
pub use to_df::ToDataFrame;
|
|
||||||
pub use to_json_lines::ToJsonLines;
|
|
||||||
pub use to_nu::ToNu;
|
|
||||||
pub use to_parquet::ToParquet;
|
|
||||||
pub use with_column::WithColumn;
|
|
||||||
|
|
||||||
pub fn add_eager_decls(working_set: &mut StateWorkingSet) {
|
|
||||||
macro_rules! bind_command {
|
|
||||||
( $command:expr ) => {
|
|
||||||
working_set.add_decl(Box::new($command));
|
|
||||||
};
|
|
||||||
( $( $command:expr ),* ) => {
|
|
||||||
$( working_set.add_decl(Box::new($command)); )*
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Dataframe commands
|
|
||||||
bind_command!(
|
|
||||||
AppendDF,
|
|
||||||
CastDF,
|
|
||||||
ColumnsDF,
|
|
||||||
DataTypes,
|
|
||||||
Summary,
|
|
||||||
DropDF,
|
|
||||||
DropDuplicates,
|
|
||||||
DropNulls,
|
|
||||||
Dummies,
|
|
||||||
FilterWith,
|
|
||||||
FirstDF,
|
|
||||||
GetDF,
|
|
||||||
LastDF,
|
|
||||||
ListDF,
|
|
||||||
MeltDF,
|
|
||||||
OpenDataFrame,
|
|
||||||
QueryDf,
|
|
||||||
RenameDF,
|
|
||||||
SampleDF,
|
|
||||||
SchemaDF,
|
|
||||||
ShapeDF,
|
|
||||||
SliceDF,
|
|
||||||
TakeDF,
|
|
||||||
ToArrow,
|
|
||||||
ToAvro,
|
|
||||||
ToCSV,
|
|
||||||
ToDataFrame,
|
|
||||||
ToNu,
|
|
||||||
ToParquet,
|
|
||||||
ToJsonLines,
|
|
||||||
WithColumn
|
|
||||||
);
|
|
||||||
}
|
|
@ -1,518 +0,0 @@
|
|||||||
use crate::dataframe::values::{NuDataFrame, NuLazyFrame, NuSchema};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
use polars::prelude::{
|
|
||||||
CsvEncoding, CsvReader, IpcReader, JsonFormat, JsonReader, LazyCsvReader, LazyFileListReader,
|
|
||||||
LazyFrame, ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
|
|
||||||
};
|
|
||||||
use polars_io::{avro::AvroReader, HiveOptions};
|
|
||||||
use std::{fs::File, io::BufReader, path::PathBuf};
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct OpenDataFrame;
|
|
||||||
|
|
||||||
impl Command for OpenDataFrame {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr open"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Opens CSV, JSON, JSON lines, arrow, avro, or parquet file to create dataframe."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.required(
|
|
||||||
"file",
|
|
||||||
SyntaxShape::Filepath,
|
|
||||||
"file path to load values from",
|
|
||||||
)
|
|
||||||
.switch("lazy", "creates a lazy dataframe", Some('l'))
|
|
||||||
.named(
|
|
||||||
"type",
|
|
||||||
SyntaxShape::String,
|
|
||||||
"File type: csv, tsv, json, parquet, arrow, avro. If omitted, derive from file extension",
|
|
||||||
Some('t'),
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"delimiter",
|
|
||||||
SyntaxShape::String,
|
|
||||||
"file delimiter character. CSV file",
|
|
||||||
Some('d'),
|
|
||||||
)
|
|
||||||
.switch(
|
|
||||||
"no-header",
|
|
||||||
"Indicates if file doesn't have header. CSV file",
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"infer-schema",
|
|
||||||
SyntaxShape::Number,
|
|
||||||
"Number of rows to infer the schema of the file. CSV file",
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"skip-rows",
|
|
||||||
SyntaxShape::Number,
|
|
||||||
"Number of rows to skip from file. CSV file",
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"columns",
|
|
||||||
SyntaxShape::List(Box::new(SyntaxShape::String)),
|
|
||||||
"Columns to be selected from csv file. CSV and Parquet file",
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"schema",
|
|
||||||
SyntaxShape::Record(vec![]),
|
|
||||||
r#"Polars Schema in format [{name: str}]. CSV, JSON, and JSONL files"#,
|
|
||||||
Some('s')
|
|
||||||
)
|
|
||||||
.input_output_type(Type::Any, Type::Custom("dataframe".into()))
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Takes a file name and creates a dataframe",
|
|
||||||
example: "dfr open test.csv",
|
|
||||||
result: None,
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
_input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
|
||||||
|
|
||||||
let type_option: Option<Spanned<String>> = call.get_flag(engine_state, stack, "type")?;
|
|
||||||
|
|
||||||
let type_id = match &type_option {
|
|
||||||
Some(ref t) => Some((t.item.to_owned(), "Invalid type", t.span)),
|
|
||||||
None => file.item.extension().map(|e| {
|
|
||||||
(
|
|
||||||
e.to_string_lossy().into_owned(),
|
|
||||||
"Invalid extension",
|
|
||||||
file.span,
|
|
||||||
)
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
|
|
||||||
match type_id {
|
|
||||||
Some((e, msg, blamed)) => match e.as_str() {
|
|
||||||
"csv" | "tsv" => from_csv(engine_state, stack, call),
|
|
||||||
"parquet" | "parq" => from_parquet(engine_state, stack, call),
|
|
||||||
"ipc" | "arrow" => from_ipc(engine_state, stack, call),
|
|
||||||
"json" => from_json(engine_state, stack, call),
|
|
||||||
"jsonl" => from_jsonl(engine_state, stack, call),
|
|
||||||
"avro" => from_avro(engine_state, stack, call),
|
|
||||||
_ => Err(ShellError::FileNotFoundCustom {
|
|
||||||
msg: format!(
|
|
||||||
"{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json, jsonl, avro"
|
|
||||||
),
|
|
||||||
span: blamed,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
None => Err(ShellError::FileNotFoundCustom {
|
|
||||||
msg: "File without extension".into(),
|
|
||||||
span: file.span,
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
.map(|value| PipelineData::Value(value, None))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_parquet(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<Value, ShellError> {
|
|
||||||
if call.has_flag(engine_state, stack, "lazy")? {
|
|
||||||
let file: String = call.req(engine_state, stack, 0)?;
|
|
||||||
let args = ScanArgsParquet {
|
|
||||||
n_rows: None,
|
|
||||||
cache: true,
|
|
||||||
parallel: ParallelStrategy::Auto,
|
|
||||||
rechunk: false,
|
|
||||||
row_index: None,
|
|
||||||
low_memory: false,
|
|
||||||
cloud_options: None,
|
|
||||||
use_statistics: false,
|
|
||||||
hive_options: HiveOptions::default(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuLazyFrame = LazyFrame::scan_parquet(file, args)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Parquet reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
df.into_value(call.head)
|
|
||||||
} else {
|
|
||||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
|
||||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
|
||||||
|
|
||||||
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error opening file".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(file.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
let reader = ParquetReader::new(r);
|
|
||||||
|
|
||||||
let reader = match columns {
|
|
||||||
None => reader,
|
|
||||||
Some(columns) => reader.with_columns(Some(columns)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuDataFrame = reader
|
|
||||||
.finish()
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Parquet reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
Ok(df.into_value(call.head))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_avro(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<Value, ShellError> {
|
|
||||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
|
||||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
|
||||||
|
|
||||||
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error opening file".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(file.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
let reader = AvroReader::new(r);
|
|
||||||
|
|
||||||
let reader = match columns {
|
|
||||||
None => reader,
|
|
||||||
Some(columns) => reader.with_columns(Some(columns)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuDataFrame = reader
|
|
||||||
.finish()
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Avro reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
Ok(df.into_value(call.head))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_ipc(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<Value, ShellError> {
|
|
||||||
if call.has_flag(engine_state, stack, "lazy")? {
|
|
||||||
let file: String = call.req(engine_state, stack, 0)?;
|
|
||||||
let args = ScanArgsIpc {
|
|
||||||
n_rows: None,
|
|
||||||
cache: true,
|
|
||||||
rechunk: false,
|
|
||||||
row_index: None,
|
|
||||||
memory_map: true,
|
|
||||||
cloud_options: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuLazyFrame = LazyFrame::scan_ipc(file, args)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "IPC reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
df.into_value(call.head)
|
|
||||||
} else {
|
|
||||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
|
||||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
|
||||||
|
|
||||||
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error opening file".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(file.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
let reader = IpcReader::new(r);
|
|
||||||
|
|
||||||
let reader = match columns {
|
|
||||||
None => reader,
|
|
||||||
Some(columns) => reader.with_columns(Some(columns)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuDataFrame = reader
|
|
||||||
.finish()
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "IPC reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
Ok(df.into_value(call.head))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_json(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<Value, ShellError> {
|
|
||||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
|
||||||
let file = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error opening file".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(file.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
let maybe_schema = call
|
|
||||||
.get_flag(engine_state, stack, "schema")?
|
|
||||||
.map(|schema| NuSchema::try_from(&schema))
|
|
||||||
.transpose()?;
|
|
||||||
|
|
||||||
let buf_reader = BufReader::new(file);
|
|
||||||
let reader = JsonReader::new(buf_reader);
|
|
||||||
|
|
||||||
let reader = match maybe_schema {
|
|
||||||
Some(schema) => reader.with_schema(schema.into()),
|
|
||||||
None => reader,
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuDataFrame = reader
|
|
||||||
.finish()
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Json reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
Ok(df.into_value(call.head))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_jsonl(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<Value, ShellError> {
|
|
||||||
let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
|
|
||||||
let maybe_schema = call
|
|
||||||
.get_flag(engine_state, stack, "schema")?
|
|
||||||
.map(|schema| NuSchema::try_from(&schema))
|
|
||||||
.transpose()?;
|
|
||||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
|
||||||
let file = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error opening file".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(file.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let buf_reader = BufReader::new(file);
|
|
||||||
let reader = JsonReader::new(buf_reader)
|
|
||||||
.with_json_format(JsonFormat::JsonLines)
|
|
||||||
.infer_schema_len(infer_schema);
|
|
||||||
|
|
||||||
let reader = match maybe_schema {
|
|
||||||
Some(schema) => reader.with_schema(schema.into()),
|
|
||||||
None => reader,
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuDataFrame = reader
|
|
||||||
.finish()
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Json lines reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
Ok(df.into_value(call.head))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_csv(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
) -> Result<Value, ShellError> {
|
|
||||||
let delimiter: Option<Spanned<String>> = call.get_flag(engine_state, stack, "delimiter")?;
|
|
||||||
let no_header: bool = call.has_flag(engine_state, stack, "no-header")?;
|
|
||||||
let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
|
|
||||||
let skip_rows: Option<usize> = call.get_flag(engine_state, stack, "skip-rows")?;
|
|
||||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
|
||||||
|
|
||||||
let maybe_schema = call
|
|
||||||
.get_flag(engine_state, stack, "schema")?
|
|
||||||
.map(|schema| NuSchema::try_from(&schema))
|
|
||||||
.transpose()?;
|
|
||||||
|
|
||||||
if call.has_flag(engine_state, stack, "lazy")? {
|
|
||||||
let file: String = call.req(engine_state, stack, 0)?;
|
|
||||||
let csv_reader = LazyCsvReader::new(file);
|
|
||||||
|
|
||||||
let csv_reader = match delimiter {
|
|
||||||
None => csv_reader,
|
|
||||||
Some(d) => {
|
|
||||||
if d.item.len() != 1 {
|
|
||||||
return Err(ShellError::GenericError {
|
|
||||||
error: "Incorrect delimiter".into(),
|
|
||||||
msg: "Delimiter has to be one character".into(),
|
|
||||||
span: Some(d.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
let delimiter = match d.item.chars().next() {
|
|
||||||
Some(d) => d as u8,
|
|
||||||
None => unreachable!(),
|
|
||||||
};
|
|
||||||
csv_reader.with_separator(delimiter)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let csv_reader = csv_reader.has_header(!no_header);
|
|
||||||
|
|
||||||
let csv_reader = match maybe_schema {
|
|
||||||
Some(schema) => csv_reader.with_schema(Some(schema.into())),
|
|
||||||
None => csv_reader,
|
|
||||||
};
|
|
||||||
|
|
||||||
let csv_reader = match infer_schema {
|
|
||||||
None => csv_reader,
|
|
||||||
Some(r) => csv_reader.with_infer_schema_length(Some(r)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let csv_reader = match skip_rows {
|
|
||||||
None => csv_reader,
|
|
||||||
Some(r) => csv_reader.with_skip_rows(r),
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuLazyFrame = csv_reader
|
|
||||||
.finish()
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Parquet reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
df.into_value(call.head)
|
|
||||||
} else {
|
|
||||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
|
||||||
let csv_reader = CsvReader::from_path(&file.item)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error creating CSV reader".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(file.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.with_encoding(CsvEncoding::LossyUtf8);
|
|
||||||
|
|
||||||
let csv_reader = match delimiter {
|
|
||||||
None => csv_reader,
|
|
||||||
Some(d) => {
|
|
||||||
if d.item.len() != 1 {
|
|
||||||
return Err(ShellError::GenericError {
|
|
||||||
error: "Incorrect delimiter".into(),
|
|
||||||
msg: "Delimiter has to be one character".into(),
|
|
||||||
span: Some(d.span),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
let delimiter = match d.item.chars().next() {
|
|
||||||
Some(d) => d as u8,
|
|
||||||
None => unreachable!(),
|
|
||||||
};
|
|
||||||
csv_reader.with_separator(delimiter)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let csv_reader = csv_reader.has_header(!no_header);
|
|
||||||
|
|
||||||
let csv_reader = match maybe_schema {
|
|
||||||
Some(schema) => csv_reader.with_schema(Some(schema.into())),
|
|
||||||
None => csv_reader,
|
|
||||||
};
|
|
||||||
|
|
||||||
let csv_reader = match infer_schema {
|
|
||||||
None => csv_reader,
|
|
||||||
Some(r) => csv_reader.infer_schema(Some(r)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let csv_reader = match skip_rows {
|
|
||||||
None => csv_reader,
|
|
||||||
Some(r) => csv_reader.with_skip_rows(r),
|
|
||||||
};
|
|
||||||
|
|
||||||
let csv_reader = match columns {
|
|
||||||
None => csv_reader,
|
|
||||||
Some(columns) => csv_reader.with_columns(Some(columns)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let df: NuDataFrame = csv_reader
|
|
||||||
.finish()
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Parquet reader error".into(),
|
|
||||||
msg: format!("{e:?}"),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?
|
|
||||||
.into();
|
|
||||||
|
|
||||||
Ok(df.into_value(call.head))
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,104 +0,0 @@
|
|||||||
use crate::dataframe::{
|
|
||||||
eager::SQLContext,
|
|
||||||
values::{Column, NuDataFrame, NuLazyFrame},
|
|
||||||
};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
// attribution:
|
|
||||||
// sql_context.rs, and sql_expr.rs were copied from polars-sql. thank you.
|
|
||||||
// maybe we should just use the crate at some point but it's not published yet.
|
|
||||||
// https://github.com/pola-rs/polars/tree/master/polars-sql
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct QueryDf;
|
|
||||||
|
|
||||||
impl Command for QueryDf {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr query"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Query dataframe using SQL. Note: The dataframe is always named 'df' in your query's from clause."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.required("sql", SyntaxShape::String, "sql query")
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn search_terms(&self) -> Vec<&str> {
|
|
||||||
vec!["dataframe", "sql", "search"]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Query dataframe using SQL",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr query 'select a from df'",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![Column::new(
|
|
||||||
"a".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(3)],
|
|
||||||
)],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let sql_query: String = call.req(engine_state, stack, 0)?;
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
let mut ctx = SQLContext::new();
|
|
||||||
ctx.register("df", &df.df);
|
|
||||||
let df_sql = ctx
|
|
||||||
.execute(&sql_query)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Dataframe Error".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
let lazy = NuLazyFrame::new(false, df_sql);
|
|
||||||
|
|
||||||
let eager = lazy.collect(call.head)?;
|
|
||||||
let value = Value::custom(Box::new(eager), call.head);
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(value, None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(QueryDf {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,185 +0,0 @@
|
|||||||
use crate::dataframe::{
|
|
||||||
utils::extract_strings,
|
|
||||||
values::{Column, NuDataFrame, NuLazyFrame},
|
|
||||||
};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct RenameDF;
|
|
||||||
|
|
||||||
impl Command for RenameDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr rename"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Rename a dataframe column."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.required(
|
|
||||||
"columns",
|
|
||||||
SyntaxShape::Any,
|
|
||||||
"Column(s) to be renamed. A string or list of strings",
|
|
||||||
)
|
|
||||||
.required(
|
|
||||||
"new names",
|
|
||||||
SyntaxShape::Any,
|
|
||||||
"New names for the selected column(s). A string or list of strings",
|
|
||||||
)
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe or lazyframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
description: "Renames a series",
|
|
||||||
example: "[5 6 7 8] | dfr into-df | dfr rename '0' new_name",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![Column::new(
|
|
||||||
"new_name".to_string(),
|
|
||||||
vec![
|
|
||||||
Value::test_int(5),
|
|
||||||
Value::test_int(6),
|
|
||||||
Value::test_int(7),
|
|
||||||
Value::test_int(8),
|
|
||||||
],
|
|
||||||
)],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Renames a dataframe column",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename a a_new",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new(
|
|
||||||
"a_new".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(3)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"b".to_string(),
|
|
||||||
vec![Value::test_int(2), Value::test_int(4)],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Renames two dataframe columns",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename [a b] [a_new b_new]",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new(
|
|
||||||
"a_new".to_string(),
|
|
||||||
vec![Value::test_int(1), Value::test_int(3)],
|
|
||||||
),
|
|
||||||
Column::new(
|
|
||||||
"b_new".to_string(),
|
|
||||||
vec![Value::test_int(2), Value::test_int(4)],
|
|
||||||
),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let value = input.into_value(call.head)?;
|
|
||||||
if NuLazyFrame::can_downcast(&value) {
|
|
||||||
let df = NuLazyFrame::try_from_value(value)?;
|
|
||||||
command_lazy(engine_state, stack, call, df)
|
|
||||||
} else {
|
|
||||||
let df = NuDataFrame::try_from_value(value)?;
|
|
||||||
command_eager(engine_state, stack, call, df)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command_eager(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
mut df: NuDataFrame,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let columns: Value = call.req(engine_state, stack, 0)?;
|
|
||||||
let columns = extract_strings(columns)?;
|
|
||||||
|
|
||||||
let new_names: Value = call.req(engine_state, stack, 1)?;
|
|
||||||
let new_names = extract_strings(new_names)?;
|
|
||||||
|
|
||||||
for (from, to) in columns.iter().zip(new_names.iter()) {
|
|
||||||
df.as_mut()
|
|
||||||
.rename(from, to)
|
|
||||||
.map_err(|e| ShellError::GenericError {
|
|
||||||
error: "Error renaming".into(),
|
|
||||||
msg: e.to_string(),
|
|
||||||
span: Some(call.head),
|
|
||||||
help: None,
|
|
||||||
inner: vec![],
|
|
||||||
})?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(df.into_value(call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command_lazy(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
lazy: NuLazyFrame,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let columns: Value = call.req(engine_state, stack, 0)?;
|
|
||||||
let columns = extract_strings(columns)?;
|
|
||||||
|
|
||||||
let new_names: Value = call.req(engine_state, stack, 1)?;
|
|
||||||
let new_names = extract_strings(new_names)?;
|
|
||||||
|
|
||||||
if columns.len() != new_names.len() {
|
|
||||||
let value: Value = call.req(engine_state, stack, 1)?;
|
|
||||||
return Err(ShellError::IncompatibleParametersSingle {
|
|
||||||
msg: "New name list has different size to column list".into(),
|
|
||||||
span: value.span(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let lazy = lazy.into_polars();
|
|
||||||
let lazy: NuLazyFrame = lazy.rename(&columns, &new_names).into();
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(lazy.into_value(call.head)?, None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(RenameDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,127 +0,0 @@
|
|||||||
use crate::dataframe::values::NuDataFrame;
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
use polars::{prelude::NamedFrom, series::Series};
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct SampleDF;
|
|
||||||
|
|
||||||
impl Command for SampleDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr sample"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Create sample dataframe."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.named(
|
|
||||||
"n-rows",
|
|
||||||
SyntaxShape::Int,
|
|
||||||
"number of rows to be taken from dataframe",
|
|
||||||
Some('n'),
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"fraction",
|
|
||||||
SyntaxShape::Number,
|
|
||||||
"fraction of dataframe to be taken",
|
|
||||||
Some('f'),
|
|
||||||
)
|
|
||||||
.named(
|
|
||||||
"seed",
|
|
||||||
SyntaxShape::Number,
|
|
||||||
"seed for the selection",
|
|
||||||
Some('s'),
|
|
||||||
)
|
|
||||||
.switch("replace", "sample with replace", Some('e'))
|
|
||||||
.switch("shuffle", "shuffle sample", Some('u'))
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![
|
|
||||||
Example {
|
|
||||||
description: "Sample rows from dataframe",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr sample --n-rows 1",
|
|
||||||
result: None, // No expected value because sampling is random
|
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Shows sample row using fraction and replace",
|
|
||||||
example:
|
|
||||||
"[[a b]; [1 2] [3 4] [5 6]] | dfr into-df | dfr sample --fraction 0.5 --replace",
|
|
||||||
result: None, // No expected value because sampling is random
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Samples rows from the piped-in dataframe, either by an absolute row
/// count (`--n-rows`) or by a fraction (`--fraction`). Exactly one of the
/// two selection flags must be given; `--replace`, `--shuffle` and
/// `--seed` tune the sampling.
fn command(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    let rows: Option<Spanned<i64>> = call.get_flag(engine_state, stack, "n-rows")?;
    let fraction: Option<Spanned<f64>> = call.get_flag(engine_state, stack, "fraction")?;
    // The flag arrives as a signed nushell integer and is reinterpreted as
    // the u64 seed expected by polars.
    let seed: Option<u64> = call
        .get_flag::<i64>(engine_state, stack, "seed")?
        .map(|val| val as u64);
    let replace: bool = call.has_flag(engine_state, stack, "replace")?;
    let shuffle: bool = call.has_flag(engine_state, stack, "shuffle")?;

    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    match (rows, fraction) {
        // polars' sample_n takes the row count wrapped in a one-element Series.
        (Some(rows), None) => df
            .as_ref()
            .sample_n(&Series::new("s", &[rows.item]), replace, shuffle, seed)
            .map_err(|e| ShellError::GenericError {
                error: "Error creating sample".into(),
                msg: e.to_string(),
                span: Some(rows.span),
                help: None,
                inner: vec![],
            }),
        // Likewise, sample_frac takes the fraction as a one-element Series.
        (None, Some(frac)) => df
            .as_ref()
            .sample_frac(&Series::new("frac", &[frac.item]), replace, shuffle, seed)
            .map_err(|e| ShellError::GenericError {
                error: "Error creating sample".into(),
                msg: e.to_string(),
                span: Some(frac.span),
                help: None,
                inner: vec![],
            }),
        // Both flags given: ambiguous selection, reject.
        (Some(_), Some(_)) => Err(ShellError::GenericError {
            error: "Incompatible flags".into(),
            msg: "Only one selection criterion allowed".into(),
            span: Some(call.head),
            help: None,
            inner: vec![],
        }),
        // Neither flag given: nothing to sample by.
        (None, None) => Err(ShellError::GenericError {
            error: "No selection".into(),
            msg: "No selection criterion was found".into(),
            span: Some(call.head),
            help: Some("Perhaps you want to use the flag -n or -f".into()),
            inner: vec![],
        }),
    }
    .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}
|
|
@ -1,112 +0,0 @@
|
|||||||
use crate::dataframe::values::NuDataFrame;
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct SchemaDF;
|
|
||||||
|
|
||||||
impl Command for SchemaDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr schema"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Show schema for a dataframe."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.switch("datatype-list", "creates a lazy dataframe", Some('l'))
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Dataframe schema",
|
|
||||||
example: r#"[[a b]; [1 "foo"] [3 "bar"]] | dfr into-df | dfr schema"#,
|
|
||||||
result: Some(Value::record(
|
|
||||||
record! {
|
|
||||||
"a" => Value::string("i64", Span::test_data()),
|
|
||||||
"b" => Value::string("str", Span::test_data()),
|
|
||||||
},
|
|
||||||
Span::test_data(),
|
|
||||||
)),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
if call.has_flag(engine_state, stack, "datatype-list")? {
|
|
||||||
Ok(PipelineData::Value(datatype_list(Span::unknown()), None))
|
|
||||||
} else {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
_engine_state: &EngineState,
|
|
||||||
_stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
let schema = df.schema();
|
|
||||||
let value: Value = schema.into();
|
|
||||||
Ok(PipelineData::Value(value, None))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn datatype_list(span: Span) -> Value {
|
|
||||||
let types: Vec<Value> = [
|
|
||||||
("null", ""),
|
|
||||||
("bool", ""),
|
|
||||||
("u8", ""),
|
|
||||||
("u16", ""),
|
|
||||||
("u32", ""),
|
|
||||||
("u64", ""),
|
|
||||||
("i8", ""),
|
|
||||||
("i16", ""),
|
|
||||||
("i32", ""),
|
|
||||||
("i64", ""),
|
|
||||||
("f32", ""),
|
|
||||||
("f64", ""),
|
|
||||||
("str", ""),
|
|
||||||
("binary", ""),
|
|
||||||
("date", ""),
|
|
||||||
("datetime<time_unit: (ms, us, ns) timezone (optional)>", "Time Unit can be: milliseconds: ms, microseconds: us, nanoseconds: ns. Timezone wildcard is *. Other Timezone examples: UTC, America/Los_Angeles."),
|
|
||||||
("duration<time_unit: (ms, us, ns)>", "Time Unit can be: milliseconds: ms, microseconds: us, nanoseconds: ns."),
|
|
||||||
("time", ""),
|
|
||||||
("object", ""),
|
|
||||||
("unknown", ""),
|
|
||||||
("list<dtype>", ""),
|
|
||||||
]
|
|
||||||
.iter()
|
|
||||||
.map(|(dtype, note)| {
|
|
||||||
Value::record(record! {
|
|
||||||
"dtype" => Value::string(*dtype, span),
|
|
||||||
"note" => Value::string(*note, span),
|
|
||||||
},
|
|
||||||
span)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
Value::list(types, span)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(SchemaDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,82 +0,0 @@
|
|||||||
use crate::dataframe::values::{Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct ShapeDF;
|
|
||||||
|
|
||||||
impl Command for ShapeDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr shape"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Shows column and row size for a dataframe."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Shows row and column shape",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr shape",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new("rows".to_string(), vec![Value::test_int(2)]),
|
|
||||||
Column::new("columns".to_string(), vec![Value::test_int(2)]),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
_engine_state: &EngineState,
|
|
||||||
_stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
let rows = Value::int(df.as_ref().height() as i64, call.head);
|
|
||||||
|
|
||||||
let cols = Value::int(df.as_ref().width() as i64, call.head);
|
|
||||||
|
|
||||||
let rows_col = Column::new("rows".to_string(), vec![rows]);
|
|
||||||
let cols_col = Column::new("columns".to_string(), vec![cols]);
|
|
||||||
|
|
||||||
NuDataFrame::try_from_columns(vec![rows_col, cols_col], None)
|
|
||||||
.map(|df| PipelineData::Value(df.into_value(call.head), None))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(ShapeDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,84 +0,0 @@
|
|||||||
use crate::dataframe::values::{Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct SliceDF;
|
|
||||||
|
|
||||||
impl Command for SliceDF {
|
|
||||||
fn name(&self) -> &str {
|
|
||||||
"dfr slice"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
|
||||||
"Creates new dataframe from a slice of rows."
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
|
||||||
Signature::build(self.name())
|
|
||||||
.required("offset", SyntaxShape::Int, "start of slice")
|
|
||||||
.required("size", SyntaxShape::Int, "size of slice")
|
|
||||||
.input_output_type(
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
Type::Custom("dataframe".into()),
|
|
||||||
)
|
|
||||||
.category(Category::Custom("dataframe".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
|
||||||
vec![Example {
|
|
||||||
description: "Create new dataframe from a slice of the rows",
|
|
||||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr slice 0 1",
|
|
||||||
result: Some(
|
|
||||||
NuDataFrame::try_from_columns(
|
|
||||||
vec![
|
|
||||||
Column::new("a".to_string(), vec![Value::test_int(1)]),
|
|
||||||
Column::new("b".to_string(), vec![Value::test_int(2)]),
|
|
||||||
],
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.expect("simple df for test should not fail")
|
|
||||||
.into_value(Span::test_data()),
|
|
||||||
),
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(
|
|
||||||
&self,
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
command(engine_state, stack, call, input)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn command(
|
|
||||||
engine_state: &EngineState,
|
|
||||||
stack: &mut Stack,
|
|
||||||
call: &Call,
|
|
||||||
input: PipelineData,
|
|
||||||
) -> Result<PipelineData, ShellError> {
|
|
||||||
let offset: i64 = call.req(engine_state, stack, 0)?;
|
|
||||||
let size: usize = call.req(engine_state, stack, 1)?;
|
|
||||||
|
|
||||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
|
||||||
|
|
||||||
let res = df.as_ref().slice(offset, size);
|
|
||||||
|
|
||||||
Ok(PipelineData::Value(
|
|
||||||
NuDataFrame::dataframe_into_value(res, call.head),
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::super::super::test_dataframe::test_dataframe;
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_examples() {
|
|
||||||
test_dataframe(vec![Box::new(SliceDF {})])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,200 +0,0 @@
|
|||||||
use polars::error::PolarsError;
|
|
||||||
use polars::prelude::{col, lit, DataType, Expr, LiteralValue, PolarsResult as Result, TimeUnit};
|
|
||||||
|
|
||||||
use sqlparser::ast::{
|
|
||||||
ArrayElemTypeDef, BinaryOperator as SQLBinaryOperator, DataType as SQLDataType,
|
|
||||||
Expr as SqlExpr, Function as SQLFunction, Value as SqlValue, WindowType,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Maps a SQL data type (from sqlparser's AST) to the corresponding polars
/// `DataType`. SQL types without a polars equivalent produce a
/// `ComputeError`.
fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
    Ok(match data_type {
        // All character/text-like SQL types collapse to polars String.
        SQLDataType::Char(_)
        | SQLDataType::Varchar(_)
        | SQLDataType::Uuid
        | SQLDataType::Clob(_)
        | SQLDataType::Text
        | SQLDataType::String(_) => DataType::String,
        SQLDataType::Float(_) => DataType::Float32,
        SQLDataType::Real => DataType::Float32,
        SQLDataType::Double => DataType::Float64,
        SQLDataType::TinyInt(_) => DataType::Int8,
        SQLDataType::UnsignedTinyInt(_) => DataType::UInt8,
        SQLDataType::SmallInt(_) => DataType::Int16,
        SQLDataType::UnsignedSmallInt(_) => DataType::UInt16,
        SQLDataType::Int(_) => DataType::Int32,
        SQLDataType::UnsignedInt(_) => DataType::UInt32,
        SQLDataType::BigInt(_) => DataType::Int64,
        SQLDataType::UnsignedBigInt(_) => DataType::UInt64,

        SQLDataType::Boolean => DataType::Boolean,
        SQLDataType::Date => DataType::Date,
        SQLDataType::Time(_, _) => DataType::Time,
        // Timestamps and intervals default to microsecond precision; the
        // timestamp carries no timezone.
        SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Microseconds, None),
        SQLDataType::Interval => DataType::Duration(TimeUnit::Microseconds),
        SQLDataType::Array(array_type_def) => match array_type_def {
            // Both ARRAY<T> and T[] syntaxes become a polars List, mapping
            // the element type recursively.
            ArrayElemTypeDef::AngleBracket(inner_type)
            | ArrayElemTypeDef::SquareBracket(inner_type) => {
                DataType::List(Box::new(map_sql_polars_datatype(inner_type)?))
            }
            _ => {
                return Err(PolarsError::ComputeError(
                    "SQL Datatype Array(None) was not supported in polars-sql yet!".into(),
                ))
            }
        },
        _ => {
            return Err(PolarsError::ComputeError(
                format!("SQL Datatype {data_type:?} was not supported in polars-sql yet!").into(),
            ))
        }
    })
}
|
|
||||||
|
|
||||||
/// Casts `expr` to the polars equivalent of the given SQL data type.
fn cast_(expr: Expr, data_type: &SQLDataType) -> Result<Expr> {
    map_sql_polars_datatype(data_type).map(|polars_type| expr.cast(polars_type))
}
|
|
||||||
|
|
||||||
/// Translates a SQL binary operator applied to two already-parsed polars
/// expressions into the equivalent polars expression. Unsupported
/// operators produce a `ComputeError`.
fn binary_op_(left: Expr, right: Expr, op: &SQLBinaryOperator) -> Result<Expr> {
    Ok(match op {
        SQLBinaryOperator::Plus => left + right,
        SQLBinaryOperator::Minus => left - right,
        SQLBinaryOperator::Multiply => left * right,
        SQLBinaryOperator::Divide => left / right,
        SQLBinaryOperator::Modulo => left % right,
        // SQL || concatenation: both sides are cast to String first.
        SQLBinaryOperator::StringConcat => {
            left.cast(DataType::String) + right.cast(DataType::String)
        }
        SQLBinaryOperator::Gt => left.gt(right),
        SQLBinaryOperator::Lt => left.lt(right),
        SQLBinaryOperator::GtEq => left.gt_eq(right),
        SQLBinaryOperator::LtEq => left.lt_eq(right),
        SQLBinaryOperator::Eq => left.eq(right),
        // There is no direct not-equal, so negate the equality.
        SQLBinaryOperator::NotEq => left.eq(right).not(),
        SQLBinaryOperator::And => left.and(right),
        SQLBinaryOperator::Or => left.or(right),
        SQLBinaryOperator::Xor => left.xor(right),
        _ => {
            return Err(PolarsError::ComputeError(
                format!("SQL Operator {op:?} was not supported in polars-sql yet!").into(),
            ))
        }
    })
}
|
|
||||||
|
|
||||||
fn literal_expr(value: &SqlValue) -> Result<Expr> {
|
|
||||||
Ok(match value {
|
|
||||||
SqlValue::Number(s, _) => {
|
|
||||||
// Check for existence of decimal separator dot
|
|
||||||
if s.contains('.') {
|
|
||||||
s.parse::<f64>().map(lit).map_err(|_| {
|
|
||||||
PolarsError::ComputeError(format!("Can't parse literal {s:?}").into())
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
s.parse::<i64>().map(lit).map_err(|_| {
|
|
||||||
PolarsError::ComputeError(format!("Can't parse literal {s:?}").into())
|
|
||||||
})
|
|
||||||
}?
|
|
||||||
}
|
|
||||||
SqlValue::SingleQuotedString(s) => lit(s.clone()),
|
|
||||||
SqlValue::NationalStringLiteral(s) => lit(s.clone()),
|
|
||||||
SqlValue::HexStringLiteral(s) => lit(s.clone()),
|
|
||||||
SqlValue::DoubleQuotedString(s) => lit(s.clone()),
|
|
||||||
SqlValue::Boolean(b) => lit(*b),
|
|
||||||
SqlValue::Null => Expr::Literal(LiteralValue::Null),
|
|
||||||
_ => {
|
|
||||||
return Err(PolarsError::ComputeError(
|
|
||||||
format!("Parsing SQL Value {value:?} was not supported in polars-sql yet!").into(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Recursively translates a sqlparser AST expression into a polars `Expr`.
///
/// Handles identifiers (column references), binary operations, function
/// calls, casts, parenthesized expressions, and literal values; anything
/// else produces a `ComputeError`.
pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
    Ok(match expr {
        // A bare identifier is a column reference.
        SqlExpr::Identifier(e) => col(&e.value),
        SqlExpr::BinaryOp { left, op, right } => {
            let left = parse_sql_expr(left)?;
            let right = parse_sql_expr(right)?;
            binary_op_(left, right, op)?
        }
        SqlExpr::Function(sql_function) => parse_sql_function(sql_function)?,
        SqlExpr::Cast {
            expr,
            data_type,
            format: _,
        } => cast_(parse_sql_expr(expr)?, data_type)?,
        // Parentheses: just parse the inner expression.
        SqlExpr::Nested(expr) => parse_sql_expr(expr)?,
        SqlExpr::Value(value) => literal_expr(value)?,
        _ => {
            return Err(PolarsError::ComputeError(
                format!("Expression: {expr:?} was not supported in polars-sql yet!").into(),
            ))
        }
    })
}
|
|
||||||
|
|
||||||
fn apply_window_spec(expr: Expr, window_type: Option<&WindowType>) -> Result<Expr> {
|
|
||||||
Ok(match &window_type {
|
|
||||||
Some(wtype) => match wtype {
|
|
||||||
WindowType::WindowSpec(window_spec) => {
|
|
||||||
// Process for simple window specification, partition by first
|
|
||||||
let partition_by = window_spec
|
|
||||||
.partition_by
|
|
||||||
.iter()
|
|
||||||
.map(parse_sql_expr)
|
|
||||||
.collect::<Result<Vec<_>>>()?;
|
|
||||||
expr.over(partition_by)
|
|
||||||
// Order by and Row range may not be supported at the moment
|
|
||||||
}
|
|
||||||
// TODO: make NamedWindow work
|
|
||||||
WindowType::NamedWindow(_named) => {
|
|
||||||
return Err(PolarsError::ComputeError(
|
|
||||||
format!("Expression: {expr:?} was not supported in polars-sql yet!").into(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
},
|
|
||||||
None => expr,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_sql_function(sql_function: &SQLFunction) -> Result<Expr> {
|
|
||||||
use sqlparser::ast::{FunctionArg, FunctionArgExpr};
|
|
||||||
// Function name mostly do not have name space, so it mostly take the first args
|
|
||||||
let function_name = sql_function.name.0[0].value.to_ascii_lowercase();
|
|
||||||
let args = sql_function
|
|
||||||
.args
|
|
||||||
.iter()
|
|
||||||
.map(|arg| match arg {
|
|
||||||
FunctionArg::Named { arg, .. } => arg,
|
|
||||||
FunctionArg::Unnamed(arg) => arg,
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
Ok(
|
|
||||||
match (
|
|
||||||
function_name.as_str(),
|
|
||||||
args.as_slice(),
|
|
||||||
sql_function.distinct,
|
|
||||||
) {
|
|
||||||
("sum", [FunctionArgExpr::Expr(expr)], false) => {
|
|
||||||
apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.sum()
|
|
||||||
}
|
|
||||||
("count", [FunctionArgExpr::Expr(expr)], false) => {
|
|
||||||
apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.count()
|
|
||||||
}
|
|
||||||
("count", [FunctionArgExpr::Expr(expr)], true) => {
|
|
||||||
apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.n_unique()
|
|
||||||
}
|
|
||||||
// Special case for wildcard args to count function.
|
|
||||||
("count", [FunctionArgExpr::Wildcard], false) => lit(1i32).count(),
|
|
||||||
_ => {
|
|
||||||
return Err(PolarsError::ComputeError(
|
|
||||||
format!(
|
|
||||||
"Function {function_name:?} with args {args:?} was not supported in polars-sql yet!"
|
|
||||||
)
|
|
||||||
.into(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
@ -1,279 +0,0 @@
|
|||||||
use crate::dataframe::values::{Column, NuDataFrame};
|
|
||||||
use nu_engine::command_prelude::*;
|
|
||||||
|
|
||||||
use polars::{
|
|
||||||
chunked_array::ChunkedArray,
|
|
||||||
prelude::{
|
|
||||||
AnyValue, DataFrame, DataType, Float64Type, IntoSeries, NewChunkedArray,
|
|
||||||
QuantileInterpolOptions, Series, StringType,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
/// The `dfr summary` command: produces descriptive statistics
/// (count, sum, mean, median, std, min, quantiles, max) for the
/// numeric columns of a dataframe. The actual computation lives in
/// the free `command` function below.
#[derive(Clone)]
pub struct Summary;
|
|
||||||
|
|
||||||
impl Command for Summary {
    fn name(&self) -> &str {
        "dfr summary"
    }

    fn usage(&self) -> &str {
        "For a dataframe, produces descriptive statistics (summary statistics) for its numeric columns."
    }

    fn signature(&self) -> Signature {
        // Dataframe in, dataframe out. The optional `--quantiles`/`-q` flag
        // takes a table of quantile values in [0, 1]; when absent, defaults
        // are supplied in `command` (0.25 / 0.50 / 0.75).
        Signature::build(self.name())
            .category(Category::Custom("dataframe".into()))
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .named(
                "quantiles",
                SyntaxShape::Table(vec![]),
                "provide optional quantiles",
                Some('q'),
            )
    }

    fn examples(&self) -> Vec<Example> {
        // Expected output shape: a leading "descriptor" column naming each
        // statistic, followed by one float column per numeric input column
        // (named "<col> (<dtype>)").
        vec![Example {
            description: "list dataframe descriptives",
            example: "[[a b]; [1 1] [1 1]] | dfr into-df | dfr summary",
            result: Some(
                NuDataFrame::try_from_columns(
                    vec![
                        Column::new(
                            "descriptor".to_string(),
                            vec![
                                Value::test_string("count"),
                                Value::test_string("sum"),
                                Value::test_string("mean"),
                                Value::test_string("median"),
                                Value::test_string("std"),
                                Value::test_string("min"),
                                Value::test_string("25%"),
                                Value::test_string("50%"),
                                Value::test_string("75%"),
                                Value::test_string("max"),
                            ],
                        ),
                        Column::new(
                            "a (i64)".to_string(),
                            vec![
                                Value::test_float(2.0),
                                Value::test_float(2.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                                Value::test_float(0.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                            ],
                        ),
                        Column::new(
                            "b (i64)".to_string(),
                            vec![
                                Value::test_float(2.0),
                                Value::test_float(2.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                                Value::test_float(0.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                                Value::test_float(1.0),
                            ],
                        ),
                    ],
                    None,
                )
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        }]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        // Thin wrapper: all of the work happens in the free `command`
        // helper so it can be unit-tested independently of the trait.
        command(engine_state, stack, call, input)
    }
}
|
|
||||||
|
|
||||||
/// Implementation of `dfr summary`.
///
/// Builds a new dataframe with one row per statistic (count, sum, mean,
/// median, std, min, the requested quantiles, max) and one column per
/// numeric column of the input dataframe, plus a leading "descriptor"
/// column holding the statistic names.
fn command(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    // Validate the optional `--quantiles` flag: every entry must be a float
    // in [0, 1]; anything else becomes a GenericError carrying the value's span.
    let quantiles: Option<Vec<Value>> = call.get_flag(engine_state, stack, "quantiles")?;
    let quantiles = quantiles.map(|values| {
        values
            .iter()
            .map(|value| {
                let span = value.span();
                match value {
                    Value::Float { val, .. } => {
                        if (&0.0..=&1.0).contains(&val) {
                            Ok(*val)
                        } else {
                            Err(ShellError::GenericError {
                                error: "Incorrect value for quantile".into(),
                                msg: "value should be between 0 and 1".into(),
                                span: Some(span),
                                help: None,
                                inner: vec![],
                            })
                        }
                    }
                    // Propagate an embedded error value as-is.
                    Value::Error { error, .. } => Err(*error.clone()),
                    _ => Err(ShellError::GenericError {
                        error: "Incorrect value for quantile".into(),
                        msg: "value should be a float".into(),
                        span: Some(span),
                        help: None,
                        inner: vec![],
                    }),
                }
            })
            .collect::<Result<Vec<f64>, ShellError>>()
    });

    // Default to quartiles when the flag was not provided.
    let quantiles = match quantiles {
        Some(quantiles) => quantiles?,
        None => vec![0.25, 0.50, 0.75],
    };

    // Row labels for the "descriptor" column: fixed statistics, then one
    // "<q*100>%" label per quantile, then "max".
    let mut quantiles_labels = quantiles
        .iter()
        .map(|q| Some(format!("{}%", q * 100.0)))
        .collect::<Vec<Option<String>>>();
    let mut labels = vec![
        Some("count".to_string()),
        Some("sum".to_string()),
        Some("mean".to_string()),
        Some("median".to_string()),
        Some("std".to_string()),
        Some("min".to_string()),
    ];
    labels.append(&mut quantiles_labels);
    labels.push(Some("max".to_string()));

    let df = NuDataFrame::try_from_pipeline(input, call.head)?;

    // First output column: the statistic names.
    let names = ChunkedArray::<StringType>::from_slice_options("descriptor", &labels).into_series();

    let head = std::iter::once(names);

    // One output column per input column, skipping Object-typed columns,
    // with every statistic cast to Float64 (missing/uncastable -> null).
    let tail = df
        .as_ref()
        .get_columns()
        .iter()
        .filter(|col| !matches!(col.dtype(), &DataType::Object("object", _)))
        .map(|col| {
            let count = col.len() as f64;

            let sum = col.sum_as_series().ok().and_then(|series| {
                series
                    .cast(&DataType::Float64)
                    .ok()
                    .and_then(|ca| match ca.get(0) {
                        Ok(AnyValue::Float64(v)) => Some(v),
                        _ => None,
                    })
            });

            let mean = match col.mean_as_series().get(0) {
                Ok(AnyValue::Float64(v)) => Some(v),
                _ => None,
            };

            let median = match col.median_as_series() {
                Ok(v) => match v.get(0) {
                    Ok(AnyValue::Float64(v)) => Some(v),
                    _ => None,
                },
                _ => None,
            };

            // Standard deviation with ddof = 0 (population std).
            let std = match col.std_as_series(0) {
                Ok(v) => match v.get(0) {
                    Ok(AnyValue::Float64(v)) => Some(v),
                    _ => None,
                },
                _ => None,
            };

            let min = col.min_as_series().ok().and_then(|series| {
                series
                    .cast(&DataType::Float64)
                    .ok()
                    .and_then(|ca| match ca.get(0) {
                        Ok(AnyValue::Float64(v)) => Some(v),
                        _ => None,
                    })
            });

            // One value per requested quantile, using polars' default
            // interpolation strategy.
            let mut quantiles = quantiles
                .clone()
                .into_iter()
                .map(|q| {
                    col.quantile_as_series(q, QuantileInterpolOptions::default())
                        .ok()
                        .and_then(|ca| ca.cast(&DataType::Float64).ok())
                        .and_then(|ca| match ca.get(0) {
                            Ok(AnyValue::Float64(v)) => Some(v),
                            _ => None,
                        })
                })
                .collect::<Vec<Option<f64>>>();

            let max = col.max_as_series().ok().and_then(|series| {
                series
                    .cast(&DataType::Float64)
                    .ok()
                    .and_then(|ca| match ca.get(0) {
                        Ok(AnyValue::Float64(v)) => Some(v),
                        _ => None,
                    })
            });

            // Assemble the statistics in the same order as `labels` above.
            let mut descriptors = vec![Some(count), sum, mean, median, std, min];
            descriptors.append(&mut quantiles);
            descriptors.push(max);

            // Column name records the original dtype, e.g. "a (i64)".
            let name = format!("{} ({})", col.name(), col.dtype());
            ChunkedArray::<Float64Type>::from_slice_options(&name, &descriptors).into_series()
        });

    let res = head.chain(tail).collect::<Vec<Series>>();

    DataFrame::new(res)
        .map_err(|e| ShellError::GenericError {
            error: "Dataframe Error".into(),
            msg: e.to_string(),
            span: Some(call.head),
            help: None,
            inner: vec![],
        })
        .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;

    // Runs the examples declared in `Summary::examples` through the shared
    // dataframe test harness and checks the expected results.
    #[test]
    fn test_examples() {
        test_dataframe(vec![Box::new(Summary {})])
    }
}
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user