Compare commits

..

3 Commits

Author SHA1 Message Date
3d631490bc Bump version to 0.87.1 (#11056) 2023-11-18 18:46:36 +01:00
68211dea3e Send only absolute paths to uu_cp (#11080)
# Description
Fixes https://github.com/nushell/nushell/issues/10832

Replaces: https://github.com/nushell/nushell/pull/10843
2023-11-18 17:57:49 +01:00
b8e9293c45 Fix rm path handling (#11064)
# Description
Fixes issue #11061 where `rm` fails to find a file after a `cd`. It
looks like the new glob functions do not return absolute file paths,
which we forgot to account for.

# Tests
Added a test (fails on current main, but passes with this PR).

---------

Co-authored-by: Jakub Žádník <kubouch@gmail.com>
2023-11-18 17:57:49 +01:00
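
Both path-handling fixes above hinge on the same idea: matches coming back from glob expansion may be relative, so they must be anchored to the shell's current working directory before being handed to file-system operations such as `rm` or `uu_cp`. The following is a minimal, illustrative Rust sketch of that idea, not the actual patch; the `make_absolute` helper and the example paths are invented for the illustration.

```rust
use std::path::{Path, PathBuf};

/// Join a possibly-relative path onto the current working directory.
/// Absolute paths are returned unchanged.
fn make_absolute(cwd: &Path, candidate: &Path) -> PathBuf {
    if candidate.is_absolute() {
        candidate.to_path_buf()
    } else {
        cwd.join(candidate)
    }
}

fn main() {
    let cwd = Path::new("/home/user/project");
    // A glob expansion might hand back relative matches like the first entry.
    let matches = [Path::new("src/main.rs"), Path::new("/etc/hosts")];
    for m in matches {
        // Only the relative match gets anchored to the cwd.
        println!("{}", make_absolute(cwd, m).display());
    }
}
```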
1097 changed files with 33253 additions and 60573 deletions

View File

@ -1,12 +1,10 @@
[files] [files]
extend-exclude = ["crates/nu-command/tests/commands/table.rs", "*.tsv", "*.json", "*.txt", "tests/fixtures/formats/*"] extend-exclude = ["crates/nu-command/tests/commands/table.rs", "*.tsv", "*.json", "*.txt"]
[default.extend-words] [default.extend-words]
# Ignore false-positives # Ignore false-positives
nd = "nd" nd = "nd"
pn = "pn"
fo = "fo" fo = "fo"
ful = "ful"
ons = "ons" ons = "ons"
ba = "ba" ba = "ba"
Plasticos = "Plasticos" Plasticos = "Plasticos"
@ -14,4 +12,3 @@ IIF = "IIF"
numer = "numer" numer = "numer"
ratatui = "ratatui" ratatui = "ratatui"
doas = "doas" doas = "doas"
wheres = "wheres"

View File

@ -0,0 +1,11 @@
---
name: standard library bug or feature report
about: Used to submit issues related to the nu standard library
title: ''
labels: ['needs-triage', 'std-library']
assignees: ''
---
**Describe the bug or feature**
A clear and concise description of the bug or feature.

View File

@ -11,10 +11,6 @@ updates:
directory: "/" directory: "/"
schedule: schedule:
interval: "weekly" interval: "weekly"
# We release on Tuesdays and open dependabot PRs will rebase after the
# version bump and thus consume unnecessary workers during release, thus
# let's open new ones on Wednesday
day: "wednesday"
ignore: ignore:
- dependency-name: "*" - dependency-name: "*"
update-types: ["version-update:semver-patch"] update-types: ["version-update:semver-patch"]
@ -22,4 +18,3 @@ updates:
directory: "/" directory: "/"
schedule: schedule:
interval: "weekly" interval: "weekly"
day: "wednesday"

View File

@ -19,7 +19,7 @@ jobs:
# Prevent sudden announcement of a new advisory from failing ci: # Prevent sudden announcement of a new advisory from failing ci:
continue-on-error: true continue-on-error: true
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
- uses: rustsec/audit-check@v1.4.1 - uses: rustsec/audit-check@v1.4.1
with: with:
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,12 +0,0 @@
let toolchain_spec = open rust-toolchain.toml | get toolchain.channel
let msrv_spec = open Cargo.toml | get package.rust-version
# This check is conservative in the sense that we use `rust-toolchain.toml`'s
# override to ensure that this is the upper-bound for the minimum supported
# rust version
if $toolchain_spec != $msrv_spec {
print -e "Mismatching rust compiler versions specified in `Cargo.toml` and `rust-toolchain.toml`"
print -e $"Cargo.toml: ($msrv_spec)"
print -e $"rust-toolchain.toml: ($toolchain_spec)"
exit 1
}

View File

@ -12,10 +12,6 @@ env:
# If changing these settings also change toolkit.nu # If changing these settings also change toolkit.nu
CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used" CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used"
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
cancel-in-progress: true
jobs: jobs:
fmt-clippy: fmt-clippy:
strategy: strategy:
@ -25,12 +21,14 @@ jobs:
# builds to link against a too-new-for-many-Linux-installs glibc version. Consider # builds to link against a too-new-for-many-Linux-installs glibc version. Consider
# revisiting this when 20.04 is closer to EOL (April 2025) # revisiting this when 20.04 is closer to EOL (April 2025)
platform: [windows-latest, macos-latest, ubuntu-20.04] platform: [windows-latest, macos-latest, ubuntu-20.04]
feature: [default, dataframe] feature: [default, dataframe, extra]
include: include:
- feature: default - feature: default
flags: "" flags: ""
- feature: dataframe - feature: dataframe
flags: "--features=dataframe" flags: "--features=dataframe"
- feature: extra
flags: "--features=extra"
exclude: exclude:
- platform: windows-latest - platform: windows-latest
feature: dataframe feature: dataframe
@ -40,10 +38,10 @@ jobs:
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
with: with:
rustflags: "" rustflags: ""
@ -58,53 +56,41 @@ jobs:
- name: Clippy of tests - name: Clippy of tests
run: cargo clippy --tests --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings run: cargo clippy --tests --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings
- name: Clippy of benchmarks
run: cargo clippy --benches --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings
tests: tests:
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
platform: [windows-latest, macos-latest, ubuntu-20.04] platform: [windows-latest, macos-latest, ubuntu-20.04]
feature: [default, dataframe] feature: [default, dataframe, extra]
include: include:
# linux CI cannot handle clipboard feature
- default-flags: ""
- platform: ubuntu-20.04
default-flags: "--no-default-features --features=default-no-clipboard"
- feature: default - feature: default
flags: "" flags: ""
- feature: dataframe - feature: dataframe
flags: "--features=dataframe" flags: "--features=dataframe"
- feature: extra
flags: "--features=extra"
exclude: exclude:
- platform: windows-latest - platform: windows-latest
feature: dataframe feature: dataframe
- platform: macos-latest - platform: macos-latest
feature: dataframe feature: dataframe
- platform: windows-latest
feature: extra
- platform: macos-latest
feature: extra
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
with: with:
rustflags: "" rustflags: ""
- name: Tests - name: Tests
run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.default-flags }} ${{ matrix.flags }} run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}
- name: Check for clean repo
shell: bash
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "there are changes";
git status --porcelain
exit 1
else
echo "no changes in working directory";
fi
std-lib-and-python-virtualenv: std-lib-and-python-virtualenv:
strategy: strategy:
@ -117,10 +103,10 @@ jobs:
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
with: with:
rustflags: "" rustflags: ""
@ -128,13 +114,10 @@ jobs:
run: cargo install --path . --locked --no-default-features run: cargo install --path . --locked --no-default-features
- name: Standard library tests - name: Standard library tests
run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std' run: nu -c 'use std testing; testing run-tests --path crates/nu-std'
- name: Ensure that Cargo.toml MSRV and rust-toolchain.toml use the same version
run: nu .github/workflows/check-msrv.nu
- name: Setup Python - name: Setup Python
uses: actions/setup-python@v5 uses: actions/setup-python@v4
with: with:
python-version: "3.10" python-version: "3.10"
@ -146,17 +129,6 @@ jobs:
run: nu scripts/test_virtualenv.nu run: nu scripts/test_virtualenv.nu
shell: bash shell: bash
- name: Check for clean repo
shell: bash
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "there are changes";
git status --porcelain
exit 1
else
echo "no changes in working directory";
fi
plugins: plugins:
strategy: strategy:
fail-fast: true fail-fast: true
@ -166,26 +138,15 @@ jobs:
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
with: with:
rustflags: "" rustflags: ""
- name: Clippy - name: Clippy
run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS run: cargo clippy --package nu_plugin_* ${{ matrix.flags }} -- $CLIPPY_OPTIONS
- name: Tests - name: Tests
run: cargo test --profile ci --package nu_plugin_* run: cargo test --profile ci --package nu_plugin_*
- name: Check for clean repo
shell: bash
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "there are changes";
git status --porcelain
exit 1
else
echo "no changes in working directory";
fi

View File

@ -27,7 +27,7 @@ jobs:
# if: github.repository == 'nushell/nightly' # if: github.repository == 'nushell/nightly'
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4.1.2 uses: actions/checkout@v4
if: github.repository == 'nushell/nightly' if: github.repository == 'nushell/nightly'
with: with:
ref: main ref: main
@ -36,10 +36,12 @@ jobs:
token: ${{ secrets.WORKFLOW_TOKEN }} token: ${{ secrets.WORKFLOW_TOKEN }}
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.8
if: github.repository == 'nushell/nightly' if: github.repository == 'nushell/nightly'
with: with:
version: 0.91.0 version: 0.86.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Synchronize the main branch of nightly repo with the main branch of Nushell official repo # Synchronize the main branch of nightly repo with the main branch of Nushell official repo
- name: Prepare for Nightly Release - name: Prepare for Nightly Release
@ -117,13 +119,13 @@ jobs:
os: ubuntu-20.04 os: ubuntu-20.04
target_rustflags: '' target_rustflags: ''
- target: riscv64gc-unknown-linux-gnu - target: riscv64gc-unknown-linux-gnu
os: ubuntu-latest os: ubuntu-20.04
target_rustflags: '' target_rustflags: ''
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
with: with:
ref: main ref: main
fetch-depth: 0 fetch-depth: 0
@ -133,15 +135,17 @@ jobs:
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135` # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with: with:
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.8
with: with:
version: 0.91.0 version: 0.86.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Release Nu Binary - name: Release Nu Binary
id: nu id: nu
@ -156,7 +160,7 @@ jobs:
- name: Create an Issue for Release Failure - name: Create an Issue for Release Failure
if: ${{ failure() }} if: ${{ failure() }}
uses: JasonEtco/create-an-issue@v2.9.2 uses: JasonEtco/create-an-issue@v2.9.1
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with: with:
@ -174,7 +178,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release # REF: https://github.com/marketplace/actions/gh-release
# Create a release only in nushell/nightly repo # Create a release only in nushell/nightly repo
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v2.0.4 uses: softprops/action-gh-release@v0.1.15
if: ${{ startsWith(github.repository, 'nushell/nightly') }} if: ${{ startsWith(github.repository, 'nushell/nightly') }}
with: with:
prerelease: true prerelease: true
@ -202,40 +206,40 @@ jobs:
include: include:
- target: aarch64-apple-darwin - target: aarch64-apple-darwin
os: macos-latest os: macos-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-apple-darwin - target: x86_64-apple-darwin
os: macos-latest os: macos-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
extra: 'bin' extra: 'bin'
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
extra: msi extra: msi
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
extra: 'bin' extra: 'bin'
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
extra: msi extra: msi
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-unknown-linux-gnu - target: x86_64-unknown-linux-gnu
os: ubuntu-20.04 os: ubuntu-20.04
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-unknown-linux-musl - target: x86_64-unknown-linux-musl
os: ubuntu-20.04 os: ubuntu-20.04
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: aarch64-unknown-linux-gnu - target: aarch64-unknown-linux-gnu
os: ubuntu-20.04 os: ubuntu-20.04
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
with: with:
ref: main ref: main
fetch-depth: 0 fetch-depth: 0
@ -245,15 +249,17 @@ jobs:
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135` # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with: with:
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.8
with: with:
version: 0.91.0 version: 0.86.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Release Nu Binary - name: Release Nu Binary
id: nu id: nu
@ -268,7 +274,7 @@ jobs:
- name: Create an Issue for Release Failure - name: Create an Issue for Release Failure
if: ${{ failure() }} if: ${{ failure() }}
uses: JasonEtco/create-an-issue@v2.9.2 uses: JasonEtco/create-an-issue@v2.9.1
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with: with:
@ -286,7 +292,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release # REF: https://github.com/marketplace/actions/gh-release
# Create a release only in nushell/nightly repo # Create a release only in nushell/nightly repo
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v2.0.4 uses: softprops/action-gh-release@v0.1.15
if: ${{ startsWith(github.repository, 'nushell/nightly') }} if: ${{ startsWith(github.repository, 'nushell/nightly') }}
with: with:
draft: false draft: false
@ -310,14 +316,16 @@ jobs:
- name: Waiting for Release - name: Waiting for Release
run: sleep 1800 run: sleep 1800
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
with: with:
ref: main ref: main
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.8
with: with:
version: 0.91.0 version: 0.86.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Keep the last a few releases # Keep the last a few releases
- name: Delete Older Releases - name: Delete Older Releases

View File

@ -71,7 +71,7 @@ const FULL_RLS_NAMING = {
# $env # $env
let USE_UBUNTU = $os starts-with ubuntu let USE_UBUNTU = 'ubuntu-20.04'
let FULL_NAME = $FULL_RLS_NAMING | get -i $target | default 'unknown-target-full' let FULL_NAME = $FULL_RLS_NAMING | get -i $target | default 'unknown-target-full'
print $'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b print $'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
@ -82,8 +82,8 @@ print $'Start building ($bin)...'; hr-line
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
# Build for Ubuntu and macOS # Build for Ubuntu and macOS
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
if $os in ['macos-latest'] or $USE_UBUNTU { if $os in [$USE_UBUNTU, 'macos-latest'] {
if $USE_UBUNTU { if $os == $USE_UBUNTU {
sudo apt update sudo apt update
sudo apt-get install libxcb-composite0-dev -y sudo apt-get install libxcb-composite0-dev -y
} }
@ -106,7 +106,7 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
_ => { _ => {
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?' # musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
# Actually just for x86_64-unknown-linux-musl target # Actually just for x86_64-unknown-linux-musl target
if $USE_UBUNTU { sudo apt install musl-tools -y } if $os == $USE_UBUNTU { sudo apt install musl-tools -y }
cargo-build-nu $flags cargo-build-nu $flags
} }
} }
@ -128,16 +128,18 @@ let executable = $'target/($target)/release/($bin)*($suffix)'
print $'Current executable file: ($executable)' print $'Current executable file: ($executable)'
cd $src; mkdir $dist; cd $src; mkdir $dist;
rm -rf ...(glob $'target/($target)/release/*.d') ...(glob $'target/($target)/release/nu_pretty_hex*') rm -rf $'target/($target)/release/*.d' $'target/($target)/release/nu_pretty_hex*'
print $'(char nl)All executable files:'; hr-line print $'(char nl)All executable files:'; hr-line
# We have to use `print` here to make sure the command output is displayed # We have to use `print` here to make sure the command output is displayed
print (ls -f ($executable | into glob)); sleep 1sec print (ls -f $executable); sleep 1sec
print $'(char nl)Copying release files...'; hr-line print $'(char nl)Copying release files...'; hr-line
"To use Nu plugins, use the register command to tell Nu where to find the plugin. For example: "To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:
> register ./nu_plugin_query" | save $'($dist)/README.txt' -f > register ./nu_plugin_query" | save $'($dist)/README.txt' -f
[LICENSE ...(glob $executable)] | each {|it| cp -rv $it $dist } | flatten [LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten
# Sleep a few seconds to make sure the cp process finished successfully
sleep 3sec
print $'(char nl)Check binary release version detail:'; hr-line print $'(char nl)Check binary release version detail:'; hr-line
let ver = if $os == 'windows-latest' { let ver = if $os == 'windows-latest' {
@ -146,23 +148,23 @@ let ver = if $os == 'windows-latest' {
(do -i { ./output/nu -c 'version' }) | str join (do -i { ./output/nu -c 'version' }) | str join
} }
if ($ver | str trim | is-empty) { if ($ver | str trim | is-empty) {
print $'(ansi r)Incompatible Nu binary: The binary cross compiled is not runnable on current arch...(ansi reset)' print $'(ansi r)Incompatible nu binary...(ansi reset)'
} else { print $ver } } else { print $ver }
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
# Create a release archive and send it to output for the following steps # Create a release archive and send it to output for the following steps
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
cd $dist; print $'(char nl)Creating release archive...'; hr-line cd $dist; print $'(char nl)Creating release archive...'; hr-line
if $os in ['macos-latest'] or $USE_UBUNTU { if $os in [$USE_UBUNTU, 'macos-latest'] {
let files = (ls | get name) let files = (ls | get name)
let dest = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' } let dest = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' }
let archive = $'($dist)/($dest).tar.gz' let archive = $'($dist)/($dest).tar.gz'
mkdir $dest mkdir $dest
$files | each {|it| cp -v $it $dest } $files | each {|it| mv $it $dest } | ignore
print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls $dest | print print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls $dest
tar -czf $archive $dest tar -czf $archive $dest
print $'archive: ---> ($archive)'; ls $archive print $'archive: ---> ($archive)'; ls $archive
@ -181,11 +183,10 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
if (get-env _EXTRA_) == 'msi' { if (get-env _EXTRA_) == 'msi' {
let wixRelease = $'($src)/target/wix/($releaseStem).msi' let wixRelease = $'($src)/target/wix/($releaseStem).msi'
print $'(char nl)Start creating Windows msi package with the following contents...' print $'(char nl)Start creating Windows msi package...'
cd $src; hr-line cd $src; hr-line
# Wix need the binaries be stored in target/release/ # Wix need the binaries be stored in target/release/
cp -r ($'($dist)/*' | into glob) target/release/ cp -r $'($dist)/*' target/release/
ls target/release/* | print
cargo install cargo-wix --version 0.3.4 cargo install cargo-wix --version 0.3.4
cargo wix --no-build --nocapture --package nu --output $wixRelease cargo wix --no-build --nocapture --package nu --output $wixRelease
# Workaround for https://github.com/softprops/action-gh-release/issues/280 # Workaround for https://github.com/softprops/action-gh-release/issues/280
@ -195,9 +196,9 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
} else { } else {
print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls | print print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
let archive = $'($dist)/($releaseStem).zip' let archive = $'($dist)/($releaseStem).zip'
7z a $archive ...(glob *) 7z a $archive *
let pkg = (ls -f $archive | get name) let pkg = (ls -f $archive | get name)
if not ($pkg | is-empty) { if not ($pkg | is-empty) {
# Workaround for https://github.com/softprops/action-gh-release/issues/280 # Workaround for https://github.com/softprops/action-gh-release/issues/280

View File

@ -18,7 +18,6 @@ jobs:
name: Std name: Std
strategy: strategy:
fail-fast: false
matrix: matrix:
target: target:
- aarch64-apple-darwin - aarch64-apple-darwin
@ -67,29 +66,30 @@ jobs:
os: ubuntu-20.04 os: ubuntu-20.04
target_rustflags: '' target_rustflags: ''
- target: riscv64gc-unknown-linux-gnu - target: riscv64gc-unknown-linux-gnu
os: ubuntu-latest os: ubuntu-20.04
target_rustflags: '' target_rustflags: ''
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
- name: Update Rust Toolchain Target - name: Update Rust Toolchain Target
run: | run: |
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135` # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with: with:
cache: false
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.8
with: with:
version: 0.91.0 version: 0.86.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Release Nu Binary - name: Release Nu Binary
id: nu id: nu
@ -104,7 +104,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release # REF: https://github.com/marketplace/actions/gh-release
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v2.0.4 uses: softprops/action-gh-release@v0.1.15
if: ${{ startsWith(github.ref, 'refs/tags/') }} if: ${{ startsWith(github.ref, 'refs/tags/') }}
with: with:
draft: true draft: true
@ -130,56 +130,57 @@ jobs:
include: include:
- target: aarch64-apple-darwin - target: aarch64-apple-darwin
os: macos-latest os: macos-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-apple-darwin - target: x86_64-apple-darwin
os: macos-latest os: macos-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
extra: 'bin' extra: 'bin'
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
extra: msi extra: msi
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
extra: 'bin' extra: 'bin'
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
extra: msi extra: msi
os: windows-latest os: windows-latest
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-unknown-linux-gnu - target: x86_64-unknown-linux-gnu
os: ubuntu-20.04 os: ubuntu-20.04
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: x86_64-unknown-linux-musl - target: x86_64-unknown-linux-musl
os: ubuntu-20.04 os: ubuntu-20.04
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
- target: aarch64-unknown-linux-gnu - target: aarch64-unknown-linux-gnu
os: ubuntu-20.04 os: ubuntu-20.04
target_rustflags: '--features=dataframe' target_rustflags: '--features=dataframe,extra'
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
- uses: actions/checkout@v4.1.2 - uses: actions/checkout@v4
- name: Update Rust Toolchain Target - name: Update Rust Toolchain Target
run: | run: |
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain - name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135` # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with: with:
cache: false
rustflags: '' rustflags: ''
- name: Setup Nushell - name: Setup Nushell
uses: hustcer/setup-nu@v3.9 uses: hustcer/setup-nu@v3.8
with: with:
version: 0.91.0 version: 0.86.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Release Nu Binary - name: Release Nu Binary
id: nu id: nu
@ -194,7 +195,7 @@ jobs:
# REF: https://github.com/marketplace/actions/gh-release # REF: https://github.com/marketplace/actions/gh-release
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v2.0.4 uses: softprops/action-gh-release@v0.1.15
if: ${{ startsWith(github.ref, 'refs/tags/') }} if: ${{ startsWith(github.ref, 'refs/tags/') }}
with: with:
draft: true draft: true

View File

@ -7,7 +7,9 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout Actions Repository - name: Checkout Actions Repository
uses: actions/checkout@v4.1.2 uses: actions/checkout@v4
- name: Check spelling - name: Check spelling
uses: crate-ci/typos@v1.20.3 uses: crate-ci/typos@v1.16.23
with:
config: ./.github/.typos.toml

View File

@ -10,16 +10,11 @@ Welcome to Nushell and thank you for considering contributing!
- [Useful commands](#useful-commands) - [Useful commands](#useful-commands)
- [Debugging tips](#debugging-tips) - [Debugging tips](#debugging-tips)
- [Git etiquette](#git-etiquette) - [Git etiquette](#git-etiquette)
- [Our Rust style](#our-rust-style)
- [Generally discouraged](#generally-discouraged)
- [Things we want to get better at](#things-we-want-to-get-better-at)
- [License](#license) - [License](#license)
## Other helpful resources
More resources can be found in the nascent [developer documentation](devdocs/README.md) in this repo.
- [Developer FAQ](devdocs/FAQ.md)
- [Platform support policy](devdocs/PLATFORM_SUPPORT.md)
- [Our Rust style](devdocs/rust_style.md)
## Proposing design changes ## Proposing design changes
First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it. First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it.
@ -68,74 +63,74 @@ Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/ref
- Build and run Nushell: - Build and run Nushell:
```nushell ```shell
cargo run cargo run
``` ```
- Build and run with dataframe support. - Build and run with dataframe support.
```nushell ```shell
cargo run --features=dataframe cargo run --features=dataframe
``` ```
- Run Clippy on Nushell: - Run Clippy on Nushell:
```nushell ```shell
cargo clippy --workspace -- -D warnings -D clippy::unwrap_used cargo clippy --workspace -- -D warnings -D clippy::unwrap_used
``` ```
or via the `toolkit.nu` command: or via the `toolkit.nu` command:
```nushell ```shell
use toolkit.nu clippy use toolkit.nu clippy
clippy clippy
``` ```
- Run all tests: - Run all tests:
```nushell ```shell
cargo test --workspace cargo test --workspace
``` ```
along with dataframe tests along with dataframe tests
```nushell ```shell
cargo test --workspace --features=dataframe cargo test --workspace --features=dataframe
``` ```
or via the `toolkit.nu` command: or via the `toolkit.nu` command:
```nushell ```shell
use toolkit.nu test use toolkit.nu test
test test
``` ```
- Run all tests for a specific command - Run all tests for a specific command
```nushell ```shell
cargo test --package nu-cli --test main -- commands::<command_name_here> cargo test --package nu-cli --test main -- commands::<command_name_here>
``` ```
- Check to see if there are code formatting issues - Check to see if there are code formatting issues
```nushell ```shell
cargo fmt --all -- --check cargo fmt --all -- --check
``` ```
or via the `toolkit.nu` command: or via the `toolkit.nu` command:
```nushell ```shell
use toolkit.nu fmt use toolkit.nu fmt
fmt --check fmt --check
``` ```
- Format the code in the project - Format the code in the project
```nushell ```shell
cargo fmt --all cargo fmt --all
``` ```
or via the `toolkit.nu` command: or via the `toolkit.nu` command:
```nushell ```shell
use toolkit.nu fmt use toolkit.nu fmt
fmt fmt
``` ```
- Set up `git` hooks to check formatting and run `clippy` before committing and pushing: - Set up `git` hooks to check formatting and run `clippy` before committing and pushing:
```nushell ```shell
use toolkit.nu setup-git-hooks use toolkit.nu setup-git-hooks
setup-git-hooks setup-git-hooks
``` ```
@ -145,12 +140,12 @@ Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/ref
- To view verbose logs when developing, enable the `trace` log level. - To view verbose logs when developing, enable the `trace` log level.
```nushell ```shell
cargo run --release -- --log-level trace cargo run --release -- --log-level trace
``` ```
- To redirect trace logs to a file, enable the `--log-target file` switch. - To redirect trace logs to a file, enable the `--log-target file` switch.
```nushell ```shell
cargo run --release -- --log-level trace --log-target file cargo run --release -- --log-level trace --log-target file
open $"($nu.temp-path)/nu-($nu.pid).log" open $"($nu.temp-path)/nu-($nu.pid).log"
``` ```
@ -242,6 +237,51 @@ You can help us to make the review process a smooth experience:
- Feel free to notify your reviewers or affected PR authors if your change might cause larger conflicts with another change. - Feel free to notify your reviewers or affected PR authors if your change might cause larger conflicts with another change.
- During the rollup of multiple PRs, we may choose to resolve merge conflicts and CI failures ourselves. (Allow maintainers to push to your branch to enable us to do this quickly.) - During the rollup of multiple PRs, we may choose to resolve merge conflicts and CI failures ourselves. (Allow maintainers to push to your branch to enable us to do this quickly.)
## Our Rust style
To make collaboration on a project the scale of Nushell easy, we want to work towards a style of Rust code that can easily be understood by all of our contributors. We conservatively rely on most of [`clippy`'s suggestions](https://github.com/rust-lang/rust-clippy) to get to the holy grail of "idiomatic" code. Good code, in our eyes, is not the cleverest use of all available language features or the code with the most unique personal touch; it is readable and strikes a balance between being concise and being unsurprising and explicit where it matters.
One example of this philosophy is that we generally avoid fighting the borrow-checker in our data model and instead try to get to a correct and simple solution first, then figure out where we should reuse data to achieve the necessary performance. As we are still pre-1.0, this has served us well, letting us quickly refactor or change larger parts of the code base.
### Generally discouraged
#### `+nightly` language features or things only available in the most recent `+stable`
To make life easier for the people who maintain Nushell packages in various distributions, each with their own `rustc` release cycle, we typically rely on slightly older Rust versions. We make no explicit guarantee about how far back we stay, but you can find the current version in our [`rust-toolchain.toml`](https://github.com/nushell/nushell/blob/main/rust-toolchain.toml).
(As a rule of thumb, this has typically been approximately 2 releases behind the newest stable compiler.)
The use of nightly features is prohibited.
#### Panicking
As Nushell aims to provide a reliable foundational way for folks to interact with their computer, we cannot carelessly crash the execution of their work by letting Nushell panic.
Thus panicking is not an allowed error handling strategy for anything that could be triggered by user input OR by the behavior of the outside system. If Nushell panics, it is either a bug or we are, against all odds, already in an unrecoverable state (the system stopped cooperating, we ran out of memory). The use of `.unwrap()` is thus outright banned, and any use of `.expect()` or of related panicking macros like `unreachable!` should include a helpful description of which assumptions have been violated.
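As a small, hedged illustration of this policy (the port-parsing example is invented, not taken from the code base): prefer propagating errors to the caller, and when a panic truly cannot happen, document why.

```rust
use std::num::ParseIntError;

// Preferred: surface the failure to the caller instead of panicking.
fn parse_port(raw: &str) -> Result<u16, ParseIntError> {
    raw.trim().parse::<u16>()
}

fn main() {
    // Banned: `.unwrap()` here would crash the shell on bad user input.
    // let port = parse_port("not-a-number").unwrap();

    // If a panic is genuinely unreachable, document the assumption that
    // would have to be violated for it to fire.
    let default_port = "8080"
        .parse::<u16>()
        .expect("hard-coded default port must be a valid u16");

    match parse_port("not-a-number") {
        Ok(port) => println!("using port {port}"),
        Err(_) => println!("falling back to {default_port}"),
    }
}
```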
#### `unsafe` code
For any use of `unsafe` code we require even higher standards and additional review. If you add or alter `unsafe` blocks, you have to be familiar with the promises you need to uphold, as laid out in the [Rustonomicon](https://doc.rust-lang.org/nomicon/intro.html). All `unsafe` uses should include `// SAFETY:` comments explaining how the invariants are upheld, alerting readers to what to watch out for when making a change.
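A minimal sketch of the `// SAFETY:` convention described above (the snippet is deliberately trivial and not taken from the code base):

```rust
fn first_byte(bytes: &[u8]) -> Option<u8> {
    if bytes.is_empty() {
        return None;
    }
    // SAFETY: we just checked that `bytes` is non-empty, so index 0 is in
    // bounds and `get_unchecked` cannot read past the end of the slice.
    Some(unsafe { *bytes.get_unchecked(0) })
}

fn main() {
    assert_eq!(first_byte(b"nu"), Some(b'n'));
    assert_eq!(first_byte(b""), None);
}
```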
##### FFI with system calls and the outside world
As a shell, Nushell needs to interact with system APIs in several places, for which FFI code with `unsafe` blocks may be necessary. In some cases this can be handled by safe API wrapper crates, but in others we may choose to make those calls directly.
If you do so, you need to document the system behavior on top of the Rust memory model guarantees that you uphold. This means documenting whether a particular system call is safe to use in a particular context and that all failure cases are properly recovered.
##### Implementing self-contained data structures
Another motivation for reaching for `unsafe` code might be implementing a particular data structure that is not expressible with safe `std` library APIs. Doing so in the Nushell code base has to clear a high bar of need, backed by profiling results. You should also first survey the [crate ecosystem](https://crates.io) to confirm there isn't already a usable, well-vetted crate that provides safe APIs to the desired data structure.
##### Make things go faster by removing checks
If you feel tempted to do this, it is probably a bad idea. Don't.
#### Macros
Custom macros are another advanced feature people feel tempted to use to work around perceived limitations of Rust's syntax, and we are not particularly fond of them.
They have clear downsides, not only in terms of readability when they locally introduce a different syntax: most tooling apart from the compiler struggles with them, which limits, for example, consistent automatic formatting or automated refactors with `rust-analyzer`.
`macro_rules!` definitions are also harder to read fluently than regular code, which can lead people to introduce surprising behavior when using a macro, be it because the macro does not follow proper hygiene rules or because it introduces excessive work at compile time.
So we generally discourage the addition of macros. In a lot of cases your macro may do something that can be expressed with functions or generics in a much more reusable fashion.
The only exceptions we may allow must demonstrate that the macro fixes something that would otherwise be extremely unreadable, error-prone, or consistently worse at compile time.
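To make that concrete, here is a hedged sketch of the kind of refactor this section suggests; both the macro and the replacement function are invented for the example. The generic function does the same job while staying visible to `rustfmt`, `rust-analyzer`, and human reviewers.

```rust
// Discouraged: a declarative macro that only saves a little typing.
macro_rules! print_all {
    ($items:expr) => {
        for item in $items {
            println!("{}", item);
        }
    };
}

// Preferred: a plain generic function expresses the same thing reusably.
fn print_all<T: std::fmt::Display>(items: impl IntoIterator<Item = T>) {
    for item in items {
        println!("{}", item);
    }
}

fn main() {
    print_all!(["a", "b"]); // macro version
    print_all(["a", "b"]); // function version
}
```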
### Things we want to get better at
These are things we did pretty liberally to get Nushell off the ground but that make it harder to build a high-quality, stable product. You may run across them, but you shouldn't take them as an endorsed example.
#### Liberal use of third-party dependencies
The amazing variety of crates on [crates.io](https://crates.io) allowed us to quickly get Nushell into a feature-rich state, but it left us with a bunch of baggage to clean up.
Each dependency introduces a compile-time cost, and duplicated code can add to the overall binary size. Vetting dependencies for correct and secure implementations also takes unreasonably more time, as it is a continuous process of reacting to updates and potential vulnerabilities.
Thus we only want to accept dependencies that are essential, well-tested implementations of a particular requirement of Nushell's codebase.
As part of the move to 1.0, we will also try to consolidate dependencies that implement similar things in an area. We don't need three different crates, each a potentially perfect fit for one of three problems, but rather one reliable crate with maximal overlap between what it provides and what we need.
We will favor crates that are well tested, widely used, likely to remain stable, and still actively maintained.
#### Deeply nested code
As Nushell uses a lot of enums in its internal data representation, there are a lot of `match` expressions. Combined with the need to handle many edge cases and to be defensive about any errors, this has led to some deeply nested code that is very hard to read (e.g. in the parser, but also in the implementation of several commands).
This can be observed both as a "rightward drift", where the main part of the code is found after many levels of indentation, and as long function bodies with several layers of branching that seemingly repeat the branching of the level above.
This can also be exacerbated by "quick" bugfixes or enhancements that just add a special case to catch a previously unexpected condition. The likelihood of introducing a bug in a sea of code duplication is high.
To combat this, consider using the early-`return` pattern to reject invalid data early, in one place, instead of building a tree through Rust's expression constructs with a lot of duplicated paths. Unpacking data into a type that expresses that the necessary checks have already been done, and using functions to properly separate common and distinct behavior, can also help.
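A brief sketch of the early-`return` style recommended here, using an invented validation function rather than real parser code:

```rust
struct Config {
    name: String,
    workers: usize,
}

// Nested style: every check adds another level of indentation.
fn validate_nested(cfg: &Config) -> Result<(), String> {
    if !cfg.name.is_empty() {
        if cfg.workers > 0 {
            Ok(())
        } else {
            Err("workers must be at least 1".into())
        }
    } else {
        Err("name must not be empty".into())
    }
}

// Early-return style: invalid data is rejected up front and the happy
// path reads straight down at a single indentation level.
fn validate_early_return(cfg: &Config) -> Result<(), String> {
    if cfg.name.is_empty() {
        return Err("name must not be empty".into());
    }
    if cfg.workers == 0 {
        return Err("workers must be at least 1".into());
    }
    Ok(())
}

fn main() {
    let cfg = Config { name: "nu".into(), workers: 4 };
    assert_eq!(validate_nested(&cfg), validate_early_return(&cfg));
}
```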
## License ## License
We use the [MIT License](https://github.com/nushell/nushell/blob/main/LICENSE) in all of our Nushell projects. If you are including or referencing a crate that uses the [GPL License](https://www.gnu.org/licenses/gpl-3.0.en.html#license-text) unfortunately we will not be able to accept your PR. We use the [MIT License](https://github.com/nushell/nushell/blob/main/LICENSE) in all of our Nushell projects. If you are including or referencing a crate that uses the [GPL License](https://www.gnu.org/licenses/gpl-3.0.en.html#license-text) unfortunately we will not be able to accept your PR.

2897
Cargo.lock (generated)

File diff suppressed because it is too large.

View File

@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
license = "MIT" license = "MIT"
name = "nu" name = "nu"
repository = "https://github.com/nushell/nushell" repository = "https://github.com/nushell/nushell"
rust-version = "1.77.2" rust-version = "1.60"
version = "0.92.2" version = "0.87.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -33,14 +33,9 @@ members = [
"crates/nu-cmd-lang", "crates/nu-cmd-lang",
"crates/nu-cmd-dataframe", "crates/nu-cmd-dataframe",
"crates/nu-command", "crates/nu-command",
"crates/nu-color-config",
"crates/nu-explore",
"crates/nu-json",
"crates/nu-lsp", "crates/nu-lsp",
"crates/nu-pretty-hex",
"crates/nu-protocol", "crates/nu-protocol",
"crates/nu-plugin", "crates/nu-plugin",
"crates/nu-plugin-test-support",
"crates/nu_plugin_inc", "crates/nu_plugin_inc",
"crates/nu_plugin_gstat", "crates/nu_plugin_gstat",
"crates/nu_plugin_example", "crates/nu_plugin_example",
@ -48,159 +43,53 @@ members = [
"crates/nu_plugin_custom_values", "crates/nu_plugin_custom_values",
"crates/nu_plugin_formats", "crates/nu_plugin_formats",
"crates/nu-std", "crates/nu-std",
"crates/nu-table",
"crates/nu-term-grid",
"crates/nu-test-support",
"crates/nu-utils", "crates/nu-utils",
] ]
[workspace.dependencies]
alphanumeric-sort = "1.5"
ansi-str = "0.8"
base64 = "0.22"
bracoxide = "0.1.2"
byteorder = "1.5"
bytesize = "1.3"
calamine = "0.24.0"
chardetng = "0.1.17"
chrono = { default-features = false, version = "0.4" }
chrono-humanize = "0.2.3"
chrono-tz = "0.8"
crossbeam-channel = "0.5.8"
crossterm = "0.27"
csv = "1.3"
ctrlc = "3.4"
dialoguer = { default-features = false, version = "0.11" }
digest = { default-features = false, version = "0.10" }
dirs-next = "2.0"
dtparse = "2.0"
encoding_rs = "0.8"
fancy-regex = "0.13"
filesize = "0.2"
filetime = "0.2"
fs_extra = "1.3"
fuzzy-matcher = "0.3"
hamcrest2 = "0.3"
heck = "0.5.0"
human-date-parser = "0.1.1"
indexmap = "2.2"
indicatif = "0.17"
is_executable = "1.0"
itertools = "0.12"
libc = "0.2"
libproc = "0.14"
log = "0.4"
lru = "0.12"
lscolors = { version = "0.17", default-features = false }
lsp-server = "0.7.5"
lsp-types = "0.95.0"
mach2 = "0.4"
md5 = { version = "0.10", package = "md-5"}
miette = "7.2"
mime = "0.3"
mime_guess = "2.0"
mockito = { version = "1.4", default-features = false }
native-tls = "0.2"
nix = { version = "0.28", default-features = false }
notify-debouncer-full = { version = "0.3", default-features = false }
nu-ansi-term = "0.50.0"
num-format = "0.4"
num-traits = "0.2"
omnipath = "0.1"
once_cell = "1.18"
open = "5.1"
os_pipe = "1.1"
pathdiff = "0.2"
percent-encoding = "2"
print-positions = "0.6"
procfs = "0.16.0"
pwd = "1.3"
quick-xml = "0.31.0"
quickcheck = "1.0"
quickcheck_macros = "1.0"
rand = "0.8"
ratatui = "0.26"
rayon = "1.10"
reedline = "0.31.0"
regex = "1.9.5"
ropey = "1.6.1"
roxmltree = "0.19"
rstest = { version = "0.18", default-features = false }
rusqlite = "0.31"
rust-embed = "8.2.0"
same-file = "1.0"
serde = { version = "1.0", default-features = false }
serde_json = "1.0"
serde_urlencoded = "0.7.1"
serde_yaml = "0.9"
sha2 = "0.10"
strip-ansi-escapes = "0.2.0"
sysinfo = "0.30"
tabled = { version = "0.14.0", default-features = false }
tempfile = "3.10"
terminal_size = "0.3"
titlecase = "2.0"
toml = "0.8"
trash = "3.3"
umask = "2.1"
unicode-segmentation = "1.11"
unicode-width = "0.1"
ureq = { version = "2.9", default-features = false }
url = "2.2"
uu_cp = "0.0.25"
uu_mkdir = "0.0.25"
uu_mktemp = "0.0.25"
uu_mv = "0.0.25"
uu_whoami = "0.0.25"
uu_uname = "0.0.25"
uucore = "0.0.25"
uuid = "1.8.0"
v_htmlescape = "0.15.0"
wax = "0.6"
which = "6.0.0"
windows = "0.54"
winreg = "0.52"
[dependencies] [dependencies]
nu-cli = { path = "./crates/nu-cli", version = "0.92.2" } nu-cli = { path = "./crates/nu-cli", version = "0.87.1" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.92.2" } nu-color-config = { path = "./crates/nu-color-config", version = "0.87.1" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.92.2" } nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.87.1" }
nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.92.2", features = [ nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.87.1" }
"dataframe", nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.87.1", features = ["dataframe"], optional = true }
], optional = true } nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.87.1", optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.92.2" } nu-command = { path = "./crates/nu-command", version = "0.87.1" }
nu-command = { path = "./crates/nu-command", version = "0.92.2" } nu-engine = { path = "./crates/nu-engine", version = "0.87.1" }
nu-engine = { path = "./crates/nu-engine", version = "0.92.2" } nu-explore = { path = "./crates/nu-explore", version = "0.87.1" }
nu-explore = { path = "./crates/nu-explore", version = "0.92.2" } nu-json = { path = "./crates/nu-json", version = "0.87.1" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.92.2" } nu-lsp = { path = "./crates/nu-lsp/", version = "0.87.1" }
nu-parser = { path = "./crates/nu-parser", version = "0.92.2" } nu-parser = { path = "./crates/nu-parser", version = "0.87.1" }
nu-path = { path = "./crates/nu-path", version = "0.92.2" } nu-path = { path = "./crates/nu-path", version = "0.87.1" }
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.92.2" } nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.87.1" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.92.2" } nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.87.1" }
nu-std = { path = "./crates/nu-std", version = "0.92.2" } nu-protocol = { path = "./crates/nu-protocol", version = "0.87.1" }
nu-system = { path = "./crates/nu-system", version = "0.92.2" } nu-system = { path = "./crates/nu-system", version = "0.87.1" }
nu-utils = { path = "./crates/nu-utils", version = "0.92.2" } nu-table = { path = "./crates/nu-table", version = "0.87.1" }
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.87.1" }
nu-std = { path = "./crates/nu-std", version = "0.87.1" }
nu-utils = { path = "./crates/nu-utils", version = "0.87.1" }
nu-ansi-term = "0.49.0"
reedline = { version = "0.26.0", features = ["bashisms", "sqlite"] }
reedline = { workspace = true, features = ["bashisms", "sqlite"] } crossterm = "0.27"
ctrlc = "3.4"
crossterm = { workspace = true } log = "0.4"
ctrlc = { workspace = true } miette = { version = "5.10", features = ["fancy-no-backtrace"] }
log = { workspace = true }
miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
mimalloc = { version = "0.1.37", default-features = false, optional = true } mimalloc = { version = "0.1.37", default-features = false, optional = true }
serde_json = { workspace = true } serde_json = "1.0"
simplelog = "0.12" simplelog = "0.12"
time = "0.3" time = "0.3"
[target.'cfg(not(target_os = "windows"))'.dependencies] [target.'cfg(not(target_os = "windows"))'.dependencies]
# Our dependencies don't use OpenSSL on Windows # Our dependencies don't use OpenSSL on Windows
openssl = { version = "0.10", features = ["vendored"], optional = true } openssl = { version = "0.10", features = ["vendored"], optional = true }
signal-hook = { version = "0.3", default-features = false }
[target.'cfg(windows)'.build-dependencies] [target.'cfg(windows)'.build-dependencies]
winresource = "0.1" winresource = "0.1"
[target.'cfg(target_family = "unix")'.dependencies] [target.'cfg(target_family = "unix")'.dependencies]
nix = { workspace = true, default-features = false, features = [ nix = { version = "0.27", default-features = false, features = [
"signal", "signal",
"process", "process",
"fs", "fs",
@ -208,14 +97,13 @@ nix = { workspace = true, default-features = false, features = [
] } ] }
[dev-dependencies] [dev-dependencies]
nu-test-support = { path = "./crates/nu-test-support", version = "0.92.2" } nu-test-support = { path = "./crates/nu-test-support", version = "0.87.1" }
assert_cmd = "2.0" assert_cmd = "2.0"
dirs-next = { workspace = true } criterion = "0.5"
divan = "0.1.14"
pretty_assertions = "1.4" pretty_assertions = "1.4"
rstest = { workspace = true, default-features = false } rstest = { version = "0.18", default-features = false }
serial_test = "3.0" serial_test = "2.0"
tempfile = { workspace = true } tempfile = "3.8"
[features] [features]
plugin = [ plugin = [
@ -226,16 +114,7 @@ plugin = [
"nu-protocol/plugin", "nu-protocol/plugin",
"nu-engine/plugin", "nu-engine/plugin",
] ]
default = ["default-no-clipboard", "system-clipboard"] default = ["plugin", "which-support", "trash-support", "sqlite", "mimalloc"]
# Enables convenient omitting of the system-clipboard feature, as it leads to problems in ci on linux
# See https://github.com/nushell/nushell/pull/11535
default-no-clipboard = [
"plugin",
"which-support",
"trash-support",
"sqlite",
"mimalloc",
]
stable = ["default"] stable = ["default"]
wasi = ["nu-cmd-lang/wasi"] wasi = ["nu-cmd-lang/wasi"]
# NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command # NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command
@ -245,12 +124,14 @@ wasi = ["nu-cmd-lang/wasi"]
static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"] static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"] mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
system-clipboard = ["reedline/system_clipboard", "nu-cli/system-clipboard"]
# Stable (Default) # Stable (Default)
which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"] which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"]
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"] trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]
# Extra feature for nushell
extra = ["dep:nu-cmd-extra", "nu-cmd-lang/extra"]
# Dataframe feature for nushell # Dataframe feature for nushell
dataframe = ["dep:nu-cmd-dataframe", "nu-cmd-lang/dataframe"] dataframe = ["dep:nu-cmd-dataframe", "nu-cmd-lang/dataframe"]
@ -285,9 +166,11 @@ bench = false
# To use a development version of a dependency please use a global override here # To use a development version of a dependency please use a global override here
# changing versions in each sub-crate of the workspace is tedious # changing versions in each sub-crate of the workspace is tedious
[patch.crates-io] [patch.crates-io]
# reedline = { git = "https://github.com/nushell/reedline", branch = "main" } # reedline = { git = "https://github.com/nushell/reedline.git", branch = "main" }
# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"} # nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}
# uu_cp = { git = "https://github.com/uutils/coreutils.git", branch = "main" }
# Criterion benchmarking setup
# Run all benchmarks with `cargo bench` # Run all benchmarks with `cargo bench`
# Run individual benchmarks like `cargo bench -- <regex>` e.g. `cargo bench -- parse` # Run individual benchmarks like `cargo bench -- <regex>` e.g. `cargo bench -- parse`
[[bench]] [[bench]]

View File

@ -54,7 +54,7 @@ Detailed installation instructions can be found in the [installation chapter of
[![Packaging status](https://repology.org/badge/vertical-allrepos/nushell.svg)](https://repology.org/project/nushell/versions) [![Packaging status](https://repology.org/badge/vertical-allrepos/nushell.svg)](https://repology.org/project/nushell/versions)
For details about which platforms the Nushell team actively supports, see [our platform support policy](devdocs/PLATFORM_SUPPORT.md). For details about which platforms the Nushell team actively supports, see [our platform support policy](PLATFORM_SUPPORT.md).
## Configuration ## Configuration
@ -199,7 +199,7 @@ topics that have been presented.
Nu adheres closely to a set of goals that make up its design philosophy. As features are added, they are checked against these goals. Nu adheres closely to a set of goals that make up its design philosophy. As features are added, they are checked against these goals.
- First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has [first-class support for Windows, macOS, and Linux](devdocs/PLATFORM_SUPPORT.md). - First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has [first-class support for Windows, macOS, and Linux](PLATFORM_SUPPORT.md).
- Nu ensures compatibility with existing platform-specific executables. - Nu ensures compatibility with existing platform-specific executables.
@ -228,7 +228,7 @@ Please submit an issue or PR to be added to this list.
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed! See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
<a href="https://github.com/nushell/nushell/graphs/contributors"> <a href="https://github.com/nushell/nushell/graphs/contributors">
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750" /> <img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=600" />
</a> </a>
## License ## License

View File

@ -1,6 +1,6 @@
# Divan benchmarks # Criterion benchmarks
These are benchmarks using [Divan](https://github.com/nvzqz/divan), a microbenchmarking tool for Rust. These are benchmarks using [Criterion](https://github.com/bheisler/criterion.rs), a microbenchmarking tool for Rust.
Run all benchmarks with `cargo bench` Run all benchmarks with `cargo bench`

View File

@ -1,367 +1,115 @@
use nu_cli::{eval_source, evaluate_commands}; use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use nu_cli::eval_source;
use nu_parser::parse; use nu_parser::parse;
use nu_plugin::{Encoder, EncodingType, PluginCallResponse, PluginOutput}; use nu_plugin::{EncodingType, PluginResponse};
use nu_protocol::{ use nu_protocol::{engine::EngineState, PipelineData, Span, Value};
engine::{EngineState, Stack},
eval_const::create_nu_constant,
PipelineData, Span, Spanned, Value, NU_VARIABLE_ID,
};
use nu_std::load_standard_library;
use nu_utils::{get_default_config, get_default_env}; use nu_utils::{get_default_config, get_default_env};
use std::path::{Path, PathBuf};
fn main() {
// Run registered benchmarks.
divan::main();
}
fn load_bench_commands() -> EngineState { fn load_bench_commands() -> EngineState {
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context()) nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
} }
fn canonicalize_path(engine_state: &EngineState, path: &Path) -> PathBuf {
let cwd = engine_state.current_work_dir();
if path.exists() {
match nu_path::canonicalize_with(path, cwd) {
Ok(canon_path) => canon_path,
Err(_) => path.to_owned(),
}
} else {
path.to_owned()
}
}
fn get_home_path(engine_state: &EngineState) -> PathBuf {
nu_path::home_dir()
.map(|path| canonicalize_path(engine_state, &path))
.unwrap_or_default()
}
fn setup_engine() -> EngineState {
let mut engine_state = load_bench_commands();
let home_path = get_home_path(&engine_state);
// parsing config.nu breaks without PWD set, so set a valid path
engine_state.add_env_var(
"PWD".into(),
Value::string(home_path.to_string_lossy(), Span::test_data()),
);
let nu_const = create_nu_constant(&engine_state, Span::unknown())
.expect("Failed to create nushell constant.");
engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
engine_state
}
fn bench_command(bencher: divan::Bencher, scaled_command: String) {
bench_command_with_custom_stack_and_engine(
bencher,
scaled_command,
Stack::new(),
setup_engine(),
)
}
fn bench_command_with_custom_stack_and_engine(
bencher: divan::Bencher,
scaled_command: String,
stack: nu_protocol::engine::Stack,
mut engine: EngineState,
) {
load_standard_library(&mut engine).unwrap();
let commands = Spanned {
span: Span::unknown(),
item: scaled_command,
};
bencher
.with_inputs(|| engine.clone())
.bench_values(|mut engine| {
evaluate_commands(
&commands,
&mut engine,
&mut stack.clone(),
PipelineData::empty(),
None,
)
.unwrap();
})
}
fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
let mut engine = setup_engine();
let commands = Spanned {
span: Span::unknown(),
item: command.to_string(),
};
let mut stack = Stack::new();
evaluate_commands(
&commands,
&mut engine,
&mut stack,
PipelineData::empty(),
None,
)
.unwrap();
(stack, engine)
}
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
// When the *_benchmarks functions were in different files, `cargo bench` would build
// an executable for every single one - incredibly slowly. Would be nice to figure out
// a way to split things up again.
#[divan::bench] fn parser_benchmarks(c: &mut Criterion) {
fn load_standard_lib(bencher: divan::Bencher) { let mut engine_state = load_bench_commands();
let engine = setup_engine(); // parsing config.nu breaks without PWD set
bencher engine_state.add_env_var(
.with_inputs(|| engine.clone()) "PWD".into(),
.bench_values(|mut engine| { Value::string("/some/dir".to_string(), Span::test_data()),
load_standard_library(&mut engine).unwrap();
})
}
#[divan::bench_group]
mod record {
use super::*;
fn create_flat_record_string(n: i32) -> String {
let mut s = String::from("let record = {");
for i in 0..n {
s.push_str(&format!("col_{}: {}", i, i));
if i < n - 1 {
s.push_str(", ");
}
}
s.push('}');
s
}
fn create_nested_record_string(depth: i32) -> String {
let mut s = String::from("let record = {");
for _ in 0..depth {
s.push_str("col: {");
}
s.push_str("col_final: 0");
for _ in 0..depth {
s.push('}');
}
s.push('}');
s
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn create(bencher: divan::Bencher, n: i32) {
bench_command(bencher, create_flat_record_string(n));
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn flat_access(bencher: divan::Bencher, n: i32) {
let (stack, engine) = setup_stack_and_engine_from_command(&create_flat_record_string(n));
bench_command_with_custom_stack_and_engine(
bencher,
"$record.col_0 | ignore".to_string(),
stack,
engine,
); );
}
#[divan::bench(args = [1, 2, 4, 8, 16, 32, 64, 128])]
fn nest_access(bencher: divan::Bencher, depth: i32) {
let (stack, engine) =
setup_stack_and_engine_from_command(&create_nested_record_string(depth));
let nested_access = ".col".repeat(depth as usize);
bench_command_with_custom_stack_and_engine(
bencher,
format!("$record{} | ignore", nested_access),
stack,
engine,
);
}
}
#[divan::bench_group]
mod table {
use super::*;
fn create_example_table_nrows(n: i32) -> String {
let mut s = String::from("let table = [[foo bar baz]; ");
for i in 0..n {
s.push_str(&format!("[0, 1, {i}]"));
if i < n - 1 {
s.push_str(", ");
}
}
s.push(']');
s
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn create(bencher: divan::Bencher, n: i32) {
bench_command(bencher, create_example_table_nrows(n));
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn get(bencher: divan::Bencher, n: i32) {
let (stack, engine) = setup_stack_and_engine_from_command(&create_example_table_nrows(n));
bench_command_with_custom_stack_and_engine(
bencher,
"$table | get bar | math sum | ignore".to_string(),
stack,
engine,
);
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn select(bencher: divan::Bencher, n: i32) {
let (stack, engine) = setup_stack_and_engine_from_command(&create_example_table_nrows(n));
bench_command_with_custom_stack_and_engine(
bencher,
"$table | select foo baz | ignore".to_string(),
stack,
engine,
);
}
}
#[divan::bench_group]
mod eval_commands {
use super::*;
#[divan::bench(args = [100, 1_000, 10_000])]
fn interleave(bencher: divan::Bencher, n: i32) {
bench_command(
bencher,
format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
)
}
#[divan::bench(args = [100, 1_000, 10_000])]
fn interleave_with_ctrlc(bencher: divan::Bencher, n: i32) {
let mut engine = setup_engine();
engine.ctrlc = Some(std::sync::Arc::new(std::sync::atomic::AtomicBool::new(
false,
)));
load_standard_library(&mut engine).unwrap();
let commands = Spanned {
span: Span::unknown(),
item: format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
};
bencher
.with_inputs(|| engine.clone())
.bench_values(|mut engine| {
evaluate_commands(
&commands,
&mut engine,
&mut nu_protocol::engine::Stack::new(),
PipelineData::empty(),
None,
)
.unwrap();
})
}
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
fn for_range(bencher: divan::Bencher, n: i32) {
bench_command(bencher, format!("(for $x in (1..{}) {{ sleep 50ns }})", n))
}
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
fn each(bencher: divan::Bencher, n: i32) {
bench_command(
bencher,
format!("(1..{}) | each {{|_| sleep 50ns }} | ignore", n),
)
}
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
fn par_each_1t(bencher: divan::Bencher, n: i32) {
bench_command(
bencher,
format!("(1..{}) | par-each -t 1 {{|_| sleep 50ns }} | ignore", n),
)
}
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
fn par_each_2t(bencher: divan::Bencher, n: i32) {
bench_command(
bencher,
format!("(1..{}) | par-each -t 2 {{|_| sleep 50ns }} | ignore", n),
)
}
}
#[divan::bench_group()]
mod parser_benchmarks {
use super::*;
#[divan::bench()]
fn parse_default_config_file(bencher: divan::Bencher) {
let engine_state = setup_engine();
let default_env = get_default_config().as_bytes();
bencher
.with_inputs(|| nu_protocol::engine::StateWorkingSet::new(&engine_state))
.bench_refs(|working_set| parse(working_set, None, default_env, false))
}
#[divan::bench()]
fn parse_default_env_file(bencher: divan::Bencher) {
let engine_state = setup_engine();
let default_env = get_default_env().as_bytes(); let default_env = get_default_env().as_bytes();
c.bench_function("parse_default_env_file", |b| {
b.iter_batched(
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|mut working_set| parse(&mut working_set, None, default_env, false),
BatchSize::SmallInput,
)
});
bencher let default_config = get_default_config().as_bytes();
.with_inputs(|| nu_protocol::engine::StateWorkingSet::new(&engine_state)) c.bench_function("parse_default_config_file", |b| {
.bench_refs(|working_set| parse(working_set, None, default_env, false)) b.iter_batched(
} || nu_protocol::engine::StateWorkingSet::new(&engine_state),
} |mut working_set| parse(&mut working_set, None, default_config, false),
BatchSize::SmallInput,
)
});
#[divan::bench_group()] c.bench_function("eval default_env.nu", |b| {
mod eval_benchmarks { b.iter(|| {
use super::*; let mut engine_state = load_bench_commands();
let mut stack = nu_protocol::engine::Stack::new();
#[divan::bench()]
fn eval_default_env(bencher: divan::Bencher) {
let default_env = get_default_env().as_bytes();
let fname = "default_env.nu";
bencher
.with_inputs(|| (setup_engine(), nu_protocol::engine::Stack::new()))
.bench_values(|(mut engine_state, mut stack)| {
eval_source( eval_source(
&mut engine_state, &mut engine_state,
&mut stack, &mut stack,
default_env, get_default_env().as_bytes(),
fname, "default_env.nu",
PipelineData::empty(), PipelineData::empty(),
false, false,
) )
}) })
} });
#[divan::bench()] c.bench_function("eval default_config.nu", |b| {
fn eval_default_config(bencher: divan::Bencher) { b.iter(|| {
let default_env = get_default_config().as_bytes(); let mut engine_state = load_bench_commands();
let fname = "default_config.nu"; // parsing config.nu breaks without PWD set
bencher engine_state.add_env_var(
.with_inputs(|| (setup_engine(), nu_protocol::engine::Stack::new())) "PWD".into(),
.bench_values(|(mut engine_state, mut stack)| { Value::string("/some/dir".to_string(), Span::test_data()),
);
let mut stack = nu_protocol::engine::Stack::new();
eval_source( eval_source(
&mut engine_state, &mut engine_state,
&mut stack, &mut stack,
default_env, get_default_config().as_bytes(),
fname, "default_config.nu",
PipelineData::empty(), PipelineData::empty(),
false, false,
) )
}) })
});
} }
fn eval_benchmarks(c: &mut Criterion) {
c.bench_function("eval default_env.nu", |b| {
b.iter(|| {
let mut engine_state = load_bench_commands();
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_env().as_bytes(),
"default_env.nu",
PipelineData::empty(),
false,
)
})
});
c.bench_function("eval default_config.nu", |b| {
b.iter(|| {
let mut engine_state = load_bench_commands();
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_config().as_bytes(),
"default_config.nu",
PipelineData::empty(),
false,
)
})
});
} }
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
@ -375,76 +123,50 @@ fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
Value::list(vec![record; row_cnt], Span::test_data()) Value::list(vec![record; row_cnt], Span::test_data())
} }
#[divan::bench_group()] fn encoding_benchmarks(c: &mut Criterion) {
mod encoding_benchmarks { let mut group = c.benchmark_group("Encoding");
use super::*; let test_cnt_pairs = [(100, 5), (100, 15), (10000, 5), (10000, 15)];
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
#[divan::bench(args = [(100, 5), (10000, 15)])] for fmt in ["json", "msgpack"] {
fn json_encode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) { group.bench_function(&format!("{fmt} encode {row_cnt} * {col_cnt}"), |b| {
let test_data = PluginOutput::CallResponse(
0,
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
);
let encoder = EncodingType::try_from_bytes(b"json").unwrap();
bencher
.with_inputs(Vec::new)
.bench_values(|mut res| encoder.encode(&test_data, &mut res))
}
#[divan::bench(args = [(100, 5), (10000, 15)])]
fn msgpack_encode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
let test_data = PluginOutput::CallResponse(
0,
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
);
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
bencher
.with_inputs(Vec::new)
.bench_values(|mut res| encoder.encode(&test_data, &mut res))
}
}
#[divan::bench_group()]
mod decoding_benchmarks {
use super::*;
#[divan::bench(args = [(100, 5), (10000, 15)])]
fn json_decode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
let test_data = PluginOutput::CallResponse(
0,
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
);
let encoder = EncodingType::try_from_bytes(b"json").unwrap();
let mut res = vec![]; let mut res = vec![];
encoder.encode(&test_data, &mut res).unwrap(); let test_data =
bencher PluginResponse::Value(Box::new(encoding_test_data(row_cnt, col_cnt)));
.with_inputs(|| { let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
let mut binary_data = std::io::Cursor::new(res.clone()); b.iter(|| encoder.encode_response(&test_data, &mut res))
binary_data.set_position(0); });
binary_data }
}) }
.bench_values(|mut binary_data| -> Result<Option<PluginOutput>, _> { group.finish();
encoder.decode(&mut binary_data)
})
} }
#[divan::bench(args = [(100, 5), (10000, 15)])] fn decoding_benchmarks(c: &mut Criterion) {
fn msgpack_decode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) { let mut group = c.benchmark_group("Decoding");
let test_data = PluginOutput::CallResponse( let test_cnt_pairs = [(100, 5), (100, 15), (10000, 5), (10000, 15)];
0, for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)), for fmt in ["json", "msgpack"] {
); group.bench_function(&format!("{fmt} decode for {row_cnt} * {col_cnt}"), |b| {
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
let mut res = vec![]; let mut res = vec![];
encoder.encode(&test_data, &mut res).unwrap(); let test_data =
bencher PluginResponse::Value(Box::new(encoding_test_data(row_cnt, col_cnt)));
.with_inputs(|| { let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
let mut binary_data = std::io::Cursor::new(res.clone()); encoder.encode_response(&test_data, &mut res).unwrap();
let mut binary_data = std::io::Cursor::new(res);
b.iter(|| {
binary_data.set_position(0); binary_data.set_position(0);
binary_data encoder.decode_response(&mut binary_data)
})
.bench_values(|mut binary_data| -> Result<Option<PluginOutput>, _> {
encoder.decode(&mut binary_data)
}) })
});
} }
} }
group.finish();
}
criterion_group!(
benches,
parser_benchmarks,
eval_benchmarks,
encoding_benchmarks,
decoding_benchmarks
);
criterion_main!(benches);

View File

@ -5,44 +5,41 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cli" name = "nu-cli"
version = "0.92.2" version = "0.87.1"
[lib] [lib]
bench = false bench = false
[dev-dependencies] [dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.92.2" } nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.87.1" }
nu-command = { path = "../nu-command", version = "0.92.2" } nu-command = { path = "../nu-command", version = "0.87.1" }
nu-test-support = { path = "../nu-test-support", version = "0.92.2" } nu-test-support = { path = "../nu-test-support", version = "0.87.1" }
rstest = { workspace = true, default-features = false } rstest = { version = "0.18.1", default-features = false }
[dependencies] [dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.92.2" } nu-cmd-base = { path = "../nu-cmd-base", version = "0.87.1" }
nu-engine = { path = "../nu-engine", version = "0.92.2" } nu-engine = { path = "../nu-engine", version = "0.87.1" }
nu-path = { path = "../nu-path", version = "0.92.2" } nu-path = { path = "../nu-path", version = "0.87.1" }
nu-parser = { path = "../nu-parser", version = "0.92.2" } nu-parser = { path = "../nu-parser", version = "0.87.1" }
nu-protocol = { path = "../nu-protocol", version = "0.92.2" } nu-protocol = { path = "../nu-protocol", version = "0.87.1" }
nu-utils = { path = "../nu-utils", version = "0.92.2" } nu-utils = { path = "../nu-utils", version = "0.87.1" }
nu-color-config = { path = "../nu-color-config", version = "0.92.2" } nu-color-config = { path = "../nu-color-config", version = "0.87.1" }
nu-ansi-term = { workspace = true } nu-ansi-term = "0.49.0"
reedline = { workspace = true, features = ["bashisms", "sqlite"] } reedline = { version = "0.26.0", features = ["bashisms", "sqlite"] }
chrono = { default-features = false, features = ["std"], workspace = true } chrono = { default-features = false, features = ["std"], version = "0.4" }
crossterm = { workspace = true } crossterm = "0.27"
fancy-regex = { workspace = true } fancy-regex = "0.11"
fuzzy-matcher = { workspace = true } fuzzy-matcher = "0.3"
is_executable = { workspace = true } is_executable = "1.0"
log = { workspace = true } log = "0.4"
miette = { workspace = true, features = ["fancy-no-backtrace"] } miette = { version = "5.10", features = ["fancy-no-backtrace"] }
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] } once_cell = "1.18"
once_cell = { workspace = true } percent-encoding = "2"
percent-encoding = { workspace = true } pathdiff = "0.2"
pathdiff = { workspace = true } sysinfo = "0.29"
sysinfo = { workspace = true } unicode-segmentation = "1.10"
unicode-segmentation = { workspace = true } uuid = { version = "1.5.0", features = ["v4"] }
uuid = { workspace = true, features = ["v4"] }
which = { workspace = true }
[features] [features]
plugin = [] plugin = []
system-clipboard = ["reedline/system_clipboard"]

View File

@ -0,0 +1,129 @@
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
};
use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)]
pub struct Commandline;
impl Command for Commandline {
fn name(&self) -> &str {
"commandline"
}
fn signature(&self) -> Signature {
Signature::build("commandline")
.input_output_types(vec![
(Type::Nothing, Type::Nothing),
(Type::String, Type::String),
])
.switch(
"cursor",
"Set or get the current cursor position",
Some('c'),
)
.switch(
"cursor-end",
"Set the current cursor position to the end of the buffer",
Some('e'),
)
.switch(
"append",
"appends the string to the end of the buffer",
Some('a'),
)
.switch(
"insert",
"inserts the string into the buffer at the cursor position",
Some('i'),
)
.switch(
"replace",
"replaces the current contents of the buffer (default)",
Some('r'),
)
.optional(
"cmd",
SyntaxShape::String,
"the string to perform the operation with",
)
.category(Category::Core)
}
fn usage(&self) -> &str {
"View or modify the current command line input buffer."
}
fn search_terms(&self) -> Vec<&str> {
vec!["repl", "interactive"]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
if let Some(cmd) = call.opt::<Value>(engine_state, stack, 0)? {
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
if call.has_flag("cursor") {
let cmd_str = cmd.as_string()?;
match cmd_str.parse::<i64>() {
Ok(n) => {
repl.cursor_pos = if n <= 0 {
0usize
} else {
repl.buffer
.grapheme_indices(true)
.map(|(i, _c)| i)
.nth(n as usize)
.unwrap_or(repl.buffer.len())
}
}
Err(_) => {
return Err(ShellError::CantConvert {
to_type: "int".to_string(),
from_type: "string".to_string(),
span: cmd.span(),
help: Some(format!(
r#"string "{cmd_str}" does not represent a valid int"#
)),
})
}
}
} else if call.has_flag("append") {
repl.buffer.push_str(&cmd.as_string()?);
} else if call.has_flag("insert") {
let cmd_str = cmd.as_string()?;
let cursor_pos = repl.cursor_pos;
repl.buffer.insert_str(cursor_pos, &cmd_str);
repl.cursor_pos += cmd_str.len();
} else {
repl.buffer = cmd.as_string()?;
repl.cursor_pos = repl.buffer.len();
}
Ok(Value::nothing(call.head).into_pipeline_data())
} else {
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
if call.has_flag("cursor-end") {
repl.cursor_pos = repl.buffer.graphemes(true).count();
Ok(Value::nothing(call.head).into_pipeline_data())
} else if call.has_flag("cursor") {
let char_pos = repl
.buffer
.grapheme_indices(true)
.chain(std::iter::once((repl.buffer.len(), "")))
.position(|(i, _c)| i == repl.cursor_pos)
.expect("Cursor position isn't on a grapheme boundary");
Ok(Value::string(char_pos.to_string(), call.head).into_pipeline_data())
} else {
Ok(Value::string(repl.buffer.to_string(), call.head).into_pipeline_data())
}
}
}
}

View File

@ -1,184 +0,0 @@
use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)]
pub struct Commandline;
impl Command for Commandline {
fn name(&self) -> &str {
"commandline"
}
fn signature(&self) -> Signature {
Signature::build("commandline")
.input_output_types(vec![
(Type::Nothing, Type::Nothing),
(Type::String, Type::String),
])
.switch(
"cursor",
"Set or get the current cursor position",
Some('c'),
)
.switch(
"cursor-end",
"Set the current cursor position to the end of the buffer",
Some('e'),
)
.switch(
"append",
"appends the string to the end of the buffer",
Some('a'),
)
.switch(
"insert",
"inserts the string into the buffer at the cursor position",
Some('i'),
)
.switch(
"replace",
"replaces the current contents of the buffer (default)",
Some('r'),
)
.optional(
"cmd",
SyntaxShape::String,
"the string to perform the operation with",
)
.category(Category::Core)
}
fn usage(&self) -> &str {
"View or modify the current command line input buffer."
}
fn search_terms(&self) -> Vec<&str> {
vec!["repl", "interactive"]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
if let Some(cmd) = call.opt::<Value>(engine_state, stack, 0)? {
let span = cmd.span();
let cmd = cmd.coerce_into_string()?;
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
if call.has_flag(engine_state, stack, "cursor")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--cursor (-c)` is deprecated".into(),
msg: "Setting the current cursor position by `--cursor (-c)` is deprecated"
.into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline set-cursor`".into()),
inner: vec![],
},
);
match cmd.parse::<i64>() {
Ok(n) => {
repl.cursor_pos = if n <= 0 {
0usize
} else {
repl.buffer
.grapheme_indices(true)
.map(|(i, _c)| i)
.nth(n as usize)
.unwrap_or(repl.buffer.len())
}
}
Err(_) => {
return Err(ShellError::CantConvert {
to_type: "int".to_string(),
from_type: "string".to_string(),
span,
help: Some(format!(r#"string "{cmd}" does not represent a valid int"#)),
})
}
}
} else if call.has_flag(engine_state, stack, "append")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--append (-a)` is deprecated".into(),
msg: "Appending the string to the end of the buffer by `--append (-a)` is deprecated".into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline edit --append (-a)`".into()),
inner: vec![],
},
);
repl.buffer.push_str(&cmd);
} else if call.has_flag(engine_state, stack, "insert")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--insert (-i)` is deprecated".into(),
msg: "Inserts the string into the buffer at the cursor position by `--insert (-i)` is deprecated".into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline edit --insert (-i)`".into()),
inner: vec![],
},
);
let cursor_pos = repl.cursor_pos;
repl.buffer.insert_str(cursor_pos, &cmd);
repl.cursor_pos += cmd.len();
} else {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--replace (-r)` is deprecated".into(),
msg: "Replacing the current contents of the buffer by `--replace (-p)` or positional argument is deprecated".into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline edit --replace (-r)`".into()),
inner: vec![],
},
);
repl.buffer = cmd;
repl.cursor_pos = repl.buffer.len();
}
Ok(Value::nothing(call.head).into_pipeline_data())
} else {
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
if call.has_flag(engine_state, stack, "cursor-end")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--cursor-end (-e)` is deprecated".into(),
msg: "Setting the current cursor position to the end of the buffer by `--cursor-end (-e)` is deprecated".into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline set-cursor --end (-e)`".into()),
inner: vec![],
},
);
repl.cursor_pos = repl.buffer.len();
Ok(Value::nothing(call.head).into_pipeline_data())
} else if call.has_flag(engine_state, stack, "cursor")? {
nu_protocol::report_error_new(
engine_state,
&ShellError::GenericError {
error: "`--cursor (-c)` is deprecated".into(),
msg: "Getting the current cursor position by `--cursor (-c)` is deprecated"
.into(),
span: Some(call.arguments_span()),
help: Some("Use `commandline get-cursor`".into()),
inner: vec![],
},
);
let char_pos = repl
.buffer
.grapheme_indices(true)
.chain(std::iter::once((repl.buffer.len(), "")))
.position(|(i, _c)| i == repl.cursor_pos)
.expect("Cursor position isn't on a grapheme boundary");
Ok(Value::string(char_pos.to_string(), call.head).into_pipeline_data())
} else {
Ok(Value::string(repl.buffer.to_string(), call.head).into_pipeline_data())
}
}
}
}

View File

@ -1,66 +0,0 @@
use nu_engine::command_prelude::*;
#[derive(Clone)]
pub struct SubCommand;
impl Command for SubCommand {
fn name(&self) -> &str {
"commandline edit"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.input_output_types(vec![(Type::Nothing, Type::Nothing)])
.switch(
"append",
"appends the string to the end of the buffer",
Some('a'),
)
.switch(
"insert",
"inserts the string into the buffer at the cursor position",
Some('i'),
)
.switch(
"replace",
"replaces the current contents of the buffer (default)",
Some('r'),
)
.required(
"str",
SyntaxShape::String,
"the string to perform the operation with",
)
.category(Category::Core)
}
fn usage(&self) -> &str {
"Modify the current command line input buffer."
}
fn search_terms(&self) -> Vec<&str> {
vec!["repl", "interactive"]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let str: String = call.req(engine_state, stack, 0)?;
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
if call.has_flag(engine_state, stack, "append")? {
repl.buffer.push_str(&str);
} else if call.has_flag(engine_state, stack, "insert")? {
let cursor_pos = repl.cursor_pos;
repl.buffer.insert_str(cursor_pos, &str);
repl.cursor_pos += str.len();
} else {
repl.buffer = str;
repl.cursor_pos = repl.buffer.len();
}
Ok(Value::nothing(call.head).into_pipeline_data())
}
}

View File

@ -1,52 +0,0 @@
use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)]
pub struct SubCommand;
impl Command for SubCommand {
fn name(&self) -> &str {
"commandline get-cursor"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.input_output_types(vec![(Type::Nothing, Type::Int)])
.allow_variants_without_examples(true)
.category(Category::Core)
}
fn usage(&self) -> &str {
"Get the current cursor position."
}
fn search_terms(&self) -> Vec<&str> {
vec!["repl", "interactive"]
}
fn run(
&self,
engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let repl = engine_state.repl_state.lock().expect("repl state mutex");
let char_pos = repl
.buffer
.grapheme_indices(true)
.chain(std::iter::once((repl.buffer.len(), "")))
.position(|(i, _c)| i == repl.cursor_pos)
.expect("Cursor position isn't on a grapheme boundary");
match i64::try_from(char_pos) {
Ok(pos) => Ok(Value::int(pos, call.head).into_pipeline_data()),
Err(e) => Err(ShellError::GenericError {
error: "Failed to convert cursor position to int".to_string(),
msg: e.to_string(),
span: None,
help: None,
inner: vec![],
}),
}
}
}

View File

@ -1,9 +0,0 @@
mod commandline_;
mod edit;
mod get_cursor;
mod set_cursor;
pub use commandline_::Commandline;
pub use edit::SubCommand as CommandlineEdit;
pub use get_cursor::SubCommand as CommandlineGetCursor;
pub use set_cursor::SubCommand as CommandlineSetCursor;

View File

@ -1,65 +0,0 @@
use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone)]
pub struct SubCommand;
impl Command for SubCommand {
fn name(&self) -> &str {
"commandline set-cursor"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.input_output_types(vec![(Type::Nothing, Type::Nothing)])
.switch(
"end",
"set the current cursor position to the end of the buffer",
Some('e'),
)
.optional("pos", SyntaxShape::Int, "Cursor position to be set")
.category(Category::Core)
}
fn usage(&self) -> &str {
"Set the current cursor position."
}
fn search_terms(&self) -> Vec<&str> {
vec!["repl", "interactive"]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
if let Some(pos) = call.opt::<i64>(engine_state, stack, 0)? {
repl.cursor_pos = if pos <= 0 {
0usize
} else {
repl.buffer
.grapheme_indices(true)
.map(|(i, _c)| i)
.nth(pos as usize)
.unwrap_or(repl.buffer.len())
};
Ok(Value::nothing(call.head).into_pipeline_data())
} else if call.has_flag(engine_state, stack, "end")? {
repl.cursor_pos = repl.buffer.len();
Ok(Value::nothing(call.head).into_pipeline_data())
} else {
Err(ShellError::GenericError {
error: "Required a positional argument or a flag".to_string(),
msg: "".to_string(),
span: None,
help: None,
inner: vec![],
})
}
}
}

View File

@ -1,6 +1,7 @@
use crate::commands::*;
use nu_protocol::engine::{EngineState, StateWorkingSet}; use nu_protocol::engine::{EngineState, StateWorkingSet};
use crate::commands::*;
pub fn add_cli_context(mut engine_state: EngineState) -> EngineState { pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
let delta = { let delta = {
let mut working_set = StateWorkingSet::new(&engine_state); let mut working_set = StateWorkingSet::new(&engine_state);
@ -13,9 +14,6 @@ pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
bind_command! { bind_command! {
Commandline, Commandline,
CommandlineEdit,
CommandlineGetCursor,
CommandlineSetCursor,
History, History,
HistorySession, HistorySession,
Keybindings, Keybindings,

View File

@ -1,5 +1,9 @@
use nu_engine::command_prelude::*; use nu_protocol::ast::Call;
use nu_protocol::HistoryFileFormat; use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
record, Category, Example, HistoryFileFormat, IntoInterruptiblePipelineData, PipelineData,
ShellError, Signature, Span, Type, Value,
};
use reedline::{ use reedline::{
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery, FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
SqliteBackedHistory, SqliteBackedHistory,
@ -19,7 +23,10 @@ impl Command for History {
fn signature(&self) -> nu_protocol::Signature { fn signature(&self) -> nu_protocol::Signature {
Signature::build("history") Signature::build("history")
.input_output_types(vec![(Type::Nothing, Type::Any)]) .input_output_types(vec![
(Type::Nothing, Type::Table(vec![])),
(Type::Nothing, Type::Nothing),
])
.allow_variants_without_examples(true) .allow_variants_without_examples(true)
.switch("clear", "Clears out the history entries", Some('c')) .switch("clear", "Clears out the history entries", Some('c'))
.switch( .switch(
@ -27,31 +34,27 @@ impl Command for History {
"Show long listing of entries for sqlite history", "Show long listing of entries for sqlite history",
Some('l'), Some('l'),
) )
.category(Category::History) .category(Category::Misc)
} }
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, _stack: &mut Stack,
call: &Call, call: &Call,
_input: PipelineData, _input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let head = call.head; let head = call.head;
let Some(history) = engine_state.history_config() else {
return Ok(PipelineData::empty());
};
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
if let Some(config_path) = nu_path::config_dir() { if let Some(config_path) = nu_path::config_dir() {
let clear = call.has_flag(engine_state, stack, "clear")?; let clear = call.has_flag("clear");
let long = call.has_flag(engine_state, stack, "long")?; let long = call.has_flag("long");
let ctrlc = engine_state.ctrlc.clone(); let ctrlc = engine_state.ctrlc.clone();
let mut history_path = config_path; let mut history_path = config_path;
history_path.push("nushell"); history_path.push("nushell");
match history.file_format { match engine_state.config.history_file_format {
HistoryFileFormat::Sqlite => { HistoryFileFormat::Sqlite => {
history_path.push("history.sqlite3"); history_path.push("history.sqlite3");
} }
@ -65,9 +68,10 @@ impl Command for History {
// TODO: FIXME also clear the auxiliary files when using sqlite // TODO: FIXME also clear the auxiliary files when using sqlite
Ok(PipelineData::empty()) Ok(PipelineData::empty())
} else { } else {
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format { let history_reader: Option<Box<dyn ReedlineHistory>> =
match engine_state.config.history_file_format {
HistoryFileFormat::Sqlite => { HistoryFileFormat::Sqlite => {
SqliteBackedHistory::with_file(history_path.clone(), None, None) SqliteBackedHistory::with_file(history_path, None, None)
.map(|inner| { .map(|inner| {
let boxed: Box<dyn ReedlineHistory> = Box::new(inner); let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
boxed boxed
@ -76,8 +80,8 @@ impl Command for History {
} }
HistoryFileFormat::PlainText => FileBackedHistory::with_file( HistoryFileFormat::PlainText => FileBackedHistory::with_file(
history.max_size as usize, engine_state.config.max_history_size as usize,
history_path.clone(), history_path,
) )
.map(|inner| { .map(|inner| {
let boxed: Box<dyn ReedlineHistory> = Box::new(inner); let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
@ -86,7 +90,7 @@ impl Command for History {
.ok(), .ok(),
}; };
match history.file_format { match engine_state.config.history_file_format {
HistoryFileFormat::PlainText => Ok(history_reader HistoryFileFormat::PlainText => Ok(history_reader
.and_then(|h| { .and_then(|h| {
h.search(SearchQuery::everything(SearchDirection::Forward, None)) h.search(SearchQuery::everything(SearchDirection::Forward, None))
@ -103,10 +107,7 @@ impl Command for History {
) )
}) })
}) })
.ok_or(ShellError::FileNotFound { .ok_or(ShellError::FileNotFound(head))?
file: history_path.display().to_string(),
span: head,
})?
.into_pipeline_data(ctrlc)), .into_pipeline_data(ctrlc)),
HistoryFileFormat::Sqlite => Ok(history_reader HistoryFileFormat::Sqlite => Ok(history_reader
.and_then(|h| { .and_then(|h| {
@ -118,15 +119,12 @@ impl Command for History {
create_history_record(idx, entry, long, head) create_history_record(idx, entry, long, head)
}) })
}) })
.ok_or(ShellError::FileNotFound { .ok_or(ShellError::FileNotFound(head))?
file: history_path.display().to_string(),
span: head,
})?
.into_pipeline_data(ctrlc)), .into_pipeline_data(ctrlc)),
} }
} }
} else { } else {
Err(ShellError::ConfigDirNotFound { span: Some(head) }) Err(ShellError::FileNotFound(head))
} }
} }

View File

@ -1,5 +0,0 @@
mod history_;
mod history_session;
pub use history_::History;
pub use history_session::HistorySession;

View File

@ -1,4 +1,8 @@
use nu_engine::command_prelude::*; use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct HistorySession; pub struct HistorySession;
@ -14,7 +18,7 @@ impl Command for HistorySession {
fn signature(&self) -> nu_protocol::Signature { fn signature(&self) -> nu_protocol::Signature {
Signature::build("history session") Signature::build("history session")
.category(Category::History) .category(Category::Misc)
.input_output_types(vec![(Type::Nothing, Type::Int)]) .input_output_types(vec![(Type::Nothing, Type::Int)])
} }

View File

@ -1,4 +1,9 @@
use nu_engine::{command_prelude::*, get_full_help}; use nu_engine::get_full_help;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct Keybindings; pub struct Keybindings;

View File

@ -1,4 +1,8 @@
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
};
use reedline::get_reedline_default_keybindings; use reedline::get_reedline_default_keybindings;
#[derive(Clone)] #[derive(Clone)]

View File

@ -1,4 +1,9 @@
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type,
Value,
};
use reedline::{ use reedline::{
get_reedline_edit_commands, get_reedline_keybinding_modifiers, get_reedline_keycodes, get_reedline_edit_commands, get_reedline_keybinding_modifiers, get_reedline_keycodes,
get_reedline_prompt_edit_modes, get_reedline_reedline_events, get_reedline_prompt_edit_modes, get_reedline_reedline_events,

View File

@ -1,7 +1,12 @@
use crossterm::{ use crossterm::execute;
event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand, use crossterm::QueueableCommand;
use crossterm::{event::Event, event::KeyCode, event::KeyEvent, terminal};
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type,
Value,
}; };
use nu_engine::command_prelude::*;
use std::io::{stdout, Write}; use std::io::{stdout, Write};
#[derive(Clone)] #[derive(Clone)]
@ -40,13 +45,13 @@ impl Command for KeybindingsListen {
Ok(v) => Ok(v.into_pipeline_data()), Ok(v) => Ok(v.into_pipeline_data()),
Err(e) => { Err(e) => {
terminal::disable_raw_mode()?; terminal::disable_raw_mode()?;
Err(ShellError::GenericError { Err(ShellError::GenericError(
error: "Error with input".into(), "Error with input".to_string(),
msg: "".into(), "".to_string(),
span: None, None,
help: Some(e.to_string()), Some(e.to_string()),
inner: vec![], Vec::new(),
}) ))
} }
} }
} }
@ -107,7 +112,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
let o = match v { let o = match v {
Value::Record { val, .. } => val Value::Record { val, .. } => val
.iter() .iter()
.map(|(x, y)| format!("{}: {}", x, y.to_expanded_string("", config))) .map(|(x, y)| format!("{}: {}", x, y.into_string("", config)))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join(", "), .join(", "),

View File

@ -1,13 +1,15 @@
mod commandline; mod commandline;
mod default_context; mod default_context;
mod history; mod history;
mod history_session;
mod keybindings; mod keybindings;
mod keybindings_default; mod keybindings_default;
mod keybindings_list; mod keybindings_list;
mod keybindings_listen; mod keybindings_listen;
pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor}; pub use commandline::Commandline;
pub use history::{History, HistorySession}; pub use history::History;
pub use history_session::HistorySession;
pub use keybindings::Keybindings; pub use keybindings::Keybindings;
pub use keybindings_default::KeybindingsDefault; pub use keybindings_default::KeybindingsDefault;
pub use keybindings_list::KeybindingsList; pub use keybindings_list::KeybindingsList;

View File

@ -13,13 +13,13 @@ pub trait Completer {
offset: usize, offset: usize,
pos: usize, pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion>; ) -> Vec<Suggestion>;
fn get_sort_by(&self) -> SortBy { fn get_sort_by(&self) -> SortBy {
SortBy::Ascending SortBy::Ascending
} }
fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> { fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string(); let prefix_str = String::from_utf8_lossy(&prefix).to_string();
let mut filtered_items = items; let mut filtered_items = items;
@ -27,13 +27,13 @@ pub trait Completer {
match self.get_sort_by() { match self.get_sort_by() {
SortBy::LevenshteinDistance => { SortBy::LevenshteinDistance => {
filtered_items.sort_by(|a, b| { filtered_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value); let a_distance = levenshtein_distance(&prefix_str, &a.value);
let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value); let b_distance = levenshtein_distance(&prefix_str, &b.value);
a_distance.cmp(&b_distance) a_distance.cmp(&b_distance)
}); });
} }
SortBy::Ascending => { SortBy::Ascending => {
filtered_items.sort_by(|a, b| a.suggestion.value.cmp(&b.suggestion.value)); filtered_items.sort_by(|a, b| a.value.cmp(&b.value));
} }
SortBy::None => {} SortBy::None => {}
}; };
@ -41,25 +41,3 @@ pub trait Completer {
filtered_items filtered_items
} }
} }
#[derive(Debug, Default, PartialEq)]
pub struct SemanticSuggestion {
pub suggestion: Suggestion,
pub kind: Option<SuggestionKind>,
}
// TODO: think about name: maybe suggestion context?
#[derive(Clone, Debug, PartialEq)]
pub enum SuggestionKind {
Command(nu_protocol::engine::CommandType),
Type(nu_protocol::Type),
}
impl From<Suggestion> for SemanticSuggestion {
fn from(suggestion: Suggestion) -> Self {
Self {
suggestion,
..Default::default()
}
}
}

View File

@ -1,17 +1,12 @@
use crate::{ use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy},
SuggestionKind,
};
use nu_parser::FlatShape; use nu_parser::FlatShape;
use nu_protocol::{ use nu_protocol::{
engine::{CachedFile, EngineState, StateWorkingSet}, engine::{EngineState, StateWorkingSet},
Span, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::sync::Arc; use std::sync::Arc;
use super::SemanticSuggestion;
pub struct CommandCompletion { pub struct CommandCompletion {
engine_state: Arc<EngineState>, engine_state: Arc<EngineState>,
flattened: Vec<(Span, FlatShape)>, flattened: Vec<(Span, FlatShape)>,
@ -48,9 +43,9 @@ impl CommandCompletion {
if let Some(paths) = paths { if let Some(paths) = paths {
if let Ok(paths) = paths.as_list() { if let Ok(paths) = paths.as_list() {
for path in paths { for path in paths {
let path = path.coerce_str().unwrap_or_default(); let path = path.as_string().unwrap_or_default();
if let Ok(mut contents) = std::fs::read_dir(path.as_ref()) { if let Ok(mut contents) = std::fs::read_dir(path) {
while let Some(Ok(item)) = contents.next() { while let Some(Ok(item)) = contents.next() {
if self.engine_state.config.max_external_completion_results if self.engine_state.config.max_external_completion_results
> executables.len() as i64 > executables.len() as i64
@ -88,7 +83,7 @@ impl CommandCompletion {
offset: usize, offset: usize,
find_externals: bool, find_externals: bool,
match_algorithm: MatchAlgorithm, match_algorithm: MatchAlgorithm,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
let partial = working_set.get_span_contents(span); let partial = working_set.get_span_contents(span);
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial); let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
@ -96,16 +91,12 @@ impl CommandCompletion {
let mut results = working_set let mut results = working_set
.find_commands_by_predicate(filter_predicate, true) .find_commands_by_predicate(filter_predicate, true)
.into_iter() .into_iter()
.map(move |x| SemanticSuggestion { .map(move |x| Suggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&x.0).to_string(), value: String::from_utf8_lossy(&x.0).to_string(),
description: x.1, description: x.1,
style: None,
extra: None, extra: None,
span: reedline::Span::new(span.start - offset, span.end - offset), span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true, append_whitespace: true,
},
kind: Some(SuggestionKind::Command(x.2)),
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@ -116,34 +107,25 @@ impl CommandCompletion {
let results_external = self let results_external = self
.external_command_completion(&partial, match_algorithm) .external_command_completion(&partial, match_algorithm)
.into_iter() .into_iter()
.map(move |x| SemanticSuggestion { .map(move |x| Suggestion {
suggestion: Suggestion {
value: x, value: x,
description: None, description: None,
style: None,
extra: None, extra: None,
span: reedline::Span::new(span.start - offset, span.end - offset), span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true, append_whitespace: true,
},
// TODO: is there a way to create a test?
kind: None,
}); });
let results_strings: Vec<String> = let results_strings: Vec<String> =
results.iter().map(|x| x.suggestion.value.clone()).collect(); results.clone().into_iter().map(|x| x.value).collect();
for external in results_external { for external in results_external {
if results_strings.contains(&external.suggestion.value) { if results_strings.contains(&external.value) {
results.push(SemanticSuggestion { results.push(Suggestion {
suggestion: Suggestion { value: format!("^{}", external.value),
value: format!("^{}", external.suggestion.value),
description: None, description: None,
style: None,
extra: None, extra: None,
span: external.suggestion.span, span: external.span,
append_whitespace: true, append_whitespace: true,
},
kind: external.kind,
}) })
} else { } else {
results.push(external) results.push(external)
@ -166,7 +148,7 @@ impl Completer for CommandCompletion {
offset: usize, offset: usize,
pos: usize, pos: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
let last = self let last = self
.flattened .flattened
.iter() .iter()
@ -244,9 +226,8 @@ pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
} }
} }
pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool { pub fn is_passthrough_command(working_set_file_contents: &[(Vec<u8>, usize, usize)]) -> bool {
for cached_file in working_set_file_contents { for (contents, _, _) in working_set_file_contents {
let contents = &cached_file.content;
let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|'); let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0); let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
@ -269,7 +250,7 @@ mod command_completions_tests {
#[test] #[test]
fn test_find_non_whitespace_index() { fn test_find_non_whitespace_index() {
let commands = [ let commands = vec![
(" hello", 4), (" hello", 4),
("sudo ", 0), ("sudo ", 0),
(" sudo ", 2), (" sudo ", 2),
@ -289,7 +270,7 @@ mod command_completions_tests {
#[test] #[test]
fn test_is_last_command_passthrough() { fn test_is_last_command_passthrough() {
let commands = [ let commands = vec![
(" hello", false), (" hello", false),
(" sudo ", true), (" sudo ", true),
("sudo ", true), ("sudo ", true),
@ -311,7 +292,7 @@ mod command_completions_tests {
let input = ele.0.as_bytes(); let input = ele.0.as_bytes();
let mut engine_state = EngineState::new(); let mut engine_state = EngineState::new();
engine_state.add_file("test.nu".into(), Arc::new([])); engine_state.add_file("test.nu".into(), vec![]);
let delta = { let delta = {
let mut working_set = StateWorkingSet::new(&engine_state); let mut working_set = StateWorkingSet::new(&engine_state);

View File

@ -2,18 +2,16 @@ use crate::completions::{
CommandCompletion, Completer, CompletionOptions, CustomCompletion, DirectoryCompletion, CommandCompletion, Completer, CompletionOptions, CustomCompletion, DirectoryCompletion,
DotNuCompletion, FileCompletion, FlagCompletion, VariableCompletion, DotNuCompletion, FileCompletion, FlagCompletion, VariableCompletion,
}; };
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
use nu_engine::eval_block; use nu_engine::eval_block;
use nu_parser::{flatten_pipeline_element, parse, FlatShape}; use nu_parser::{flatten_expression, parse, FlatShape};
use nu_protocol::{ use nu_protocol::{
debugger::WithoutDebug, ast::PipelineElement,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
BlockId, PipelineData, Span, Value, BlockId, PipelineData, Span, Value,
}; };
use reedline::{Completer as ReedlineCompleter, Suggestion}; use reedline::{Completer as ReedlineCompleter, Suggestion};
use std::{str, sync::Arc}; use std::str;
use std::sync::Arc;
use super::base::{SemanticSuggestion, SuggestionKind};
#[derive(Clone)] #[derive(Clone)]
pub struct NuCompleter { pub struct NuCompleter {
@ -25,14 +23,10 @@ impl NuCompleter {
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self { pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
Self { Self {
engine_state, engine_state,
stack: stack.reset_stdio().capture(), stack,
} }
} }
pub fn fetch_completions_at(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> {
self.completion_helper(line, pos)
}
// Process the completion for a given completer // Process the completion for a given completer
fn process_completion<T: Completer>( fn process_completion<T: Completer>(
&self, &self,
@ -42,7 +36,7 @@ impl NuCompleter {
new_span: Span, new_span: Span,
offset: usize, offset: usize,
pos: usize, pos: usize,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
let config = self.engine_state.get_config(); let config = self.engine_state.get_config();
let options = CompletionOptions { let options = CompletionOptions {
@ -67,14 +61,13 @@ impl NuCompleter {
spans: &[String], spans: &[String],
offset: usize, offset: usize,
span: Span, span: Span,
) -> Option<Vec<SemanticSuggestion>> { ) -> Option<Vec<Suggestion>> {
let stack = self.stack.clone();
let block = self.engine_state.get_block(block_id); let block = self.engine_state.get_block(block_id);
let mut callee_stack = self let mut callee_stack = stack.gather_captures(&self.engine_state, &block.captures);
.stack
.gather_captures(&self.engine_state, &block.captures);
// Line // Line
if let Some(pos_arg) = block.signature.required_positional.first() { if let Some(pos_arg) = block.signature.required_positional.get(0) {
if let Some(var_id) = pos_arg.var_id { if let Some(var_id) = pos_arg.var_id {
callee_stack.add_var( callee_stack.add_var(
var_id, var_id,
@ -89,11 +82,13 @@ impl NuCompleter {
} }
} }
let result = eval_block::<WithoutDebug>( let result = eval_block(
&self.engine_state, &self.engine_state,
&mut callee_stack, &mut callee_stack,
block, block,
PipelineData::empty(), PipelineData::empty(),
true,
true,
); );
match result { match result {
@ -112,55 +107,52 @@ impl NuCompleter {
None None
} }
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<SemanticSuggestion> { fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
let mut working_set = StateWorkingSet::new(&self.engine_state); let mut working_set = StateWorkingSet::new(&self.engine_state);
let offset = working_set.next_span_start(); let offset = working_set.next_span_start();
// TODO: Callers should be trimming the line themselves
let line = if line.len() > pos { &line[..pos] } else { line };
// Adjust offset so that the spans of the suggestions will start at the right
// place even with `only_buffer_difference: true`
let fake_offset = offset + line.len() - pos;
let pos = offset + line.len();
let initial_line = line.to_string(); let initial_line = line.to_string();
let mut line = line.to_string(); let mut line = line.to_string();
line.push('a'); line.insert(pos, 'a');
let pos = offset + pos;
let config = self.engine_state.get_config(); let config = self.engine_state.get_config();
let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false); let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
for pipeline in &output.pipelines { for pipeline in output.pipelines.into_iter() {
for pipeline_element in &pipeline.elements { for pipeline_element in pipeline.elements {
let flattened = flatten_pipeline_element(&working_set, pipeline_element); match pipeline_element {
PipelineElement::Expression(_, expr)
| PipelineElement::Redirection(_, _, expr)
| PipelineElement::And(_, expr)
| PipelineElement::Or(_, expr)
| PipelineElement::SameTargetRedirection { cmd: (_, expr), .. }
| PipelineElement::SeparateRedirection { out: (_, expr), .. } => {
let flattened: Vec<_> = flatten_expression(&working_set, &expr);
let mut spans: Vec<String> = vec![]; let mut spans: Vec<String> = vec![];
for (flat_idx, flat) in flattened.iter().enumerate() { for (flat_idx, flat) in flattened.iter().enumerate() {
let is_passthrough_command = spans let is_passthrough_command = spans
.first() .first()
.filter(|content| content.as_str() == "sudo" || content.as_str() == "doas") .filter(|content| {
content.as_str() == "sudo" || content.as_str() == "doas"
})
.is_some(); .is_some();
// Read the current spam to string // Read the current spam to string
let current_span = working_set.get_span_contents(flat.0).to_vec(); let current_span = working_set.get_span_contents(flat.0).to_vec();
let current_span_str = String::from_utf8_lossy(&current_span); let current_span_str = String::from_utf8_lossy(&current_span);
let is_last_span = pos >= flat.0.start && pos < flat.0.end;
// Skip the last 'a' as span item // Skip the last 'a' as span item
if is_last_span { if flat_idx == flattened.len() - 1 {
let offset = pos - flat.0.start; let mut chars = current_span_str.chars();
if offset == 0 { chars.next_back();
spans.push(String::new()) let current_span_str = chars.as_str().to_owned();
} else { spans.push(current_span_str.to_string());
let mut current_span_str = current_span_str.to_string();
current_span_str.remove(offset);
spans.push(current_span_str);
}
} else { } else {
spans.push(current_span_str.to_string()); spans.push(current_span_str.to_string());
} }
// Complete based on the last span // Complete based on the last span
if is_last_span { if pos >= flat.0.start && pos < flat.0.end {
// Context variables // Context variables
let most_left_var = let most_left_var =
most_left_variable(flat_idx, &working_set, flattened.clone()); most_left_variable(flat_idx, &working_set, flattened.clone());
@ -186,7 +178,7 @@ impl NuCompleter {
&working_set, &working_set,
prefix, prefix,
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
} }
@ -194,13 +186,13 @@ impl NuCompleter {
// Flags completion // Flags completion
if prefix.starts_with(b"-") { if prefix.starts_with(b"-") {
// Try to complete flag internally // Try to complete flag internally
let mut completer = FlagCompletion::new(pipeline_element.expr.clone()); let mut completer = FlagCompletion::new(expr.clone());
let result = self.process_completion( let result = self.process_completion(
&mut completer, &mut completer,
&working_set, &working_set,
prefix.clone(), prefix.clone(),
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
@ -211,12 +203,9 @@ impl NuCompleter {
// We got no results for internal completion // We got no results for internal completion
// now we can check if external completer is set and use it // now we can check if external completer is set and use it
if let Some(block_id) = config.external_completer { if let Some(block_id) = config.external_completer {
if let Some(external_result) = self.external_completion( if let Some(external_result) = self
block_id, .external_completion(block_id, &spans, offset, new_span)
&spans, {
fake_offset,
new_span,
) {
return external_result; return external_result;
} }
} }
@ -224,7 +213,8 @@ impl NuCompleter {
// specially check if it is currently empty - always complete commands // specially check if it is currently empty - always complete commands
if (is_passthrough_command && flat_idx == 1) if (is_passthrough_command && flat_idx == 1)
|| (flat_idx == 0 && working_set.get_span_contents(new_span).is_empty()) || (flat_idx == 0
&& working_set.get_span_contents(new_span).is_empty())
{ {
let mut completer = CommandCompletion::new( let mut completer = CommandCompletion::new(
self.engine_state.clone(), self.engine_state.clone(),
@ -239,7 +229,7 @@ impl NuCompleter {
&working_set, &working_set,
prefix, prefix,
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
} }
@ -252,35 +242,29 @@ impl NuCompleter {
working_set.get_span_contents(previous_expr.0).to_vec(); working_set.get_span_contents(previous_expr.0).to_vec();
// Completion for .nu files // Completion for .nu files
if prev_expr_str == b"use" if prev_expr_str == b"use" || prev_expr_str == b"source-env"
|| prev_expr_str == b"overlay use"
|| prev_expr_str == b"source-env"
{ {
let mut completer = DotNuCompletion::new( let mut completer =
self.engine_state.clone(), DotNuCompletion::new(self.engine_state.clone());
self.stack.clone(),
);
return self.process_completion( return self.process_completion(
&mut completer, &mut completer,
&working_set, &working_set,
prefix, prefix,
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
} else if prev_expr_str == b"ls" { } else if prev_expr_str == b"ls" {
let mut completer = FileCompletion::new( let mut completer =
self.engine_state.clone(), FileCompletion::new(self.engine_state.clone());
self.stack.clone(),
);
return self.process_completion( return self.process_completion(
&mut completer, &mut completer,
&working_set, &working_set,
prefix, prefix,
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
} }
@ -302,37 +286,33 @@ impl NuCompleter {
&working_set, &working_set,
prefix, prefix,
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
} }
FlatShape::Directory => { FlatShape::Directory => {
let mut completer = DirectoryCompletion::new( let mut completer =
self.engine_state.clone(), DirectoryCompletion::new(self.engine_state.clone());
self.stack.clone(),
);
return self.process_completion( return self.process_completion(
&mut completer, &mut completer,
&working_set, &working_set,
prefix, prefix,
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
} }
FlatShape::Filepath | FlatShape::GlobPattern => { FlatShape::Filepath | FlatShape::GlobPattern => {
let mut completer = FileCompletion::new( let mut completer =
self.engine_state.clone(), FileCompletion::new(self.engine_state.clone());
self.stack.clone(),
);
return self.process_completion( return self.process_completion(
&mut completer, &mut completer,
&working_set, &working_set,
prefix, prefix,
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
} }
@ -351,7 +331,7 @@ impl NuCompleter {
&working_set, &working_set,
prefix.clone(), prefix.clone(),
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
@ -362,26 +342,23 @@ impl NuCompleter {
// Try to complete using an external completer (if set) // Try to complete using an external completer (if set)
if let Some(block_id) = config.external_completer { if let Some(block_id) = config.external_completer {
if let Some(external_result) = self.external_completion( if let Some(external_result) = self.external_completion(
block_id, block_id, &spans, offset, new_span,
&spans,
fake_offset,
new_span,
) { ) {
if !external_result.is_empty() {
return external_result; return external_result;
} }
} }
}
// Check for file completion // Check for file completion
let mut completer = FileCompletion::new( let mut completer =
self.engine_state.clone(), FileCompletion::new(self.engine_state.clone());
self.stack.clone(),
);
out = self.process_completion( out = self.process_completion(
&mut completer, &mut completer,
&working_set, &working_set,
prefix, prefix,
new_span, new_span,
fake_offset, offset,
pos, pos,
); );
@ -394,6 +371,8 @@ impl NuCompleter {
} }
} }
} }
}
}
vec![] vec![]
} }
@ -402,9 +381,6 @@ impl NuCompleter {
impl ReedlineCompleter for NuCompleter { impl ReedlineCompleter for NuCompleter {
fn complete(&mut self, line: &str, pos: usize) -> Vec<Suggestion> { fn complete(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
self.completion_helper(line, pos) self.completion_helper(line, pos)
.into_iter()
.map(|s| s.suggestion)
.collect()
} }
} }
@ -462,23 +438,19 @@ pub fn map_value_completions<'a>(
list: impl Iterator<Item = &'a Value>, list: impl Iterator<Item = &'a Value>,
span: Span, span: Span,
offset: usize, offset: usize,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
list.filter_map(move |x| { list.filter_map(move |x| {
// Match for string values // Match for string values
if let Ok(s) = x.coerce_string() { if let Ok(s) = x.as_string() {
return Some(SemanticSuggestion { return Some(Suggestion {
suggestion: Suggestion {
value: s, value: s,
description: None, description: None,
style: None,
extra: None, extra: None,
span: reedline::Span { span: reedline::Span {
start: span.start - offset, start: span.start - offset,
end: span.end - offset, end: span.end - offset,
}, },
append_whitespace: false, append_whitespace: false,
},
kind: Some(SuggestionKind::Type(x.get_type())),
}); });
} }
@ -487,7 +459,6 @@ pub fn map_value_completions<'a>(
let mut suggestion = Suggestion { let mut suggestion = Suggestion {
value: String::from(""), // Initialize with empty string value: String::from(""), // Initialize with empty string
description: None, description: None,
style: None,
extra: None, extra: None,
span: reedline::Span { span: reedline::Span {
start: span.start - offset, start: span.start - offset,
@ -501,7 +472,7 @@ pub fn map_value_completions<'a>(
// Match `value` column // Match `value` column
if it.0 == "value" { if it.0 == "value" {
// Convert the value to string // Convert the value to string
if let Ok(val_str) = it.1.coerce_string() { if let Ok(val_str) = it.1.as_string() {
// Update the suggestion value // Update the suggestion value
suggestion.value = val_str; suggestion.value = val_str;
} }
@ -510,27 +481,14 @@ pub fn map_value_completions<'a>(
// Match `description` column // Match `description` column
if it.0 == "description" { if it.0 == "description" {
// Convert the value to string // Convert the value to string
if let Ok(desc_str) = it.1.coerce_string() { if let Ok(desc_str) = it.1.as_string() {
// Update the suggestion value // Update the suggestion value
suggestion.description = Some(desc_str); suggestion.description = Some(desc_str);
} }
} }
// Match `style` column
if it.0 == "style" {
// Convert the value to string
suggestion.style = match it.1 {
Value::String { val, .. } => Some(lookup_ansi_color_style(val)),
Value::Record { .. } => Some(color_record_to_nustyle(it.1)),
_ => None,
};
}
}); });
return Some(SemanticSuggestion { return Some(suggestion);
suggestion,
kind: Some(SuggestionKind::Type(x.get_type())),
});
} }
None None
@ -561,7 +519,7 @@ mod completer_tests {
); );
let mut completer = NuCompleter::new(engine_state.into(), Stack::new()); let mut completer = NuCompleter::new(engine_state.into(), Stack::new());
let dataset = [ let dataset = vec![
("sudo", false, "", Vec::new()), ("sudo", false, "", Vec::new()),
("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]), ("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]),
(" sudo", false, "", Vec::new()), (" sudo", false, "", Vec::new()),
@ -582,13 +540,13 @@ mod completer_tests {
// Test whether the result begins with the expected value // Test whether the result begins with the expected value
result result
.iter() .iter()
.for_each(|x| assert!(x.suggestion.value.starts_with(begins_with))); .for_each(|x| assert!(x.value.starts_with(begins_with)));
// Test whether the result contains all the expected values // Test whether the result contains all the expected values
assert_eq!( assert_eq!(
result result
.iter() .iter()
.map(|x| expected_values.contains(&x.suggestion.value.as_str())) .map(|x| expected_values.contains(&x.value.as_str()))
.filter(|x| *x) .filter(|x| *x)
.count(), .count(),
expected_values.len(), expected_values.len(),


@ -1,16 +1,7 @@
use crate::completions::{matches, CompletionOptions}; use crate::completions::{matches, CompletionOptions};
use nu_ansi_term::Style;
use nu_engine::env_to_string;
use nu_path::home_dir; use nu_path::home_dir;
use nu_protocol::{ use nu_protocol::{engine::StateWorkingSet, Span};
engine::{EngineState, Stack, StateWorkingSet}, use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};
Span,
};
use nu_utils::get_ls_colors;
use std::{
ffi::OsStr,
path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP},
};
fn complete_rec( fn complete_rec(
partial: &[String], partial: &[String],
@ -31,10 +22,7 @@ fn complete_rec(
Some(base) if matches(base, &entry_name, options) => { Some(base) if matches(base, &entry_name, options) => {
let partial = &partial[1..]; let partial = &partial[1..];
if !partial.is_empty() || isdir { if !partial.is_empty() || isdir {
completions.extend(complete_rec(partial, &path, options, dir, isdir)); completions.extend(complete_rec(partial, &path, options, dir, isdir))
if entry_name.eq(base) {
break;
}
} else { } else {
completions.push(path) completions.push(path)
} }
@ -101,31 +89,12 @@ pub fn complete_item(
partial: &str, partial: &str,
cwd: &str, cwd: &str,
options: &CompletionOptions, options: &CompletionOptions,
engine_state: &EngineState, ) -> Vec<(nu_protocol::Span, String)> {
stack: &Stack,
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
let partial = surround_remove(partial); let partial = surround_remove(partial);
let isdir = partial.ends_with(is_separator); let isdir = partial.ends_with(is_separator);
let cwd_pathbuf = Path::new(cwd).to_path_buf(); let cwd_pathbuf = Path::new(cwd).to_path_buf();
let ls_colors = (engine_state.config.use_ls_colors_completions
&& engine_state.config.use_ansi_coloring)
.then(|| {
let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
Some(v) => env_to_string("LS_COLORS", &v, engine_state, stack).ok(),
None => None,
};
get_ls_colors(ls_colors_env_str)
});
let mut original_cwd = OriginalCwd::None; let mut original_cwd = OriginalCwd::None;
let mut components_vec: Vec<Component> = Path::new(&partial).components().collect(); let mut components = Path::new(&partial).components().peekable();
// Path components that end with a single "." get normalized away,
// so if the partial path ends in a literal "." we must add it back in manually
if partial.ends_with('.') && partial.len() > 1 {
components_vec.push(Component::Normal(OsStr::new(".")));
};
let mut components = components_vec.into_iter().peekable();
let mut cwd = match components.peek().cloned() { let mut cwd = match components.peek().cloned() {
Some(c @ Component::Prefix(..)) => { Some(c @ Component::Prefix(..)) => {
// windows only by definition // windows only by definition
@ -176,35 +145,12 @@ pub fn complete_item(
complete_rec(partial.as_slice(), &cwd, options, want_directory, isdir) complete_rec(partial.as_slice(), &cwd, options, want_directory, isdir)
.into_iter() .into_iter()
.map(|p| { .map(|p| (span, escape_path(original_cwd.apply(&p), want_directory)))
let path = original_cwd.apply(&p);
let style = ls_colors.as_ref().map(|lsc| {
lsc.style_for_path_with_metadata(
&path,
std::fs::symlink_metadata(&path).ok().as_ref(),
)
.map(lscolors::Style::to_nu_ansi_term_style)
.unwrap_or_default()
});
(span, escape_path(path, want_directory), style)
})
.collect() .collect()
} }
// Fix files or folders with quotes or hashes // Fix files or folders with quotes or hashes
pub fn escape_path(path: String, dir: bool) -> String { pub fn escape_path(path: String, dir: bool) -> String {
// make glob pattern have the highest priority.
let glob_contaminated = path.contains(['[', '*', ']', '?']);
if glob_contaminated {
return if path.contains('\'') {
// decide to use double quote, also need to escape `"` in path
// or else users can't do anything with completed path either.
format!("\"{}\"", path.replace('"', r#"\""#))
} else {
format!("'{path}'")
};
}
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']); let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']); let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
let maybe_flag = path.starts_with('-'); let maybe_flag = path.starts_with('-');


@ -1,7 +1,8 @@
use std::fmt::Display;
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher}; use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str; use nu_parser::trim_quotes_str;
use nu_protocol::CompletionAlgorithm; use nu_protocol::CompletionAlgorithm;
use std::fmt::Display;
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub enum SortBy { pub enum SortBy {
@ -95,6 +96,7 @@ impl std::error::Error for InvalidMatchAlgorithm {}
pub struct CompletionOptions { pub struct CompletionOptions {
pub case_sensitive: bool, pub case_sensitive: bool,
pub positional: bool, pub positional: bool,
pub sort_by: SortBy,
pub match_algorithm: MatchAlgorithm, pub match_algorithm: MatchAlgorithm,
} }
@ -103,6 +105,7 @@ impl Default for CompletionOptions {
Self { Self {
case_sensitive: true, case_sensitive: true,
positional: true, positional: true,
sort_by: SortBy::Ascending,
match_algorithm: MatchAlgorithm::Prefix, match_algorithm: MatchAlgorithm::Prefix,
} }
} }


@ -1,16 +1,16 @@
use crate::completions::{ use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
completer::map_value_completions, Completer, CompletionOptions, MatchAlgorithm,
SemanticSuggestion, SortBy,
};
use nu_engine::eval_call; use nu_engine::eval_call;
use nu_protocol::{ use nu_protocol::{
ast::{Argument, Call, Expr, Expression}, ast::{Argument, Call, Expr, Expression},
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
PipelineData, Span, Type, Value, PipelineData, Span, Type, Value,
}; };
use nu_utils::IgnoreCaseExt; use nu_utils::IgnoreCaseExt;
use std::{collections::HashMap, sync::Arc}; use reedline::Suggestion;
use std::collections::HashMap;
use std::sync::Arc;
use super::completer::map_value_completions;
pub struct CustomCompletion { pub struct CustomCompletion {
engine_state: Arc<EngineState>, engine_state: Arc<EngineState>,
@ -24,7 +24,7 @@ impl CustomCompletion {
pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self { pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self {
Self { Self {
engine_state, engine_state,
stack: stack.reset_stdio().capture(), stack,
decl_id, decl_id,
line, line,
sort_by: SortBy::None, sort_by: SortBy::None,
@ -41,12 +41,12 @@ impl Completer for CustomCompletion {
offset: usize, offset: usize,
pos: usize, pos: usize,
completion_options: &CompletionOptions, completion_options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
// Line position // Line position
let line_pos = pos - offset; let line_pos = pos - offset;
// Call custom declaration // Call custom declaration
let result = eval_call::<WithoutDebug>( let result = eval_call(
&self.engine_state, &self.engine_state,
&mut self.stack, &mut self.stack,
&Call { &Call {
@ -66,6 +66,8 @@ impl Completer for CustomCompletion {
custom_completion: None, custom_completion: None,
}), }),
], ],
redirect_stdout: true,
redirect_stderr: true,
parser_info: HashMap::new(), parser_info: HashMap::new(),
}, },
PipelineData::empty(), PipelineData::empty(),
@ -108,9 +110,14 @@ impl Completer for CustomCompletion {
.get("positional") .get("positional")
.and_then(|val| val.as_bool().ok()) .and_then(|val| val.as_bool().ok())
.unwrap_or(true), .unwrap_or(true),
sort_by: if should_sort {
SortBy::Ascending
} else {
SortBy::None
},
match_algorithm: match options.get("completion_algorithm") { match_algorithm: match options.get("completion_algorithm") {
Some(option) => option Some(option) => option
.coerce_string() .as_string()
.ok() .ok()
.and_then(|option| option.try_into().ok()) .and_then(|option| option.try_into().ok())
.unwrap_or(MatchAlgorithm::Prefix), .unwrap_or(MatchAlgorithm::Prefix),
@ -139,22 +146,15 @@ impl Completer for CustomCompletion {
} }
} }
fn filter( fn filter(prefix: &[u8], items: Vec<Suggestion>, options: &CompletionOptions) -> Vec<Suggestion> {
prefix: &[u8],
items: Vec<SemanticSuggestion>,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
items items
.into_iter() .into_iter()
.filter(|it| match options.match_algorithm { .filter(|it| match options.match_algorithm {
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) { MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
(true, true) => it.suggestion.value.as_bytes().starts_with(prefix), (true, true) => it.value.as_bytes().starts_with(prefix),
(true, false) => it (true, false) => it.value.contains(std::str::from_utf8(prefix).unwrap_or("")),
.suggestion
.value
.contains(std::str::from_utf8(prefix).unwrap_or("")),
(false, positional) => { (false, positional) => {
let value = it.suggestion.value.to_folded_case(); let value = it.value.to_folded_case();
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case(); let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
if positional { if positional {
value.starts_with(&prefix) value.starts_with(&prefix)
@ -165,7 +165,7 @@ fn filter(
}, },
MatchAlgorithm::Fuzzy => options MatchAlgorithm::Fuzzy => options
.match_algorithm .match_algorithm
.matches_u8(it.suggestion.value.as_bytes(), prefix), .matches_u8(it.value.as_bytes(), prefix),
}) })
.collect() .collect()
} }


@ -2,31 +2,22 @@ use crate::completions::{
completion_common::{adjust_if_intermediate, complete_item, AdjustView}, completion_common::{adjust_if_intermediate, complete_item, AdjustView},
Completer, CompletionOptions, SortBy, Completer, CompletionOptions, SortBy,
}; };
use nu_ansi_term::Style;
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, StateWorkingSet},
levenshtein_distance, Span, levenshtein_distance, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::{ use std::path::{Path, MAIN_SEPARATOR as SEP};
path::{Path, MAIN_SEPARATOR as SEP}, use std::sync::Arc;
sync::Arc,
};
use super::SemanticSuggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct DirectoryCompletion { pub struct DirectoryCompletion {
engine_state: Arc<EngineState>, engine_state: Arc<EngineState>,
stack: Stack,
} }
impl DirectoryCompletion { impl DirectoryCompletion {
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self { pub fn new(engine_state: Arc<EngineState>) -> Self {
Self { Self { engine_state }
engine_state,
stack,
}
} }
} }
@ -39,7 +30,7 @@ impl Completer for DirectoryCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span); let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);
// Filter only the folders // Filter only the folders
@ -48,24 +39,17 @@ impl Completer for DirectoryCompletion {
&prefix, &prefix,
&self.engine_state.current_work_dir(), &self.engine_state.current_work_dir(),
options, options,
self.engine_state.as_ref(),
&self.stack,
) )
.into_iter() .into_iter()
.map(move |x| SemanticSuggestion { .map(move |x| Suggestion {
suggestion: Suggestion {
value: x.1, value: x.1,
description: None, description: None,
style: x.2,
extra: None, extra: None,
span: reedline::Span { span: reedline::Span {
start: x.0.start - offset, start: x.0.start - offset,
end: x.0.end - offset, end: x.0.end - offset,
}, },
append_whitespace: false, append_whitespace: false,
},
// TODO????
kind: None,
}) })
.collect(); .collect();
@ -73,7 +57,7 @@ impl Completer for DirectoryCompletion {
} }
// Sort results prioritizing the non hidden folders // Sort results prioritizing the non hidden folders
fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> { fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string(); let prefix_str = String::from_utf8_lossy(&prefix).to_string();
// Sort items // Sort items
@ -83,16 +67,15 @@ impl Completer for DirectoryCompletion {
SortBy::Ascending => { SortBy::Ascending => {
sorted_items.sort_by(|a, b| { sorted_items.sort_by(|a, b| {
// Ignore trailing slashes in folder names when sorting // Ignore trailing slashes in folder names when sorting
a.suggestion a.value
.value
.trim_end_matches(SEP) .trim_end_matches(SEP)
.cmp(b.suggestion.value.trim_end_matches(SEP)) .cmp(b.value.trim_end_matches(SEP))
}); });
} }
SortBy::LevenshteinDistance => { SortBy::LevenshteinDistance => {
sorted_items.sort_by(|a, b| { sorted_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value); let a_distance = levenshtein_distance(&prefix_str, &a.value);
let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value); let b_distance = levenshtein_distance(&prefix_str, &b.value);
a_distance.cmp(&b_distance) a_distance.cmp(&b_distance)
}); });
} }
@ -100,11 +83,11 @@ impl Completer for DirectoryCompletion {
} }
// Separate the results between hidden and non hidden // Separate the results between hidden and non hidden
let mut hidden: Vec<SemanticSuggestion> = vec![]; let mut hidden: Vec<Suggestion> = vec![];
let mut non_hidden: Vec<SemanticSuggestion> = vec![]; let mut non_hidden: Vec<Suggestion> = vec![];
for item in sorted_items.into_iter() { for item in sorted_items.into_iter() {
let item_path = Path::new(&item.suggestion.value); let item_path = Path::new(&item.value);
if let Some(value) = item_path.file_name() { if let Some(value) = item_path.file_name() {
if let Some(value) = value.to_str() { if let Some(value) = value.to_str() {
@ -129,8 +112,6 @@ pub fn directory_completion(
partial: &str, partial: &str,
cwd: &str, cwd: &str,
options: &CompletionOptions, options: &CompletionOptions,
engine_state: &EngineState, ) -> Vec<(nu_protocol::Span, String)> {
stack: &Stack, complete_item(true, span, partial, cwd, options)
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
complete_item(true, span, partial, cwd, options, engine_state, stack)
} }


@ -1,28 +1,22 @@
use crate::completions::{file_path_completion, Completer, CompletionOptions, SortBy}; use crate::completions::{file_path_completion, Completer, CompletionOptions, SortBy};
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, StateWorkingSet},
Span, Span,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::{ use std::{
path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR}, path::{is_separator, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
sync::Arc, sync::Arc,
}; };
use super::SemanticSuggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct DotNuCompletion { pub struct DotNuCompletion {
engine_state: Arc<EngineState>, engine_state: Arc<EngineState>,
stack: Stack,
} }
impl DotNuCompletion { impl DotNuCompletion {
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self { pub fn new(engine_state: Arc<EngineState>) -> Self {
Self { Self { engine_state }
engine_state,
stack,
}
} }
} }
@ -35,7 +29,7 @@ impl Completer for DotNuCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).replace('`', ""); let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");
let mut search_dirs: Vec<String> = vec![]; let mut search_dirs: Vec<String> = vec![];
@ -56,7 +50,7 @@ impl Completer for DotNuCompletion {
.into_iter() .into_iter()
.flat_map(|it| { .flat_map(|it| {
it.iter().map(|x| { it.iter().map(|x| {
x.to_path() x.as_path()
.expect("internal error: failed to convert lib path") .expect("internal error: failed to convert lib path")
}) })
}) })
@ -95,46 +89,29 @@ impl Completer for DotNuCompletion {
// Fetch the files filtering the ones that ends with .nu // Fetch the files filtering the ones that ends with .nu
// and transform them into suggestions // and transform them into suggestions
let output: Vec<SemanticSuggestion> = search_dirs let output: Vec<Suggestion> = search_dirs
.into_iter() .into_iter()
.flat_map(|search_dir| { .flat_map(|it| {
let completions = file_path_completion( file_path_completion(span, &partial, &it, options)
span,
&partial,
&search_dir,
options,
self.engine_state.as_ref(),
&self.stack,
);
completions
.into_iter() .into_iter()
.filter(move |it| { .filter(|it| {
// Different base dir, so we list the .nu files or folders // Different base dir, so we list the .nu files or folders
if !is_current_folder { if !is_current_folder {
it.1.ends_with(".nu") || it.1.ends_with(SEP) it.1.ends_with(".nu") || it.1.ends_with(SEP)
} else { } else {
// Lib dirs, so we filter only the .nu files or directory modules // Lib dirs, so we filter only the .nu files
if it.1.ends_with(SEP) {
Path::new(&search_dir).join(&it.1).join("mod.nu").exists()
} else {
it.1.ends_with(".nu") it.1.ends_with(".nu")
} }
}
}) })
.map(move |x| SemanticSuggestion { .map(move |x| Suggestion {
suggestion: Suggestion {
value: x.1, value: x.1,
description: None, description: None,
style: x.2,
extra: None, extra: None,
span: reedline::Span { span: reedline::Span {
start: x.0.start - offset, start: x.0.start - offset,
end: x.0.end - offset, end: x.0.end - offset,
}, },
append_whitespace: true, append_whitespace: true,
},
// TODO????
kind: None,
}) })
}) })
.collect(); .collect();


@ -2,32 +2,23 @@ use crate::completions::{
completion_common::{adjust_if_intermediate, complete_item, AdjustView}, completion_common::{adjust_if_intermediate, complete_item, AdjustView},
Completer, CompletionOptions, SortBy, Completer, CompletionOptions, SortBy,
}; };
use nu_ansi_term::Style;
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, StateWorkingSet},
levenshtein_distance, Span, levenshtein_distance, Span,
}; };
use nu_utils::IgnoreCaseExt; use nu_utils::IgnoreCaseExt;
use reedline::Suggestion; use reedline::Suggestion;
use std::{ use std::path::{Path, MAIN_SEPARATOR as SEP};
path::{Path, MAIN_SEPARATOR as SEP}, use std::sync::Arc;
sync::Arc,
};
use super::SemanticSuggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct FileCompletion { pub struct FileCompletion {
engine_state: Arc<EngineState>, engine_state: Arc<EngineState>,
stack: Stack,
} }
impl FileCompletion { impl FileCompletion {
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self { pub fn new(engine_state: Arc<EngineState>) -> Self {
Self { Self { engine_state }
engine_state,
stack,
}
} }
} }
@ -40,7 +31,7 @@ impl Completer for FileCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
let AdjustView { let AdjustView {
prefix, prefix,
span, span,
@ -53,24 +44,17 @@ impl Completer for FileCompletion {
&prefix, &prefix,
&self.engine_state.current_work_dir(), &self.engine_state.current_work_dir(),
options, options,
self.engine_state.as_ref(),
&self.stack,
) )
.into_iter() .into_iter()
.map(move |x| SemanticSuggestion { .map(move |x| Suggestion {
suggestion: Suggestion {
value: x.1, value: x.1,
description: None, description: None,
style: x.2,
extra: None, extra: None,
span: reedline::Span { span: reedline::Span {
start: x.0.start - offset, start: x.0.start - offset,
end: x.0.end - offset, end: x.0.end - offset,
}, },
append_whitespace: false, append_whitespace: false,
},
// TODO????
kind: None,
}) })
.collect(); .collect();
@ -78,7 +62,7 @@ impl Completer for FileCompletion {
} }
// Sort results prioritizing the non hidden folders // Sort results prioritizing the non hidden folders
fn sort(&self, items: Vec<SemanticSuggestion>, prefix: Vec<u8>) -> Vec<SemanticSuggestion> { fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string(); let prefix_str = String::from_utf8_lossy(&prefix).to_string();
// Sort items // Sort items
@ -88,16 +72,15 @@ impl Completer for FileCompletion {
SortBy::Ascending => { SortBy::Ascending => {
sorted_items.sort_by(|a, b| { sorted_items.sort_by(|a, b| {
// Ignore trailing slashes in folder names when sorting // Ignore trailing slashes in folder names when sorting
a.suggestion a.value
.value
.trim_end_matches(SEP) .trim_end_matches(SEP)
.cmp(b.suggestion.value.trim_end_matches(SEP)) .cmp(b.value.trim_end_matches(SEP))
}); });
} }
SortBy::LevenshteinDistance => { SortBy::LevenshteinDistance => {
sorted_items.sort_by(|a, b| { sorted_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.suggestion.value); let a_distance = levenshtein_distance(&prefix_str, &a.value);
let b_distance = levenshtein_distance(&prefix_str, &b.suggestion.value); let b_distance = levenshtein_distance(&prefix_str, &b.value);
a_distance.cmp(&b_distance) a_distance.cmp(&b_distance)
}); });
} }
@ -105,11 +88,11 @@ impl Completer for FileCompletion {
} }
// Separate the results between hidden and non hidden // Separate the results between hidden and non hidden
let mut hidden: Vec<SemanticSuggestion> = vec![]; let mut hidden: Vec<Suggestion> = vec![];
let mut non_hidden: Vec<SemanticSuggestion> = vec![]; let mut non_hidden: Vec<Suggestion> = vec![];
for item in sorted_items.into_iter() { for item in sorted_items.into_iter() {
let item_path = Path::new(&item.suggestion.value); let item_path = Path::new(&item.value);
if let Some(value) = item_path.file_name() { if let Some(value) = item_path.file_name() {
if let Some(value) = value.to_str() { if let Some(value) = value.to_str() {
@ -134,10 +117,8 @@ pub fn file_path_completion(
partial: &str, partial: &str,
cwd: &str, cwd: &str,
options: &CompletionOptions, options: &CompletionOptions,
engine_state: &EngineState, ) -> Vec<(nu_protocol::Span, String)> {
stack: &Stack, complete_item(false, span, partial, cwd, options)
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
complete_item(false, span, partial, cwd, options, engine_state, stack)
} }
pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool { pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {


@ -4,9 +4,8 @@ use nu_protocol::{
engine::StateWorkingSet, engine::StateWorkingSet,
Span, Span,
}; };
use reedline::Suggestion;
use super::SemanticSuggestion; use reedline::Suggestion;
#[derive(Clone)] #[derive(Clone)]
pub struct FlagCompletion { pub struct FlagCompletion {
@ -28,7 +27,7 @@ impl Completer for FlagCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
// Check if it's a flag // Check if it's a flag
if let Expr::Call(call) = &self.expression.expr { if let Expr::Call(call) = &self.expression.expr {
let decl = working_set.get_decl(call.decl_id); let decl = working_set.get_decl(call.decl_id);
@ -44,20 +43,15 @@ impl Completer for FlagCompletion {
named.insert(0, b'-'); named.insert(0, b'-');
if options.match_algorithm.matches_u8(&named, &prefix) { if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(), value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()), description: Some(flag_desc.to_string()),
style: None,
extra: None, extra: None,
span: reedline::Span { span: reedline::Span {
start: span.start - offset, start: span.start - offset,
end: span.end - offset, end: span.end - offset,
}, },
append_whitespace: true, append_whitespace: true,
},
// TODO????
kind: None,
}); });
} }
} }
@ -71,20 +65,15 @@ impl Completer for FlagCompletion {
named.insert(0, b'-'); named.insert(0, b'-');
if options.match_algorithm.matches_u8(&named, &prefix) { if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(&named).to_string(), value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()), description: Some(flag_desc.to_string()),
style: None,
extra: None, extra: None,
span: reedline::Span { span: reedline::Span {
start: span.start - offset, start: span.start - offset,
end: span.end - offset, end: span.end - offset,
}, },
append_whitespace: true, append_whitespace: true,
},
// TODO????
kind: None,
}); });
} }
} }


@ -10,7 +10,7 @@ mod file_completions;
mod flag_completions; mod flag_completions;
mod variable_completions; mod variable_completions;
pub use base::{Completer, SemanticSuggestion, SuggestionKind}; pub use base::Completer;
pub use command_completions::CommandCompletion; pub use command_completions::CommandCompletion;
pub use completer::NuCompleter; pub use completer::NuCompleter;
pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy}; pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy};


@ -1,13 +1,15 @@
use crate::completions::{ use crate::completions::{Completer, CompletionOptions};
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion, SuggestionKind,
};
use nu_engine::{column::get_columns, eval_variable}; use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
Span, Value, Span, Value,
}; };
use reedline::Suggestion; use reedline::Suggestion;
use std::{str, sync::Arc}; use std::str;
use std::sync::Arc;
use super::MatchAlgorithm;
#[derive(Clone)] #[derive(Clone)]
pub struct VariableCompletion { pub struct VariableCompletion {
@ -39,7 +41,7 @@ impl Completer for VariableCompletion {
offset: usize, offset: usize,
_: usize, _: usize,
options: &CompletionOptions, options: &CompletionOptions,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
let mut output = vec![]; let mut output = vec![];
let builtins = ["$nu", "$in", "$env"]; let builtins = ["$nu", "$in", "$env"];
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or(""); let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
@ -73,7 +75,7 @@ impl Completer for VariableCompletion {
{ {
if options.match_algorithm.matches_u8_insensitive( if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive, options.case_sensitive,
suggestion.suggestion.value.as_bytes(), suggestion.value.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(suggestion); output.push(suggestion);
@ -90,16 +92,12 @@ impl Completer for VariableCompletion {
env_var.0.as_bytes(), env_var.0.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: env_var.0, value: env_var.0,
description: None, description: None,
style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
}); });
} }
} }
@ -122,7 +120,7 @@ impl Completer for VariableCompletion {
{ {
if options.match_algorithm.matches_u8_insensitive( if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive, options.case_sensitive,
suggestion.suggestion.value.as_bytes(), suggestion.value.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(suggestion); output.push(suggestion);
@ -145,7 +143,7 @@ impl Completer for VariableCompletion {
{ {
if options.match_algorithm.matches_u8_insensitive( if options.match_algorithm.matches_u8_insensitive(
options.case_sensitive, options.case_sensitive,
suggestion.suggestion.value.as_bytes(), suggestion.value.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(suggestion); output.push(suggestion);
@ -164,17 +162,12 @@ impl Completer for VariableCompletion {
builtin.as_bytes(), builtin.as_bytes(),
&prefix, &prefix,
) { ) {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: builtin.to_string(), value: builtin.to_string(),
description: None, description: None,
style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
// TODO is there a way to get the VarId to get the type???
kind: None,
}); });
} }
} }
@ -191,18 +184,12 @@ impl Completer for VariableCompletion {
v.0, v.0,
&prefix, &prefix,
) { ) {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(), value: String::from_utf8_lossy(v.0).to_string(),
description: None, description: None,
style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
}); });
} }
} }
@ -218,18 +205,12 @@ impl Completer for VariableCompletion {
v.0, v.0,
&prefix, &prefix,
) { ) {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(), value: String::from_utf8_lossy(v.0).to_string(),
description: None, description: None,
style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
}); });
} }
} }
@ -247,25 +228,20 @@ fn nested_suggestions(
val: Value, val: Value,
sublevels: Vec<Vec<u8>>, sublevels: Vec<Vec<u8>>,
current_span: reedline::Span, current_span: reedline::Span,
) -> Vec<SemanticSuggestion> { ) -> Vec<Suggestion> {
let mut output: Vec<SemanticSuggestion> = vec![]; let mut output: Vec<Suggestion> = vec![];
let value = recursive_value(val, sublevels); let value = recursive_value(val, sublevels);
let kind = SuggestionKind::Type(value.get_type());
match value { match value {
Value::Record { val, .. } => { Value::Record { val, .. } => {
// Add all the columns as completion // Add all the columns as completion
for (col, _) in val.into_iter() { for (col, _) in val.into_iter() {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: col, value: col,
description: None, description: None,
style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(kind.clone()),
}); });
} }
@ -274,16 +250,12 @@ fn nested_suggestions(
Value::LazyRecord { val, .. } => { Value::LazyRecord { val, .. } => {
// Add all the columns as completion // Add all the columns as completion
for column_name in val.column_names() { for column_name in val.column_names() {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: column_name.to_string(), value: column_name.to_string(),
description: None, description: None,
style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(kind.clone()),
}); });
} }
@ -291,16 +263,12 @@ fn nested_suggestions(
} }
Value::List { vals, .. } => { Value::List { vals, .. } => {
for column_name in get_columns(vals.as_slice()) { for column_name in get_columns(vals.as_slice()) {
output.push(SemanticSuggestion { output.push(Suggestion {
suggestion: Suggestion {
value: column_name, value: column_name,
description: None, description: None,
style: None,
extra: None, extra: None,
span: current_span, span: current_span,
append_whitespace: false, append_whitespace: false,
},
kind: Some(kind.clone()),
}); });
} }
@ -317,7 +285,7 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
let span = val.span(); let span = val.span();
match val { match val {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for item in *val { for item in val {
// Check if index matches with sublevel // Check if index matches with sublevel
if item.0.as_bytes().to_vec() == next_sublevel { if item.0.as_bytes().to_vec() == next_sublevel {
// If matches try to fetch recursively the next // If matches try to fetch recursively the next


@ -1,10 +1,9 @@
use crate::util::eval_source; use crate::util::eval_source;
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
use nu_path::canonicalize_with; use nu_path::canonicalize_with;
use nu_protocol::{ use nu_protocol::engine::{EngineState, Stack, StateWorkingSet};
engine::{EngineState, Stack, StateWorkingSet}, use nu_protocol::report_error;
report_error, HistoryFileFormat, PipelineData, use nu_protocol::{HistoryFileFormat, PipelineData};
};
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
use nu_protocol::{ParseError, Spanned}; use nu_protocol::{ParseError, Spanned};
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
@ -62,10 +61,10 @@ pub fn add_plugin_file(
plugin_file: Option<Spanned<String>>, plugin_file: Option<Spanned<String>>,
storage_path: &str, storage_path: &str,
) { ) {
if let Some(plugin_file) = plugin_file {
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
let cwd = working_set.get_cwd(); let cwd = working_set.get_cwd();
if let Some(plugin_file) = plugin_file {
if let Ok(path) = canonicalize_with(&plugin_file.item, cwd) { if let Ok(path) = canonicalize_with(&plugin_file.item, cwd) {
engine_state.plugin_signatures = Some(path) engine_state.plugin_signatures = Some(path)
} else { } else {
@ -75,10 +74,8 @@ pub fn add_plugin_file(
} else if let Some(mut plugin_path) = nu_path::config_dir() { } else if let Some(mut plugin_path) = nu_path::config_dir() {
// Path to store plugins signatures // Path to store plugins signatures
plugin_path.push(storage_path); plugin_path.push(storage_path);
let mut plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path);
plugin_path.push(PLUGIN_FILE); plugin_path.push(PLUGIN_FILE);
let plugin_path = canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path); engine_state.plugin_signatures = Some(plugin_path.clone());
engine_state.plugin_signatures = Some(plugin_path);
} }
} }


@ -2,10 +2,11 @@ use log::info;
use miette::Result; use miette::Result;
use nu_engine::{convert_env_values, eval_block}; use nu_engine::{convert_env_values, eval_block};
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::engine::Stack;
use nu_protocol::report_error;
use nu_protocol::{ use nu_protocol::{
debugger::WithoutDebug, engine::{EngineState, StateWorkingSet},
engine::{EngineState, Stack, StateWorkingSet}, PipelineData, Spanned, Value,
report_error, PipelineData, Spanned, Value,
}; };
/// Run a command (or commands) given to us by the user /// Run a command (or commands) given to us by the user
@ -27,17 +28,13 @@ pub fn evaluate_commands(
let (block, delta) = { let (block, delta) = {
if let Some(ref t_mode) = table_mode { if let Some(ref t_mode) = table_mode {
let mut config = engine_state.get_config().clone(); let mut config = engine_state.get_config().clone();
config.table_mode = t_mode.coerce_str()?.parse().unwrap_or_default(); config.table_mode = t_mode.as_string()?.parse().unwrap_or_default();
engine_state.set_config(config); engine_state.set_config(config);
} }
let mut working_set = StateWorkingSet::new(engine_state); let mut working_set = StateWorkingSet::new(engine_state);
let output = parse(&mut working_set, None, commands.item.as_bytes(), false); let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
if let Some(warning) = working_set.parse_warnings.first() {
report_error(&working_set, warning);
}
if let Some(err) = working_set.parse_errors.first() { if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err); report_error(&working_set, err);
@ -54,11 +51,11 @@ pub fn evaluate_commands(
} }
// Run the block // Run the block
let exit_code = match eval_block::<WithoutDebug>(engine_state, stack, &block, input) { let exit_code = match eval_block(engine_state, stack, &block, input, false, false) {
Ok(pipeline_data) => { Ok(pipeline_data) => {
let mut config = engine_state.get_config().clone(); let mut config = engine_state.get_config().clone();
if let Some(t_mode) = table_mode { if let Some(t_mode) = table_mode {
config.table_mode = t_mode.coerce_str()?.parse().unwrap_or_default(); config.table_mode = t_mode.as_string()?.parse().unwrap_or_default();
} }
crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &mut config) crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &mut config)
} }


@ -1,17 +1,18 @@
use crate::util::eval_source; use crate::util::eval_source;
use log::{info, trace}; use log::info;
use log::trace;
use miette::{IntoDiagnostic, Result}; use miette::{IntoDiagnostic, Result};
use nu_engine::{convert_env_values, current_dir, eval_block}; use nu_engine::eval_block;
use nu_engine::{convert_env_values, current_dir};
use nu_parser::parse; use nu_parser::parse;
use nu_path::canonicalize_with; use nu_path::canonicalize_with;
use nu_protocol::report_error;
use nu_protocol::{ use nu_protocol::{
ast::Call, ast::Call,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
report_error, Config, PipelineData, ShellError, Span, Value, Config, PipelineData, ShellError, Span, Value,
}; };
use nu_utils::stdout_write_all_and_flush; use nu_utils::stdout_write_all_and_flush;
use std::sync::Arc;
/// Main function used when a file path is found as argument for nu /// Main function used when a file path is found as argument for nu
pub fn evaluate_file( pub fn evaluate_file(
@ -34,10 +35,10 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error( report_error(
&working_set, &working_set,
&ShellError::FileNotFoundCustom { &ShellError::FileNotFoundCustom(
msg: format!("Could not access file '{}': {:?}", path, e.to_string()), format!("Could not access file '{}': {:?}", path, e.to_string()),
span: Span::unknown(), Span::unknown(),
}, ),
); );
std::process::exit(1); std::process::exit(1);
}); });
@ -46,13 +47,13 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error( report_error(
&working_set, &working_set,
&ShellError::NonUtf8Custom { &ShellError::NonUtf8Custom(
msg: format!( format!(
"Input file name '{}' is not valid UTF8", "Input file name '{}' is not valid UTF8",
file_path.to_string_lossy() file_path.to_string_lossy()
), ),
span: Span::unknown(), Span::unknown(),
}, ),
); );
std::process::exit(1); std::process::exit(1);
}); });
@ -63,14 +64,14 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error( report_error(
&working_set, &working_set,
&ShellError::FileNotFoundCustom { &ShellError::FileNotFoundCustom(
msg: format!( format!(
"Could not read file '{}': {:?}", "Could not read file '{}': {:?}",
file_path_str, file_path_str,
e.to_string() e.to_string()
), ),
span: Span::unknown(), Span::unknown(),
}, ),
); );
std::process::exit(1); std::process::exit(1);
}); });
@ -81,10 +82,10 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error( report_error(
&working_set, &working_set,
&ShellError::FileNotFoundCustom { &ShellError::FileNotFoundCustom(
msg: format!("The file path '{file_path_str}' does not have a parent"), format!("The file path '{file_path_str}' does not have a parent"),
span: Span::unknown(), Span::unknown(),
}, ),
); );
std::process::exit(1); std::process::exit(1);
}); });
@ -97,10 +98,6 @@ pub fn evaluate_file(
"CURRENT_FILE".to_string(), "CURRENT_FILE".to_string(),
Value::string(file_path.to_string_lossy(), Span::unknown()), Value::string(file_path.to_string_lossy(), Span::unknown()),
); );
stack.add_env_var(
"PROCESS_PATH".to_string(),
Value::string(path, Span::unknown()),
);
let source_filename = file_path let source_filename = file_path
.file_name() .file_name()
@ -115,7 +112,7 @@ pub fn evaluate_file(
std::process::exit(1); std::process::exit(1);
} }
for block in working_set.delta.blocks.iter_mut().map(Arc::make_mut) { for block in &mut working_set.delta.blocks {
if block.signature.name == "main" { if block.signature.name == "main" {
block.signature.name = source_filename.to_string_lossy().to_string(); block.signature.name = source_filename.to_string_lossy().to_string();
} else if block.signature.name.starts_with("main ") { } else if block.signature.name.starts_with("main ") {
@ -129,10 +126,16 @@ pub fn evaluate_file(
if engine_state.find_decl(b"main", &[]).is_some() { if engine_state.find_decl(b"main", &[]).is_some() {
let args = format!("main {}", args.join(" ")); let args = format!("main {}", args.join(" "));
let pipeline_data = let pipeline_data = eval_block(
eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty()); engine_state,
stack,
&block,
PipelineData::empty(),
false,
false,
);
let pipeline_data = match pipeline_data { let pipeline_data = match pipeline_data {
Err(ShellError::Return { .. }) => { Err(ShellError::Return(_, _)) => {
// allows early exists before `main` is run. // allows early exists before `main` is run.
return Ok(()); return Ok(());
} }
@ -206,7 +209,8 @@ pub(crate) fn print_table_or_error(
print_or_exit(pipeline_data, engine_state, config); print_or_exit(pipeline_data, engine_state, config);
} else { } else {
// The final call on table command, it's ok to set redirect_output to false. // The final call on table command, it's ok to set redirect_output to false.
let call = Call::new(Span::new(0, 0)); let mut call = Call::new(Span::new(0, 0));
call.redirect_stdout = false;
let table = command.run(engine_state, stack, &call, pipeline_data); let table = command.run(engine_state, stack, &call, pipeline_data);
match table { match table {
@ -248,7 +252,7 @@ fn print_or_exit(pipeline_data: PipelineData, engine_state: &mut EngineState, co
std::process::exit(1); std::process::exit(1);
} }
let out = item.to_expanded_string("\n", config) + "\n"; let out = item.into_string("\n", config) + "\n";
let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{err}")); let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{err}"));
} }
} }


@ -15,11 +15,11 @@ mod util;
mod validation; mod validation;
pub use commands::add_cli_context; pub use commands::add_cli_context;
pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind}; pub use completions::{FileCompletion, NuCompleter};
pub use config_files::eval_config_contents; pub use config_files::eval_config_contents;
pub use eval_cmds::evaluate_commands; pub use eval_cmds::evaluate_commands;
pub use eval_file::evaluate_file; pub use eval_file::evaluate_file;
pub use menus::NuHelpCompleter; pub use menus::{DescriptionMenu, NuHelpCompleter};
pub use nu_cmd_base::util::get_init_cwd; pub use nu_cmd_base::util::get_init_cwd;
pub use nu_highlight::NuHighlight; pub use nu_highlight::NuHighlight;
pub use print::Print; pub use print::Print;


@ -0,0 +1,730 @@
use {
nu_ansi_term::{ansi::RESET, Style},
reedline::{
menu_functions::string_difference, Completer, Editor, Menu, MenuEvent, MenuTextStyle,
Painter, Suggestion, UndoBehavior,
},
};
/// Default values used as reference for the menu. These values are set during
/// the initial declaration of the menu and are always kept as reference for the
/// changeable [`WorkingDetails`]
struct DefaultMenuDetails {
/// Number of columns that the menu will have
pub columns: u16,
/// Column width
pub col_width: Option<usize>,
/// Column padding
pub col_padding: usize,
/// Number of rows for commands
pub selection_rows: u16,
/// Number of rows allowed to display the description
pub description_rows: usize,
}
impl Default for DefaultMenuDetails {
fn default() -> Self {
Self {
columns: 4,
col_width: None,
col_padding: 2,
selection_rows: 4,
description_rows: 10,
}
}
}
/// Represents the actual column conditions of the menu. These conditions change
/// since they need to accommodate possible different line sizes for the column values
#[derive(Default)]
struct WorkingDetails {
/// Number of columns that the menu will have
pub columns: u16,
/// Column width
pub col_width: usize,
/// Number of rows for description
pub description_rows: usize,
}
/// Completion menu definition
pub struct DescriptionMenu {
/// Menu name
name: String,
/// Menu status
active: bool,
/// Menu coloring
color: MenuTextStyle,
/// Default column details that are set when creating the menu
/// These values are the reference for the working details
default_details: DefaultMenuDetails,
/// Number of minimum rows that are displayed when
/// the required lines is larger than the available lines
min_rows: u16,
/// Working column details keep changing based on the collected values
working_details: WorkingDetails,
/// Menu cached values
values: Vec<Suggestion>,
/// column position of the cursor. Starts from 0
col_pos: u16,
/// row position in the menu. Starts from 0
row_pos: u16,
/// Menu marker when active
marker: String,
/// Event sent to the menu
event: Option<MenuEvent>,
/// String collected after the menu is activated
input: Option<String>,
/// Examples to select
examples: Vec<String>,
/// Example index
example_index: Option<usize>,
/// Examples may not be shown if there is not enough space in the screen
show_examples: bool,
/// Skipped description rows
skipped_rows: usize,
/// Calls the completer using only the line buffer difference
/// after the menu was activated
only_buffer_difference: bool,
}
impl Default for DescriptionMenu {
fn default() -> Self {
Self {
name: "description_menu".to_string(),
active: false,
color: MenuTextStyle::default(),
default_details: DefaultMenuDetails::default(),
min_rows: 3,
working_details: WorkingDetails::default(),
values: Vec::new(),
col_pos: 0,
row_pos: 0,
marker: "? ".to_string(),
event: None,
input: None,
examples: Vec::new(),
example_index: None,
show_examples: true,
skipped_rows: 0,
only_buffer_difference: true,
}
}
}
// Menu configuration
impl DescriptionMenu {
/// Menu builder with new name
pub fn with_name(mut self, name: &str) -> Self {
self.name = name.into();
self
}
/// Menu builder with new value for text style
pub fn with_text_style(mut self, text_style: Style) -> Self {
self.color.text_style = text_style;
self
}
/// Menu builder with new value for text style
pub fn with_selected_text_style(mut self, selected_text_style: Style) -> Self {
self.color.selected_text_style = selected_text_style;
self
}
/// Menu builder with new value for text style
pub fn with_description_text_style(mut self, description_text_style: Style) -> Self {
self.color.description_style = description_text_style;
self
}
/// Menu builder with new columns value
pub fn with_columns(mut self, columns: u16) -> Self {
self.default_details.columns = columns;
self
}
/// Menu builder with new column width value
pub fn with_column_width(mut self, col_width: Option<usize>) -> Self {
self.default_details.col_width = col_width;
self
}
/// Menu builder with new column width value
pub fn with_column_padding(mut self, col_padding: usize) -> Self {
self.default_details.col_padding = col_padding;
self
}
/// Menu builder with new selection rows value
pub fn with_selection_rows(mut self, selection_rows: u16) -> Self {
self.default_details.selection_rows = selection_rows;
self
}
/// Menu builder with new description rows value
pub fn with_description_rows(mut self, description_rows: usize) -> Self {
self.default_details.description_rows = description_rows;
self
}
/// Menu builder with marker
pub fn with_marker(mut self, marker: String) -> Self {
self.marker = marker;
self
}
/// Menu builder with new only buffer difference
pub fn with_only_buffer_difference(mut self, only_buffer_difference: bool) -> Self {
self.only_buffer_difference = only_buffer_difference;
self
}
}
// Menu functionality
impl DescriptionMenu {
/// Move menu cursor to the next element
fn move_next(&mut self) {
let mut new_col = self.col_pos + 1;
let mut new_row = self.row_pos;
if new_col >= self.get_cols() {
new_row += 1;
new_col = 0;
}
if new_row >= self.get_rows() {
new_row = 0;
new_col = 0;
}
let position = new_row * self.get_cols() + new_col;
if position >= self.get_values().len() as u16 {
self.reset_position();
} else {
self.col_pos = new_col;
self.row_pos = new_row;
}
}
/// Move menu cursor to the previous element
fn move_previous(&mut self) {
let new_col = self.col_pos.checked_sub(1);
let (new_col, new_row) = match new_col {
Some(col) => (col, self.row_pos),
None => match self.row_pos.checked_sub(1) {
Some(row) => (self.get_cols().saturating_sub(1), row),
None => (
self.get_cols().saturating_sub(1),
self.get_rows().saturating_sub(1),
),
},
};
let position = new_row * self.get_cols() + new_col;
if position >= self.get_values().len() as u16 {
self.col_pos = (self.get_values().len() as u16 % self.get_cols()).saturating_sub(1);
self.row_pos = self.get_rows().saturating_sub(1);
} else {
self.col_pos = new_col;
self.row_pos = new_row;
}
}
/// Menu index based on column and row position
fn index(&self) -> usize {
let index = self.row_pos * self.get_cols() + self.col_pos;
index as usize
}
/// Get selected value from the menu
fn get_value(&self) -> Option<Suggestion> {
self.get_values().get(self.index()).cloned()
}
/// Calculates how many rows the Menu will use
fn get_rows(&self) -> u16 {
let values = self.get_values().len() as u16;
if values == 0 {
// When the values are empty the no_records_msg is shown, taking 1 line
return 1;
}
let rows = values / self.get_cols();
if values % self.get_cols() != 0 {
rows + 1
} else {
rows
}
}
/// Returns working details col width
fn get_width(&self) -> usize {
self.working_details.col_width
}
/// Reset menu position
fn reset_position(&mut self) {
self.col_pos = 0;
self.row_pos = 0;
self.skipped_rows = 0;
}
fn no_records_msg(&self, use_ansi_coloring: bool) -> String {
let msg = "TYPE TO START SEARCH";
if use_ansi_coloring {
format!(
"{}{}{}",
self.color.selected_text_style.prefix(),
msg,
RESET
)
} else {
msg.to_string()
}
}
/// Returns working details columns
fn get_cols(&self) -> u16 {
self.working_details.columns.max(1)
}
/// End of line for menu
fn end_of_line(&self, column: u16, index: usize) -> &str {
let is_last = index == self.values.len().saturating_sub(1);
if column == self.get_cols().saturating_sub(1) || is_last {
"\r\n"
} else {
""
}
}
/// Update the list of examples from the currently selected value
fn update_examples(&mut self) {
self.examples = self
.get_value()
.and_then(|suggestion| suggestion.extra)
.unwrap_or_default();
self.example_index = None;
}
/// Creates the default string that represents one suggestion from the menu
fn create_entry_string(
&self,
suggestion: &Suggestion,
index: usize,
column: u16,
empty_space: usize,
use_ansi_coloring: bool,
) -> String {
if use_ansi_coloring {
if index == self.index() {
format!(
"{}{}{}{:>empty$}{}",
self.color.selected_text_style.prefix(),
&suggestion.value,
RESET,
"",
self.end_of_line(column, index),
empty = empty_space,
)
} else {
format!(
"{}{}{}{:>empty$}{}",
self.color.text_style.prefix(),
&suggestion.value,
RESET,
"",
self.end_of_line(column, index),
empty = empty_space,
)
}
} else {
// If ansi coloring is not available, the selected entry is prefixed with ">"
// and rendered in uppercase
let (marker, empty_space) = if index == self.index() {
(">", empty_space.saturating_sub(1))
} else {
("", empty_space)
};
let line = format!(
"{}{}{:>empty$}{}",
marker,
&suggestion.value,
"",
self.end_of_line(column, index),
empty = empty_space,
);
if index == self.index() {
line.to_uppercase()
} else {
line
}
}
}
/// Description string with color
fn create_description_string(&self, use_ansi_coloring: bool) -> String {
let description = self
.get_value()
.and_then(|suggestion| suggestion.description)
.unwrap_or_default()
.lines()
.skip(self.skipped_rows)
.take(self.working_details.description_rows)
.collect::<Vec<&str>>()
.join("\r\n");
if use_ansi_coloring && !description.is_empty() {
format!(
"{}{}{}",
self.color.description_style.prefix(),
description,
RESET,
)
} else {
description
}
}
/// Selectable list of examples from the currently selected value
fn create_example_string(&self, use_ansi_coloring: bool) -> String {
if !self.show_examples {
return "".into();
}
let examples: String = self
.examples
.iter()
.enumerate()
.map(|(index, example)| {
if let Some(example_index) = self.example_index {
if index == example_index {
format!(
" {}{}{}\r\n",
self.color.selected_text_style.prefix(),
example,
RESET
)
} else {
format!(" {example}\r\n")
}
} else {
format!(" {example}\r\n")
}
})
.collect();
if examples.is_empty() {
"".into()
} else if use_ansi_coloring {
format!(
"{}\r\n\r\nExamples:\r\n{}{}",
self.color.description_style.prefix(),
RESET,
examples,
)
} else {
format!("\r\n\r\nExamples:\r\n{examples}",)
}
}
}
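// Worked example (editorial sketch, not part of the original file): with 7
// suggestions and get_cols() == 3, get_rows() returns ceil(7 / 3) == 3. The
// cursor at (row_pos = 2, col_pos = 0) maps to index() == 2 * 3 + 0 == 6,
// i.e. the last suggestion. From there move_next() computes position 7 >= 7
// and calls reset_position(), wrapping back to (0, 0), while move_previous()
// from (0, 0) wraps the cursor back to that last suggestion at (2, 0).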
impl Menu for DescriptionMenu {
/// Menu name
fn name(&self) -> &str {
self.name.as_str()
}
/// Menu indicator
fn indicator(&self) -> &str {
self.marker.as_str()
}
/// Checks if the menu is active
fn is_active(&self) -> bool {
self.active
}
/// The menu stays active even with one record
fn can_quick_complete(&self) -> bool {
false
}
/// The menu does not need to partially complete
fn can_partially_complete(
&mut self,
_values_updated: bool,
_editor: &mut Editor,
_completer: &mut dyn Completer,
) -> bool {
false
}
/// Selects what type of event happened with the menu
fn menu_event(&mut self, event: MenuEvent) {
match &event {
MenuEvent::Activate(_) => self.active = true,
MenuEvent::Deactivate => {
self.active = false;
self.input = None;
self.values = Vec::new();
}
_ => {}
};
self.event = Some(event);
}
/// Updates menu values
fn update_values(&mut self, editor: &mut Editor, completer: &mut dyn Completer) {
if self.only_buffer_difference {
if let Some(old_string) = &self.input {
let (start, input) = string_difference(editor.get_buffer(), old_string);
if !input.is_empty() {
self.reset_position();
self.values = completer.complete(input, start);
}
}
} else {
let trimmed_buffer = editor.get_buffer().replace('\n', " ");
self.values = completer.complete(
trimmed_buffer.as_str(),
editor.line_buffer().insertion_point(),
);
self.reset_position();
}
}
/// The working details for the menu change based on the size of the lines
/// collected from the completer
fn update_working_details(
&mut self,
editor: &mut Editor,
completer: &mut dyn Completer,
painter: &Painter,
) {
if let Some(event) = self.event.take() {
// Updating all working parameters from the menu before executing any of the
// possible events
let max_width = self.get_values().iter().fold(0, |acc, suggestion| {
let str_len = suggestion.value.len() + self.default_details.col_padding;
if str_len > acc {
str_len
} else {
acc
}
});
// If no default width is found, then the total screen width is used to estimate
// the column width based on the default number of columns
let default_width = if let Some(col_width) = self.default_details.col_width {
col_width
} else {
let col_width = painter.screen_width() / self.default_details.columns;
col_width as usize
};
// Adjusting the working width of the column based on the max line width found
// in the menu values
if max_width > default_width {
self.working_details.col_width = max_width;
} else {
self.working_details.col_width = default_width;
};
// The number of working columns is adjusted based on how many columns
// could fit on the screen with the calculated column width
let possible_cols = painter.screen_width() / self.working_details.col_width as u16;
if possible_cols > self.default_details.columns {
self.working_details.columns = self.default_details.columns.max(1);
} else {
self.working_details.columns = possible_cols;
}
// Updating the working rows to display the description
if self.menu_required_lines(painter.screen_width()) <= painter.remaining_lines() {
self.working_details.description_rows = self.default_details.description_rows;
self.show_examples = true;
} else {
self.working_details.description_rows = painter
.remaining_lines()
.saturating_sub(self.default_details.selection_rows + 1)
as usize;
self.show_examples = false;
}
match event {
MenuEvent::Activate(_) => {
self.reset_position();
self.input = Some(editor.get_buffer().to_string());
self.update_values(editor, completer);
}
MenuEvent::Deactivate => self.active = false,
MenuEvent::Edit(_) => {
self.reset_position();
self.update_values(editor, completer);
self.update_examples()
}
MenuEvent::NextElement => {
self.skipped_rows = 0;
self.move_next();
self.update_examples();
}
MenuEvent::PreviousElement => {
self.skipped_rows = 0;
self.move_previous();
self.update_examples();
}
MenuEvent::MoveUp => {
if let Some(example_index) = self.example_index {
if let Some(index) = example_index.checked_sub(1) {
self.example_index = Some(index);
} else {
self.example_index = Some(self.examples.len().saturating_sub(1));
}
} else if !self.examples.is_empty() {
self.example_index = Some(0);
}
}
MenuEvent::MoveDown => {
if let Some(example_index) = self.example_index {
let index = example_index + 1;
if index < self.examples.len() {
self.example_index = Some(index);
} else {
self.example_index = Some(0);
}
} else if !self.examples.is_empty() {
self.example_index = Some(0);
}
}
MenuEvent::MoveLeft => self.skipped_rows = self.skipped_rows.saturating_sub(1),
MenuEvent::MoveRight => {
let skipped = self.skipped_rows + 1;
let description_rows = self
.get_value()
.and_then(|suggestion| suggestion.description)
.unwrap_or_default()
.lines()
.count();
let allowed_skips =
description_rows.saturating_sub(self.working_details.description_rows);
if skipped < allowed_skips {
self.skipped_rows = skipped;
} else {
self.skipped_rows = allowed_skips;
}
}
MenuEvent::PreviousPage | MenuEvent::NextPage => {}
}
}
}
/// The buffer gets replaced in the Span location
fn replace_in_buffer(&self, editor: &mut Editor) {
if let Some(Suggestion { value, span, .. }) = self.get_value() {
let start = span.start.min(editor.line_buffer().len());
let end = span.end.min(editor.line_buffer().len());
let replacement = if let Some(example_index) = self.example_index {
self.examples
.get(example_index)
.expect("the example index is always checked")
} else {
&value
};
editor.edit_buffer(
|lb| {
lb.replace_range(start..end, replacement);
let mut offset = lb.insertion_point();
offset += lb
.len()
.saturating_sub(end.saturating_sub(start))
.saturating_sub(start);
lb.set_insertion_point(offset);
},
UndoBehavior::CreateUndoPoint,
);
}
}
/// Minimum rows that should be displayed by the menu
fn min_rows(&self) -> u16 {
self.get_rows().min(self.min_rows)
}
/// Gets the values from the completer that will be displayed in the menu
fn get_values(&self) -> &[Suggestion] {
&self.values
}
fn menu_required_lines(&self, _terminal_columns: u16) -> u16 {
let example_lines = self
.examples
.iter()
.fold(0, |acc, example| example.lines().count() + acc);
self.default_details.selection_rows
+ self.default_details.description_rows as u16
+ example_lines as u16
+ 3
}
fn menu_string(&self, _available_lines: u16, use_ansi_coloring: bool) -> String {
if self.get_values().is_empty() {
self.no_records_msg(use_ansi_coloring)
} else {
// The skip values represent the number of lines that should be skipped
// while printing the menu
let available_lines = self.default_details.selection_rows;
let skip_values = if self.row_pos >= available_lines {
let skip_lines = self.row_pos.saturating_sub(available_lines) + 1;
(skip_lines * self.get_cols()) as usize
} else {
0
};
// It seems that crossterm prefers to have a complete string ready to be printed
// rather than looping through the values and printing multiple things
// This reduces the flickering when printing the menu
let available_values = (available_lines * self.get_cols()) as usize;
let selection_values: String = self
.get_values()
.iter()
.skip(skip_values)
.take(available_values)
.enumerate()
.map(|(index, suggestion)| {
// Correcting the enumerate index based on the number of skipped values
let index = index + skip_values;
let column = index as u16 % self.get_cols();
let empty_space = self.get_width().saturating_sub(suggestion.value.len());
self.create_entry_string(
suggestion,
index,
column,
empty_space,
use_ansi_coloring,
)
})
.collect();
format!(
"{}{}{}",
selection_values,
self.create_description_string(use_ansi_coloring),
self.create_example_string(use_ansi_coloring)
)
}
}
}
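
For context, a menu like this only becomes useful once it is attached to a reedline instance. Below is a minimal editorial sketch of that wiring, following the `ReedlineMenu::WithCompleter` pattern that `add_description_menu` uses later in this diff; the `help_completer` argument is a placeholder for any boxed `reedline::Completer` implementation (for example the `NuHelpCompleter` wired up there).

```rust
use reedline::{Completer, Reedline, ReedlineMenu};

// Sketch only: attach a DescriptionMenu to a line editor, pairing it with a
// caller-supplied completer that fills the menu with suggestions.
fn attach_description_menu(help_completer: Box<dyn Completer>) -> Reedline {
    let menu = DescriptionMenu::default().with_name("description_menu");
    Reedline::create().with_menu(ReedlineMenu::WithCompleter {
        menu: Box::new(menu),
        completer: help_completer,
    })
}
```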

View File

@ -2,7 +2,8 @@ use nu_engine::documentation::get_flags_section;
use nu_protocol::{engine::EngineState, levenshtein_distance}; use nu_protocol::{engine::EngineState, levenshtein_distance};
use nu_utils::IgnoreCaseExt; use nu_utils::IgnoreCaseExt;
use reedline::{Completer, Suggestion}; use reedline::{Completer, Suggestion};
use std::{fmt::Write, sync::Arc}; use std::fmt::Write;
use std::sync::Arc;
pub struct NuHelpCompleter(Arc<EngineState>); pub struct NuHelpCompleter(Arc<EngineState>);
@ -56,7 +57,7 @@ impl NuHelpCompleter {
if !sig.named.is_empty() { if !sig.named.is_empty() {
long_desc.push_str(&get_flags_section(Some(&*self.0.clone()), sig, |v| { long_desc.push_str(&get_flags_section(Some(&*self.0.clone()), sig, |v| {
v.to_parsable_string(", ", &self.0.config) v.into_string_parsable(", ", &self.0.config)
})) }))
} }
@ -72,7 +73,7 @@ impl NuHelpCompleter {
let opt_suffix = if let Some(value) = &positional.default_value { let opt_suffix = if let Some(value) = &positional.default_value {
format!( format!(
" (optional, default: {})", " (optional, default: {})",
&value.to_parsable_string(", ", &self.0.config), &value.into_string_parsable(", ", &self.0.config),
) )
} else { } else {
(" (optional)").to_string() (" (optional)").to_string()
@ -101,11 +102,10 @@ impl NuHelpCompleter {
Suggestion { Suggestion {
value: sig.name.clone(), value: sig.name.clone(),
description: Some(long_desc), description: Some(long_desc),
style: None,
extra: Some(extra), extra: Some(extra),
span: reedline::Span { span: reedline::Span {
start: pos - line.len(), start: pos,
end: pos, end: pos + line.len(),
}, },
append_whitespace: false, append_whitespace: false,
} }
@ -119,42 +119,3 @@ impl Completer for NuHelpCompleter {
self.completion_helper(line, pos) self.completion_helper(line, pos)
} }
} }
#[cfg(test)]
mod test {
use super::*;
use rstest::rstest;
#[rstest]
#[case("who", 5, 8, &["whoami"])]
#[case("hash", 1, 5, &["hash", "hash md5", "hash sha256"])]
#[case("into f", 0, 6, &["into float", "into filesize"])]
#[case("into nonexistent", 0, 16, &[])]
fn test_help_completer(
#[case] line: &str,
#[case] start: usize,
#[case] end: usize,
#[case] expected: &[&str],
) {
let engine_state =
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context());
let mut completer = NuHelpCompleter::new(engine_state.into());
let suggestions = completer.complete(line, end);
assert_eq!(
expected.len(),
suggestions.len(),
"expected {:?}, got {:?}",
expected,
suggestions
.iter()
.map(|s| s.value.clone())
.collect::<Vec<_>>()
);
for (exp, actual) in expected.iter().zip(suggestions) {
assert_eq!(exp, &actual.value);
assert_eq!(reedline::Span::new(start, end), actual.span);
}
}
}
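
The span change in the `Suggestion` hunk above moves the replacement span from one side of the cursor to the other. A small worked sketch, assuming (as in the removed test cases) that `pos` is the cursor position right after the typed text:

```rust
let line = "into f";
let pos: usize = 6; // cursor right after the typed text

// Span over the text that was already typed (matches the removed test cases):
let typed = reedline::Span { start: pos - line.len(), end: pos }; // start: 0, end: 6

// Span starting at the cursor and extending one line-length past it:
let ahead = reedline::Span { start: pos, end: pos + line.len() }; // start: 6, end: 12
```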

View File

@ -1,6 +1,5 @@
use nu_engine::eval_block; use nu_engine::eval_block;
use nu_protocol::{ use nu_protocol::{
debugger::WithoutDebug,
engine::{EngineState, Stack}, engine::{EngineState, Stack},
IntoPipelineData, Span, Value, IntoPipelineData, Span, Value,
}; };
@ -28,7 +27,7 @@ impl NuMenuCompleter {
Self { Self {
block_id, block_id,
span, span,
stack: stack.reset_stdio().capture(), stack,
engine_state, engine_state,
only_buffer_difference, only_buffer_difference,
} }
@ -56,8 +55,14 @@ impl Completer for NuMenuCompleter {
} }
let input = Value::nothing(self.span).into_pipeline_data(); let input = Value::nothing(self.span).into_pipeline_data();
let res = eval_block(
let res = eval_block::<WithoutDebug>(&self.engine_state, &mut self.stack, block, input); &self.engine_state,
&mut self.stack,
block,
input,
false,
false,
);
if let Ok(values) = res { if let Ok(values) = res {
let values = values.into_value(self.span); let values = values.into_value(self.span);
@ -78,12 +83,10 @@ fn convert_to_suggestions(
Value::Record { val, .. } => { Value::Record { val, .. } => {
let text = val let text = val
.get("value") .get("value")
.and_then(|val| val.coerce_string().ok()) .and_then(|val| val.as_string().ok())
.unwrap_or_else(|| "No value key".to_string()); .unwrap_or_else(|| "No value key".to_string());
let description = val let description = val.get("description").and_then(|val| val.as_string().ok());
.get("description")
.and_then(|val| val.coerce_string().ok());
let span = match val.get("span") { let span = match val.get("span") {
Some(Value::Record { val: span, .. }) => { Some(Value::Record { val: span, .. }) => {
@ -98,13 +101,9 @@ fn convert_to_suggestions(
} }
} }
_ => reedline::Span { _ => reedline::Span {
start: if only_buffer_difference { start: if only_buffer_difference { pos } else { 0 },
pos - line.len()
} else {
0
},
end: if only_buffer_difference { end: if only_buffer_difference {
pos pos + line.len()
} else { } else {
line.len() line.len()
}, },
@ -112,13 +111,9 @@ fn convert_to_suggestions(
} }
} }
_ => reedline::Span { _ => reedline::Span {
start: if only_buffer_difference { start: if only_buffer_difference { pos } else { 0 },
pos - line.len()
} else {
0
},
end: if only_buffer_difference { end: if only_buffer_difference {
pos pos + line.len()
} else { } else {
line.len() line.len()
}, },
@ -143,7 +138,6 @@ fn convert_to_suggestions(
vec![Suggestion { vec![Suggestion {
value: text, value: text,
description, description,
style: None,
extra, extra,
span, span,
append_whitespace: false, append_whitespace: false,
@ -156,19 +150,10 @@ fn convert_to_suggestions(
_ => vec![Suggestion { _ => vec![Suggestion {
value: format!("Not a record: {value:?}"), value: format!("Not a record: {value:?}"),
description: None, description: None,
style: None,
extra: None, extra: None,
span: reedline::Span { span: reedline::Span {
start: if only_buffer_difference { start: 0,
pos - line.len() end: line.len(),
} else {
0
},
end: if only_buffer_difference {
pos
} else {
line.len()
},
}, },
append_whitespace: false, append_whitespace: false,
}], }],

View File

@ -1,5 +1,7 @@
mod description_menu;
mod help_completions; mod help_completions;
mod menu_completions; mod menu_completions;
pub use description_menu::DescriptionMenu;
pub use help_completions::NuHelpCompleter; pub use help_completions::NuHelpCompleter;
pub use menu_completions::NuMenuCompleter; pub use menu_completions::NuMenuCompleter;

View File

@ -1,5 +1,7 @@
use nu_engine::command_prelude::*; use nu_protocol::ast::Call;
use reedline::{Highlighter, StyledText}; use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Type, Value};
use reedline::Highlighter;
#[derive(Clone)] #[derive(Clone)]
pub struct NuHighlight; pub struct NuHighlight;
@ -26,7 +28,7 @@ impl Command for NuHighlight {
fn run( fn run(
&self, &self,
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, _stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
@ -38,14 +40,14 @@ impl Command for NuHighlight {
let highlighter = crate::NuHighlighter { let highlighter = crate::NuHighlighter {
engine_state, engine_state,
stack: std::sync::Arc::new(stack.clone()),
config, config,
}; };
input.map( input.map(
move |x| match x.coerce_into_string() { move |x| match x.as_string() {
Ok(line) => { Ok(line) => {
let highlights = highlighter.highlight(&line, line.len()); let highlights = highlighter.highlight(&line, line.len());
Value::string(highlights.render_simple(), head) Value::string(highlights.render_simple(), head)
} }
Err(err) => Value::error(err, head), Err(err) => Value::error(err, head),
@ -62,16 +64,3 @@ impl Command for NuHighlight {
}] }]
} }
} }
/// A highlighter that does nothing
///
/// Used to remove highlighting from a reedline instance
/// (letting NuHighlighter structs be dropped)
#[derive(Default)]
pub struct NoOpHighlighter {}
impl Highlighter for NoOpHighlighter {
fn highlight(&self, _line: &str, _cursor: usize) -> reedline::StyledText {
StyledText::new()
}
}

View File

@ -1,4 +1,10 @@
use nu_engine::command_prelude::*; use nu_engine::CallExt;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type,
Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct Print; pub struct Print;
@ -48,8 +54,8 @@ Since this command has no output, there is no point in piping it with other comm
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let args: Vec<Value> = call.rest(engine_state, stack, 0)?; let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
let no_newline = call.has_flag(engine_state, stack, "no-newline")?; let no_newline = call.has_flag("no-newline");
let to_stderr = call.has_flag(engine_state, stack, "stderr")?; let to_stderr = call.has_flag("stderr");
// This will allow for easy printing of pipelines as well // This will allow for easy printing of pipelines as well
if !args.is_empty() { if !args.is_empty() {

View File

@ -1,16 +1,16 @@
use crate::prompt_update::{POST_PROMPT_MARKER, PRE_PROMPT_MARKER};
#[cfg(windows)] #[cfg(windows)]
use nu_utils::enable_vt_processing; use nu_utils::enable_vt_processing;
use reedline::{ use reedline::DefaultPrompt;
DefaultPrompt, Prompt, PromptEditMode, PromptHistorySearch, PromptHistorySearchStatus, use {
PromptViMode, reedline::{
Prompt, PromptEditMode, PromptHistorySearch, PromptHistorySearchStatus, PromptViMode,
},
std::borrow::Cow,
}; };
use std::borrow::Cow;
/// Nushell prompt definition /// Nushell prompt definition
#[derive(Clone)] #[derive(Clone)]
pub struct NushellPrompt { pub struct NushellPrompt {
shell_integration: bool,
left_prompt_string: Option<String>, left_prompt_string: Option<String>,
right_prompt_string: Option<String>, right_prompt_string: Option<String>,
default_prompt_indicator: Option<String>, default_prompt_indicator: Option<String>,
@ -20,10 +20,15 @@ pub struct NushellPrompt {
render_right_prompt_on_last_line: bool, render_right_prompt_on_last_line: bool,
} }
impl Default for NushellPrompt {
fn default() -> Self {
NushellPrompt::new()
}
}
impl NushellPrompt { impl NushellPrompt {
pub fn new(shell_integration: bool) -> NushellPrompt { pub fn new() -> NushellPrompt {
NushellPrompt { NushellPrompt {
shell_integration,
left_prompt_string: None, left_prompt_string: None,
right_prompt_string: None, right_prompt_string: None,
default_prompt_indicator: None, default_prompt_indicator: None,
@ -106,13 +111,9 @@ impl Prompt for NushellPrompt {
.to_string() .to_string()
.replace('\n', "\r\n"); .replace('\n', "\r\n");
if self.shell_integration {
format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
} else {
prompt.into() prompt.into()
} }
} }
}
fn render_prompt_right(&self) -> Cow<str> { fn render_prompt_right(&self) -> Cow<str> {
if let Some(prompt_string) = &self.right_prompt_string { if let Some(prompt_string) = &self.right_prompt_string {

View File

@ -1,11 +1,14 @@
use crate::NushellPrompt; use crate::NushellPrompt;
use log::trace; use log::trace;
use nu_engine::get_eval_subexpression; use nu_engine::eval_subexpression;
use nu_protocol::report_error;
use nu_protocol::{ use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
report_error, Config, PipelineData, Value, Config, PipelineData, Value,
}; };
use reedline::Prompt; use reedline::Prompt;
use std::borrow::Cow;
use std::sync::Arc;
// Name of environment variable where the prompt could be stored // Name of environment variable where the prompt could be stored
pub(crate) const PROMPT_COMMAND: &str = "PROMPT_COMMAND"; pub(crate) const PROMPT_COMMAND: &str = "PROMPT_COMMAND";
@ -25,8 +28,8 @@ pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
"TRANSIENT_PROMPT_MULTILINE_INDICATOR"; "TRANSIENT_PROMPT_MULTILINE_INDICATOR";
// According to Daniel Imms @Tyriar, we need to do these this way: // According to Daniel Imms @Tyriar, we need to do these this way:
// <133 A><prompt><133 B><command><133 C><command output> // <133 A><prompt><133 B><command><133 C><command output>
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\"; const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\"; const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
fn get_prompt_string( fn get_prompt_string(
prompt: &str, prompt: &str,
@ -34,8 +37,6 @@ fn get_prompt_string(
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
) -> Option<String> { ) -> Option<String> {
let eval_subexpression = get_eval_subexpression(engine_state);
stack stack
.get_env_var(engine_state, prompt) .get_env_var(engine_state, prompt)
.and_then(|v| match v { .and_then(|v| match v {
@ -97,13 +98,15 @@ fn get_prompt_string(
}) })
} }
pub(crate) fn update_prompt( pub(crate) fn update_prompt<'prompt>(
config: &Config, config: &Config,
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &Stack,
nu_prompt: &mut NushellPrompt, nu_prompt: &'prompt mut NushellPrompt,
) { ) -> &'prompt dyn Prompt {
let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, stack); let mut stack = stack.clone();
let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, &mut stack);
// Now that we have the prompt string lets ansify it. // Now that we have the prompt string lets ansify it.
// <133 A><prompt><133 B><command><133 C><command output> // <133 A><prompt><133 B><command><133 C><command output>
@ -119,18 +122,20 @@ pub(crate) fn update_prompt(
left_prompt_string left_prompt_string
}; };
let right_prompt_string = get_prompt_string(PROMPT_COMMAND_RIGHT, config, engine_state, stack); let right_prompt_string =
get_prompt_string(PROMPT_COMMAND_RIGHT, config, engine_state, &mut stack);
let prompt_indicator_string = get_prompt_string(PROMPT_INDICATOR, config, engine_state, stack); let prompt_indicator_string =
get_prompt_string(PROMPT_INDICATOR, config, engine_state, &mut stack);
let prompt_multiline_string = let prompt_multiline_string =
get_prompt_string(PROMPT_MULTILINE_INDICATOR, config, engine_state, stack); get_prompt_string(PROMPT_MULTILINE_INDICATOR, config, engine_state, &mut stack);
let prompt_vi_insert_string = let prompt_vi_insert_string =
get_prompt_string(PROMPT_INDICATOR_VI_INSERT, config, engine_state, stack); get_prompt_string(PROMPT_INDICATOR_VI_INSERT, config, engine_state, &mut stack);
let prompt_vi_normal_string = let prompt_vi_normal_string =
get_prompt_string(PROMPT_INDICATOR_VI_NORMAL, config, engine_state, stack); get_prompt_string(PROMPT_INDICATOR_VI_NORMAL, config, engine_state, &mut stack);
// apply the other indicators // apply the other indicators
nu_prompt.update_all_prompt_strings( nu_prompt.update_all_prompt_strings(
@ -141,55 +146,125 @@ pub(crate) fn update_prompt(
(prompt_vi_insert_string, prompt_vi_normal_string), (prompt_vi_insert_string, prompt_vi_normal_string),
config.render_right_prompt_on_last_line, config.render_right_prompt_on_last_line,
); );
let ret_val = nu_prompt as &dyn Prompt;
trace!("update_prompt {}:{}:{}", file!(), line!(), column!()); trace!("update_prompt {}:{}:{}", file!(), line!(), column!());
ret_val
} }
/// Construct the transient prompt based on the normal nu_prompt struct TransientPrompt {
pub(crate) fn make_transient_prompt( engine_state: Arc<EngineState>,
stack: Stack,
}
/// Try getting `$env.TRANSIENT_PROMPT_<X>`, and get `$env.PROMPT_<X>` if that fails
fn get_transient_prompt_string(
transient_prompt: &str,
prompt: &str,
config: &Config, config: &Config,
engine_state: &EngineState, engine_state: &EngineState,
stack: &mut Stack, stack: &mut Stack,
nu_prompt: &NushellPrompt, ) -> Option<String> {
) -> Box<dyn Prompt> { get_prompt_string(transient_prompt, config, engine_state, stack)
let mut nu_prompt = nu_prompt.clone(); .or_else(|| get_prompt_string(prompt, config, engine_state, stack))
if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_COMMAND, config, engine_state, stack) {
nu_prompt.update_prompt_left(Some(s))
} }
if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_COMMAND_RIGHT, config, engine_state, stack) impl Prompt for TransientPrompt {
{ fn render_prompt_left(&self) -> Cow<str> {
nu_prompt.update_prompt_right(Some(s), config.render_right_prompt_on_last_line) let mut nu_prompt = NushellPrompt::new();
let config = &self.engine_state.get_config().clone();
let mut stack = self.stack.clone();
nu_prompt.update_prompt_left(get_transient_prompt_string(
TRANSIENT_PROMPT_COMMAND,
PROMPT_COMMAND,
config,
&self.engine_state,
&mut stack,
));
nu_prompt.render_prompt_left().to_string().into()
} }
if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_INDICATOR, config, engine_state, stack) { fn render_prompt_right(&self) -> Cow<str> {
nu_prompt.update_prompt_indicator(Some(s)) let mut nu_prompt = NushellPrompt::new();
let config = &self.engine_state.get_config().clone();
let mut stack = self.stack.clone();
nu_prompt.update_prompt_right(
get_transient_prompt_string(
TRANSIENT_PROMPT_COMMAND_RIGHT,
PROMPT_COMMAND_RIGHT,
config,
&self.engine_state,
&mut stack,
),
config.render_right_prompt_on_last_line,
);
nu_prompt.render_prompt_right().to_string().into()
} }
if let Some(s) = get_prompt_string(
fn render_prompt_indicator(&self, prompt_mode: reedline::PromptEditMode) -> Cow<str> {
let mut nu_prompt = NushellPrompt::new();
let config = &self.engine_state.get_config().clone();
let mut stack = self.stack.clone();
nu_prompt.update_prompt_indicator(get_transient_prompt_string(
TRANSIENT_PROMPT_INDICATOR,
PROMPT_INDICATOR,
config,
&self.engine_state,
&mut stack,
));
nu_prompt.update_prompt_vi_insert(get_transient_prompt_string(
TRANSIENT_PROMPT_INDICATOR_VI_INSERT, TRANSIENT_PROMPT_INDICATOR_VI_INSERT,
PROMPT_INDICATOR_VI_INSERT,
config, config,
engine_state, &self.engine_state,
stack, &mut stack,
) { ));
nu_prompt.update_prompt_vi_insert(Some(s)) nu_prompt.update_prompt_vi_normal(get_transient_prompt_string(
}
if let Some(s) = get_prompt_string(
TRANSIENT_PROMPT_INDICATOR_VI_NORMAL, TRANSIENT_PROMPT_INDICATOR_VI_NORMAL,
PROMPT_INDICATOR_VI_NORMAL,
config, config,
engine_state, &self.engine_state,
stack, &mut stack,
) { ));
nu_prompt.update_prompt_vi_normal(Some(s)) nu_prompt
.render_prompt_indicator(prompt_mode)
.to_string()
.into()
} }
if let Some(s) = get_prompt_string( fn render_prompt_multiline_indicator(&self) -> Cow<str> {
let mut nu_prompt = NushellPrompt::new();
let config = &self.engine_state.get_config().clone();
let mut stack = self.stack.clone();
nu_prompt.update_prompt_multiline(get_transient_prompt_string(
TRANSIENT_PROMPT_MULTILINE_INDICATOR, TRANSIENT_PROMPT_MULTILINE_INDICATOR,
PROMPT_MULTILINE_INDICATOR,
config, config,
engine_state, &self.engine_state,
stack, &mut stack,
) { ));
nu_prompt.update_prompt_multiline(Some(s)) nu_prompt
.render_prompt_multiline_indicator()
.to_string()
.into()
} }
Box::new(nu_prompt) fn render_prompt_history_search_indicator(
&self,
history_search: reedline::PromptHistorySearch,
) -> Cow<str> {
NushellPrompt::new()
.render_prompt_history_search_indicator(history_search)
.to_string()
.into()
}
}
/// Construct the transient prompt
pub(crate) fn transient_prompt(engine_state: Arc<EngineState>, stack: &Stack) -> Box<dyn Prompt> {
Box::new(TransientPrompt {
engine_state,
stack: stack.clone(),
})
} }
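
For reference, the `PRE_PROMPT_MARKER` and `POST_PROMPT_MARKER` constants in this file are the OSC 133 "semantic prompt" escape sequences described in the comment above; a minimal sketch of how a rendered prompt gets wrapped, mirroring the `format!` call in `prompt.rs` earlier in this diff:

```rust
const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";

// Wrap the prompt so terminals that understand these markers can tell apart
// the prompt, the typed command, and the command output:
// <133 A><prompt><133 B><command><133 C><command output>
fn ansify_prompt(prompt: &str) -> String {
    format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}")
}

fn main() {
    println!("{}", ansify_prompt("~/nushell> "));
}
```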

View File

@ -1,20 +1,18 @@
use super::DescriptionMenu;
use crate::{menus::NuMenuCompleter, NuHelpCompleter}; use crate::{menus::NuMenuCompleter, NuHelpCompleter};
use crossterm::event::{KeyCode, KeyModifiers}; use crossterm::event::{KeyCode, KeyModifiers};
use log::trace;
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style}; use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
use nu_engine::eval_block; use nu_engine::eval_block;
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::{ use nu_protocol::{
create_menus, create_menus,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
extract_value, Config, EditBindings, ParsedKeybinding, ParsedMenu, PipelineData, Record, extract_value, Config, EditBindings, ParsedKeybinding, ParsedMenu, PipelineData, Record,
ShellError, Span, Value, ShellError, Span, Value,
}; };
use reedline::{ use reedline::{
default_emacs_keybindings, default_vi_insert_keybindings, default_vi_normal_keybindings, default_emacs_keybindings, default_vi_insert_keybindings, default_vi_normal_keybindings,
ColumnarMenu, DescriptionMenu, DescriptionMode, EditCommand, IdeMenu, Keybindings, ListMenu, ColumnarMenu, EditCommand, Keybindings, ListMenu, Reedline, ReedlineEvent, ReedlineMenu,
MenuBuilder, Reedline, ReedlineEvent, ReedlineMenu,
}; };
use std::sync::Arc; use std::sync::Arc;
@ -79,7 +77,6 @@ pub(crate) fn add_menus(
stack: &Stack, stack: &Stack,
config: &Config, config: &Config,
) -> Result<Reedline, ShellError> { ) -> Result<Reedline, ShellError> {
trace!("add_menus: config: {:#?}", &config);
line_editor = line_editor.clear_menus(); line_editor = line_editor.clear_menus();
for menu in &config.menus { for menu in &config.menus {
@ -87,7 +84,7 @@ pub(crate) fn add_menus(
} }
// Checking if the default menus have been added from the config file // Checking if the default menus have been added from the config file
let default_menus = [ let default_menus = vec![
("completion_menu", DEFAULT_COMPLETION_MENU), ("completion_menu", DEFAULT_COMPLETION_MENU),
("history_menu", DEFAULT_HISTORY_MENU), ("history_menu", DEFAULT_HISTORY_MENU),
("help_menu", DEFAULT_HELP_MENU), ("help_menu", DEFAULT_HELP_MENU),
@ -97,7 +94,7 @@ pub(crate) fn add_menus(
if !config if !config
.menus .menus
.iter() .iter()
.any(|menu| menu.name.to_expanded_string("", config) == name) .any(|menu| menu.name.into_string("", config) == name)
{ {
let (block, _) = { let (block, _) = {
let mut working_set = StateWorkingSet::new(&engine_state); let mut working_set = StateWorkingSet::new(&engine_state);
@ -111,9 +108,9 @@ pub(crate) fn add_menus(
(output, working_set.render()) (output, working_set.render())
}; };
let mut temp_stack = Stack::new().capture(); let mut temp_stack = Stack::new();
let input = PipelineData::Empty; let input = PipelineData::Empty;
let res = eval_block::<WithoutDebug>(&engine_state, &mut temp_stack, &block, input)?; let res = eval_block(&engine_state, &mut temp_stack, &block, input, false, false)?;
if let PipelineData::Value(value, None) = res { if let PipelineData::Value(value, None) = res {
for menu in create_menus(&value)? { for menu in create_menus(&value)? {
@ -136,25 +133,24 @@ fn add_menu(
) -> Result<Reedline, ShellError> { ) -> Result<Reedline, ShellError> {
let span = menu.menu_type.span(); let span = menu.menu_type.span();
if let Value::Record { val, .. } = &menu.menu_type { if let Value::Record { val, .. } = &menu.menu_type {
let layout = extract_value("layout", val, span)?.to_expanded_string("", config); let layout = extract_value("layout", val, span)?.into_string("", config);
match layout.as_str() { match layout.as_str() {
"columnar" => add_columnar_menu(line_editor, menu, engine_state, stack, config), "columnar" => add_columnar_menu(line_editor, menu, engine_state, stack, config),
"list" => add_list_menu(line_editor, menu, engine_state, stack, config), "list" => add_list_menu(line_editor, menu, engine_state, stack, config),
"ide" => add_ide_menu(line_editor, menu, engine_state, stack, config),
"description" => add_description_menu(line_editor, menu, engine_state, stack, config), "description" => add_description_menu(line_editor, menu, engine_state, stack, config),
_ => Err(ShellError::UnsupportedConfigValue { _ => Err(ShellError::UnsupportedConfigValue(
expected: "columnar, list, ide or description".to_string(), "columnar, list or description".to_string(),
value: menu.menu_type.to_abbreviated_string(config), menu.menu_type.into_abbreviated_string(config),
span: menu.menu_type.span(), menu.menu_type.span(),
}), )),
} }
} else { } else {
Err(ShellError::UnsupportedConfigValue { Err(ShellError::UnsupportedConfigValue(
expected: "only record type".to_string(), "only record type".to_string(),
value: menu.menu_type.to_abbreviated_string(config), menu.menu_type.into_abbreviated_string(config),
span: menu.menu_type.span(), menu.menu_type.span(),
}) ))
} }
} }
@ -184,7 +180,7 @@ pub(crate) fn add_columnar_menu(
config: &Config, config: &Config,
) -> Result<Reedline, ShellError> { ) -> Result<Reedline, ShellError> {
let span = menu.menu_type.span(); let span = menu.menu_type.span();
let name = menu.name.to_expanded_string("", config); let name = menu.name.into_string("", config);
let mut columnar_menu = ColumnarMenu::default().with_name(&name); let mut columnar_menu = ColumnarMenu::default().with_name(&name);
if let Value::Record { val, .. } = &menu.menu_type { if let Value::Record { val, .. } = &menu.menu_type {
@ -239,26 +235,10 @@ pub(crate) fn add_columnar_menu(
columnar_menu, columnar_menu,
ColumnarMenu::with_description_text_style ColumnarMenu::with_description_text_style
); );
add_style!(
"match_text",
val,
span,
config,
columnar_menu,
ColumnarMenu::with_match_text_style
);
add_style!(
"selected_match_text",
val,
span,
config,
columnar_menu,
ColumnarMenu::with_selected_match_text_style
);
} }
let marker = menu.marker.to_expanded_string("", config); let marker = menu.marker.into_string("", config);
columnar_menu = columnar_menu.with_marker(&marker); columnar_menu = columnar_menu.with_marker(marker);
let only_buffer_difference = menu.only_buffer_difference.as_bool()?; let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
columnar_menu = columnar_menu.with_only_buffer_difference(only_buffer_difference); columnar_menu = columnar_menu.with_only_buffer_difference(only_buffer_difference);
@ -281,11 +261,11 @@ pub(crate) fn add_columnar_menu(
completer: Box::new(menu_completer), completer: Box::new(menu_completer),
})) }))
} }
_ => Err(ShellError::UnsupportedConfigValue { _ => Err(ShellError::UnsupportedConfigValue(
expected: "block or omitted value".to_string(), "block or omitted value".to_string(),
value: menu.source.to_abbreviated_string(config), menu.source.into_abbreviated_string(config),
span, span,
}), )),
} }
} }
@ -297,7 +277,7 @@ pub(crate) fn add_list_menu(
stack: &Stack, stack: &Stack,
config: &Config, config: &Config,
) -> Result<Reedline, ShellError> { ) -> Result<Reedline, ShellError> {
let name = menu.name.to_expanded_string("", config); let name = menu.name.into_string("", config);
let mut list_menu = ListMenu::default().with_name(&name); let mut list_menu = ListMenu::default().with_name(&name);
let span = menu.menu_type.span(); let span = menu.menu_type.span();
@ -339,8 +319,8 @@ pub(crate) fn add_list_menu(
); );
} }
let marker = menu.marker.to_expanded_string("", config); let marker = menu.marker.into_string("", config);
list_menu = list_menu.with_marker(&marker); list_menu = list_menu.with_marker(marker);
let only_buffer_difference = menu.only_buffer_difference.as_bool()?; let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
list_menu = list_menu.with_only_buffer_difference(only_buffer_difference); list_menu = list_menu.with_only_buffer_difference(only_buffer_difference);
@ -363,234 +343,11 @@ pub(crate) fn add_list_menu(
completer: Box::new(menu_completer), completer: Box::new(menu_completer),
})) }))
} }
_ => Err(ShellError::UnsupportedConfigValue { _ => Err(ShellError::UnsupportedConfigValue(
expected: "block or omitted value".to_string(), "block or omitted value".to_string(),
value: menu.source.to_abbreviated_string(config), menu.source.into_abbreviated_string(config),
span: menu.source.span(), menu.source.span(),
}), )),
}
}
// Adds an IDE menu to the line editor
pub(crate) fn add_ide_menu(
line_editor: Reedline,
menu: &ParsedMenu,
engine_state: Arc<EngineState>,
stack: &Stack,
config: &Config,
) -> Result<Reedline, ShellError> {
let span = menu.menu_type.span();
let name = menu.name.to_expanded_string("", config);
let mut ide_menu = IdeMenu::default().with_name(&name);
if let Value::Record { val, .. } = &menu.menu_type {
ide_menu = match extract_value("min_completion_width", val, span) {
Ok(min_completion_width) => {
let min_completion_width = min_completion_width.as_int()?;
ide_menu.with_min_completion_width(min_completion_width as u16)
}
Err(_) => ide_menu,
};
ide_menu = match extract_value("max_completion_width", val, span) {
Ok(max_completion_width) => {
let max_completion_width = max_completion_width.as_int()?;
ide_menu.with_max_completion_width(max_completion_width as u16)
}
Err(_) => ide_menu,
};
ide_menu = match extract_value("max_completion_height", val, span) {
Ok(max_completion_height) => {
let max_completion_height = max_completion_height.as_int()?;
ide_menu.with_max_completion_height(max_completion_height as u16)
}
Err(_) => ide_menu.with_max_completion_height(10u16),
};
ide_menu = match extract_value("padding", val, span) {
Ok(padding) => {
let padding = padding.as_int()?;
ide_menu.with_padding(padding as u16)
}
Err(_) => ide_menu,
};
ide_menu = match extract_value("border", val, span) {
Ok(border) => {
if let Ok(border) = border.as_bool() {
if border {
ide_menu.with_default_border()
} else {
ide_menu
}
} else if let Ok(border_chars) = border.as_record() {
let top_right = extract_value("top_right", border_chars, span)?.as_char()?;
let top_left = extract_value("top_left", border_chars, span)?.as_char()?;
let bottom_right =
extract_value("bottom_right", border_chars, span)?.as_char()?;
let bottom_left =
extract_value("bottom_left", border_chars, span)?.as_char()?;
let horizontal = extract_value("horizontal", border_chars, span)?.as_char()?;
let vertical = extract_value("vertical", border_chars, span)?.as_char()?;
ide_menu.with_border(
top_right,
top_left,
bottom_right,
bottom_left,
horizontal,
vertical,
)
} else {
return Err(ShellError::UnsupportedConfigValue {
expected: "bool or record".to_string(),
value: border.to_abbreviated_string(config),
span: border.span(),
});
}
}
Err(_) => ide_menu.with_default_border(),
};
ide_menu = match extract_value("cursor_offset", val, span) {
Ok(cursor_offset) => {
let cursor_offset = cursor_offset.as_int()?;
ide_menu.with_cursor_offset(cursor_offset as i16)
}
Err(_) => ide_menu,
};
ide_menu = match extract_value("description_mode", val, span) {
Ok(description_mode) => match description_mode.coerce_str()?.as_ref() {
"left" => ide_menu.with_description_mode(DescriptionMode::Left),
"right" => ide_menu.with_description_mode(DescriptionMode::Right),
"prefer_right" => ide_menu.with_description_mode(DescriptionMode::PreferRight),
_ => {
return Err(ShellError::UnsupportedConfigValue {
expected: "\"left\", \"right\" or \"prefer_right\"".to_string(),
value: description_mode.to_abbreviated_string(config),
span: description_mode.span(),
});
}
},
Err(_) => ide_menu,
};
ide_menu = match extract_value("min_description_width", val, span) {
Ok(min_description_width) => {
let min_description_width = min_description_width.as_int()?;
ide_menu.with_min_description_width(min_description_width as u16)
}
Err(_) => ide_menu,
};
ide_menu = match extract_value("max_description_width", val, span) {
Ok(max_description_width) => {
let max_description_width = max_description_width.as_int()?;
ide_menu.with_max_description_width(max_description_width as u16)
}
Err(_) => ide_menu,
};
ide_menu = match extract_value("max_description_height", val, span) {
Ok(max_description_height) => {
let max_description_height = max_description_height.as_int()?;
ide_menu.with_max_description_height(max_description_height as u16)
}
Err(_) => ide_menu,
};
ide_menu = match extract_value("description_offset", val, span) {
Ok(description_padding) => {
let description_padding = description_padding.as_int()?;
ide_menu.with_description_offset(description_padding as u16)
}
Err(_) => ide_menu,
};
ide_menu = match extract_value("correct_cursor_pos", val, span) {
Ok(correct_cursor_pos) => {
let correct_cursor_pos = correct_cursor_pos.as_bool()?;
ide_menu.with_correct_cursor_pos(correct_cursor_pos)
}
Err(_) => ide_menu,
};
}
let span = menu.style.span();
if let Value::Record { val, .. } = &menu.style {
add_style!(
"text",
val,
span,
config,
ide_menu,
IdeMenu::with_text_style
);
add_style!(
"selected_text",
val,
span,
config,
ide_menu,
IdeMenu::with_selected_text_style
);
add_style!(
"description_text",
val,
span,
config,
ide_menu,
IdeMenu::with_description_text_style
);
add_style!(
"match_text",
val,
span,
config,
ide_menu,
IdeMenu::with_match_text_style
);
add_style!(
"selected_match_text",
val,
span,
config,
ide_menu,
IdeMenu::with_selected_match_text_style
);
}
let marker = menu.marker.to_expanded_string("", config);
ide_menu = ide_menu.with_marker(&marker);
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
ide_menu = ide_menu.with_only_buffer_difference(only_buffer_difference);
let span = menu.source.span();
match &menu.source {
Value::Nothing { .. } => {
Ok(line_editor.with_menu(ReedlineMenu::EngineCompleter(Box::new(ide_menu))))
}
Value::Closure { val, .. } => {
let menu_completer = NuMenuCompleter::new(
val.block_id,
span,
stack.captures_to_stack(val.captures.clone()),
engine_state,
only_buffer_difference,
);
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
menu: Box::new(ide_menu),
completer: Box::new(menu_completer),
}))
}
_ => Err(ShellError::UnsupportedConfigValue {
expected: "block or omitted value".to_string(),
value: menu.source.to_abbreviated_string(config),
span,
}),
} }
} }
@ -602,7 +359,7 @@ pub(crate) fn add_description_menu(
stack: &Stack, stack: &Stack,
config: &Config, config: &Config,
) -> Result<Reedline, ShellError> { ) -> Result<Reedline, ShellError> {
let name = menu.name.to_expanded_string("", config); let name = menu.name.into_string("", config);
let mut description_menu = DescriptionMenu::default().with_name(&name); let mut description_menu = DescriptionMenu::default().with_name(&name);
let span = menu.menu_type.span(); let span = menu.menu_type.span();
@ -676,8 +433,8 @@ pub(crate) fn add_description_menu(
); );
} }
let marker = menu.marker.to_expanded_string("", config); let marker = menu.marker.into_string("", config);
description_menu = description_menu.with_marker(&marker); description_menu = description_menu.with_marker(marker);
let only_buffer_difference = menu.only_buffer_difference.as_bool()?; let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
description_menu = description_menu.with_only_buffer_difference(only_buffer_difference); description_menu = description_menu.with_only_buffer_difference(only_buffer_difference);
@ -704,11 +461,11 @@ pub(crate) fn add_description_menu(
completer: Box::new(menu_completer), completer: Box::new(menu_completer),
})) }))
} }
_ => Err(ShellError::UnsupportedConfigValue { _ => Err(ShellError::UnsupportedConfigValue(
expected: "closure or omitted value".to_string(), "closure or omitted value".to_string(),
value: menu.source.to_abbreviated_string(config), menu.source.into_abbreviated_string(config),
span: menu.source.span(), menu.source.span(),
}), )),
} }
} }
@ -823,11 +580,11 @@ fn add_keybinding(
"emacs" => add_parsed_keybinding(emacs_keybindings, keybinding, config), "emacs" => add_parsed_keybinding(emacs_keybindings, keybinding, config),
"vi_insert" => add_parsed_keybinding(insert_keybindings, keybinding, config), "vi_insert" => add_parsed_keybinding(insert_keybindings, keybinding, config),
"vi_normal" => add_parsed_keybinding(normal_keybindings, keybinding, config), "vi_normal" => add_parsed_keybinding(normal_keybindings, keybinding, config),
m => Err(ShellError::UnsupportedConfigValue { m => Err(ShellError::UnsupportedConfigValue(
expected: "emacs, vi_insert or vi_normal".to_string(), "emacs, vi_insert or vi_normal".to_string(),
value: m.to_string(), m.to_string(),
span, span,
}), )),
}, },
Value::List { vals, .. } => { Value::List { vals, .. } => {
for inner_mode in vals { for inner_mode in vals {
@ -843,11 +600,11 @@ fn add_keybinding(
Ok(()) Ok(())
} }
v => Err(ShellError::UnsupportedConfigValue { v => Err(ShellError::UnsupportedConfigValue(
expected: "string or list of strings".to_string(), "string or list of strings".to_string(),
value: v.to_abbreviated_string(config), v.into_abbreviated_string(config),
span: v.span(), v.span(),
}), )),
} }
} }
@ -858,7 +615,7 @@ fn add_parsed_keybinding(
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let modifier = match keybinding let modifier = match keybinding
.modifier .modifier
.to_expanded_string("", config) .into_string("", config)
.to_ascii_lowercase() .to_ascii_lowercase()
.as_str() .as_str()
{ {
@ -873,17 +630,17 @@ fn add_parsed_keybinding(
KeyModifiers::CONTROL | KeyModifiers::ALT | KeyModifiers::SHIFT KeyModifiers::CONTROL | KeyModifiers::ALT | KeyModifiers::SHIFT
} }
_ => { _ => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "CONTROL, SHIFT, ALT or NONE".to_string(), "CONTROL, SHIFT, ALT or NONE".to_string(),
value: keybinding.modifier.to_abbreviated_string(config), keybinding.modifier.into_abbreviated_string(config),
span: keybinding.modifier.span(), keybinding.modifier.span(),
}) ))
} }
}; };
let keycode = match keybinding let keycode = match keybinding
.keycode .keycode
.to_expanded_string("", config) .into_string("", config)
.to_ascii_lowercase() .to_ascii_lowercase()
.as_str() .as_str()
{ {
@ -897,11 +654,11 @@ fn add_parsed_keybinding(
let char = if let (Some(char), None) = (pos1, pos2) { let char = if let (Some(char), None) = (pos1, pos2) {
char char
} else { } else {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "char_<CHAR: unicode codepoint>".to_string(), "char_<CHAR: unicode codepoint>".to_string(),
value: c.to_string(), c.to_string(),
span: keybinding.keycode.span(), keybinding.keycode.span(),
}); ));
}; };
KeyCode::Char(char) KeyCode::Char(char)
@ -924,21 +681,21 @@ fn add_parsed_keybinding(
.parse() .parse()
.ok() .ok()
.filter(|num| matches!(num, 1..=20)) .filter(|num| matches!(num, 1..=20))
.ok_or(ShellError::UnsupportedConfigValue { .ok_or(ShellError::UnsupportedConfigValue(
expected: "(f1|f2|...|f20)".to_string(), "(f1|f2|...|f20)".to_string(),
value: format!("unknown function key: {c}"), format!("unknown function key: {c}"),
span: keybinding.keycode.span(), keybinding.keycode.span(),
})?; ))?;
KeyCode::F(fn_num) KeyCode::F(fn_num)
} }
"null" => KeyCode::Null, "null" => KeyCode::Null,
"esc" | "escape" => KeyCode::Esc, "esc" | "escape" => KeyCode::Esc,
_ => { _ => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "crossterm KeyCode".to_string(), "crossterm KeyCode".to_string(),
value: keybinding.keycode.to_abbreviated_string(config), keybinding.keycode.into_abbreviated_string(config),
span: keybinding.keycode.span(), keybinding.keycode.span(),
}) ))
} }
}; };
if let Some(event) = parse_event(&keybinding.event, config)? { if let Some(event) = parse_event(&keybinding.event, config)? {
@ -962,10 +719,7 @@ impl<'config> EventType<'config> {
.map(Self::Send) .map(Self::Send)
.or_else(|_| extract_value("edit", record, span).map(Self::Edit)) .or_else(|_| extract_value("edit", record, span).map(Self::Edit))
.or_else(|_| extract_value("until", record, span).map(Self::Until)) .or_else(|_| extract_value("until", record, span).map(Self::Until))
.map_err(|_| ShellError::MissingConfigValue { .map_err(|_| ShellError::MissingConfigValue("send, edit or until".to_string(), span))
missing_value: "send, edit or until".to_string(),
span,
})
} }
} }
@ -974,10 +728,7 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
match value { match value {
Value::Record { val: record, .. } => match EventType::try_from_record(record, span)? { Value::Record { val: record, .. } => match EventType::try_from_record(record, span)? {
EventType::Send(value) => event_from_record( EventType::Send(value) => event_from_record(
value value.into_string("", config).to_ascii_lowercase().as_str(),
.to_expanded_string("", config)
.to_ascii_lowercase()
.as_str(),
record, record,
config, config,
span, span,
@ -985,10 +736,7 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
.map(Some), .map(Some),
EventType::Edit(value) => { EventType::Edit(value) => {
let edit = edit_from_record( let edit = edit_from_record(
value value.into_string("", config).to_ascii_lowercase().as_str(),
.to_expanded_string("", config)
.to_ascii_lowercase()
.as_str(),
record, record,
config, config,
span, span,
@ -1001,11 +749,11 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
.iter() .iter()
.map(|value| match parse_event(value, config) { .map(|value| match parse_event(value, config) {
Ok(inner) => match inner { Ok(inner) => match inner {
None => Err(ShellError::UnsupportedConfigValue { None => Err(ShellError::UnsupportedConfigValue(
expected: "List containing valid events".to_string(), "List containing valid events".to_string(),
value: "Nothing value (null)".to_string(), "Nothing value (null)".to_string(),
span: value.span(), value.span(),
}), )),
Some(event) => Ok(event), Some(event) => Ok(event),
}, },
Err(e) => Err(e), Err(e) => Err(e),
@ -1014,11 +762,11 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
Ok(Some(ReedlineEvent::UntilFound(events))) Ok(Some(ReedlineEvent::UntilFound(events)))
} }
v => Err(ShellError::UnsupportedConfigValue { v => Err(ShellError::UnsupportedConfigValue(
expected: "list of events".to_string(), "list of events".to_string(),
value: v.to_abbreviated_string(config), v.into_abbreviated_string(config),
span: v.span(), v.span(),
}), )),
}, },
}, },
Value::List { vals, .. } => { Value::List { vals, .. } => {
@ -1026,11 +774,11 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
.iter() .iter()
.map(|value| match parse_event(value, config) { .map(|value| match parse_event(value, config) {
Ok(inner) => match inner { Ok(inner) => match inner {
None => Err(ShellError::UnsupportedConfigValue { None => Err(ShellError::UnsupportedConfigValue(
expected: "List containing valid events".to_string(), "List containing valid events".to_string(),
value: "Nothing value (null)".to_string(), "Nothing value (null)".to_string(),
span: value.span(), value.span(),
}), )),
Some(event) => Ok(event), Some(event) => Ok(event),
}, },
Err(e) => Err(e), Err(e) => Err(e),
@ -1040,11 +788,11 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
Ok(Some(ReedlineEvent::Multiple(events))) Ok(Some(ReedlineEvent::Multiple(events)))
} }
Value::Nothing { .. } => Ok(None), Value::Nothing { .. } => Ok(None),
v => Err(ShellError::UnsupportedConfigValue { v => Err(ShellError::UnsupportedConfigValue(
expected: "record or list of records, null to unbind key".to_string(), "record or list of records, null to unbind key".to_string(),
value: v.to_abbreviated_string(config), v.into_abbreviated_string(config),
span: v.span(), v.span(),
}), )),
} }
} }
@ -1085,18 +833,18 @@ fn event_from_record(
"openeditor" => ReedlineEvent::OpenEditor, "openeditor" => ReedlineEvent::OpenEditor,
"menu" => { "menu" => {
let menu = extract_value("name", record, span)?; let menu = extract_value("name", record, span)?;
ReedlineEvent::Menu(menu.to_expanded_string("", config)) ReedlineEvent::Menu(menu.into_string("", config))
} }
"executehostcommand" => { "executehostcommand" => {
let cmd = extract_value("cmd", record, span)?; let cmd = extract_value("cmd", record, span)?;
ReedlineEvent::ExecuteHostCommand(cmd.to_expanded_string("", config)) ReedlineEvent::ExecuteHostCommand(cmd.into_string("", config))
} }
v => { v => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "Reedline event".to_string(), "Reedline event".to_string(),
value: v.to_string(), v.to_string(),
span, span,
}) ))
} }
}; };
@ -1110,82 +858,22 @@ fn edit_from_record(
span: Span, span: Span,
) -> Result<EditCommand, ShellError> { ) -> Result<EditCommand, ShellError> {
let edit = match name { let edit = match name {
"movetostart" => EditCommand::MoveToStart { "movetostart" => EditCommand::MoveToStart,
select: extract_value("select", record, span) "movetolinestart" => EditCommand::MoveToLineStart,
.and_then(|value| value.as_bool()) "movetoend" => EditCommand::MoveToEnd,
.unwrap_or(false), "movetolineend" => EditCommand::MoveToLineEnd,
}, "moveleft" => EditCommand::MoveLeft,
"movetolinestart" => EditCommand::MoveToLineStart { "moveright" => EditCommand::MoveRight,
select: extract_value("select", record, span) "movewordleft" => EditCommand::MoveWordLeft,
.and_then(|value| value.as_bool()) "movebigwordleft" => EditCommand::MoveBigWordLeft,
.unwrap_or(false), "movewordright" => EditCommand::MoveWordRight,
}, "movewordrightend" => EditCommand::MoveWordRightEnd,
"movebigwordrightend" => EditCommand::MoveBigWordRightEnd,
"movetoend" => EditCommand::MoveToEnd { "movewordrightstart" => EditCommand::MoveWordRightStart,
select: extract_value("select", record, span) "movebigwordrightstart" => EditCommand::MoveBigWordRightStart,
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movetolineend" => EditCommand::MoveToLineEnd {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"moveleft" => EditCommand::MoveLeft {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"moveright" => EditCommand::MoveRight {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movewordleft" => EditCommand::MoveWordLeft {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movebigwordleft" => EditCommand::MoveBigWordLeft {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movewordright" => EditCommand::MoveWordRight {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movewordrightend" => EditCommand::MoveWordRightEnd {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movebigwordrightend" => EditCommand::MoveBigWordRightEnd {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movewordrightstart" => EditCommand::MoveWordRightStart {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movebigwordrightstart" => EditCommand::MoveBigWordRightStart {
select: extract_value("select", record, span)
.and_then(|value| value.as_bool())
.unwrap_or(false),
},
"movetoposition" => { "movetoposition" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
let select = extract_value("select", record, span) EditCommand::MoveToPosition(value.as_int()? as usize)
.and_then(|value| value.as_bool())
.unwrap_or(false);
EditCommand::MoveToPosition {
position: value.as_int()? as usize,
select,
}
} }
"insertchar" => { "insertchar" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
@ -1194,7 +882,7 @@ fn edit_from_record(
} }
"insertstring" => { "insertstring" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
EditCommand::InsertString(value.to_expanded_string("", config)) EditCommand::InsertString(value.into_string("", config))
} }
"insertnewline" => EditCommand::InsertNewline, "insertnewline" => EditCommand::InsertNewline,
"backspace" => EditCommand::Backspace, "backspace" => EditCommand::Backspace,
@ -1237,18 +925,12 @@ fn edit_from_record(
"moverightuntil" => { "moverightuntil" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
let char = extract_char(value, config)?; let char = extract_char(value, config)?;
let select = extract_value("select", record, span) EditCommand::MoveRightUntil(char)
.and_then(|value| value.as_bool())
.unwrap_or(false);
EditCommand::MoveRightUntil { c: char, select }
} }
"moverightbefore" => { "moverightbefore" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
let char = extract_char(value, config)?; let char = extract_char(value, config)?;
let select = extract_value("select", record, span) EditCommand::MoveRightBefore(char)
.and_then(|value| value.as_bool())
.unwrap_or(false);
EditCommand::MoveRightBefore { c: char, select }
} }
"cutleftuntil" => { "cutleftuntil" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
@ -1263,36 +945,20 @@ fn edit_from_record(
"moveleftuntil" => { "moveleftuntil" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
let char = extract_char(value, config)?; let char = extract_char(value, config)?;
let select = extract_value("select", record, span) EditCommand::MoveLeftUntil(char)
.and_then(|value| value.as_bool())
.unwrap_or(false);
EditCommand::MoveLeftUntil { c: char, select }
} }
"moveleftbefore" => { "moveleftbefore" => {
let value = extract_value("value", record, span)?; let value = extract_value("value", record, span)?;
let char = extract_char(value, config)?; let char = extract_char(value, config)?;
let select = extract_value("select", record, span) EditCommand::MoveLeftBefore(char)
.and_then(|value| value.as_bool())
.unwrap_or(false);
EditCommand::MoveLeftBefore { c: char, select }
} }
"complete" => EditCommand::Complete, "complete" => EditCommand::Complete,
"cutselection" => EditCommand::CutSelection,
#[cfg(feature = "system-clipboard")]
"cutselectionsystem" => EditCommand::CutSelectionSystem,
"copyselection" => EditCommand::CopySelection,
#[cfg(feature = "system-clipboard")]
"copyselectionsystem" => EditCommand::CopySelectionSystem,
"paste" => EditCommand::Paste,
#[cfg(feature = "system-clipboard")]
"pastesystem" => EditCommand::PasteSystem,
"selectall" => EditCommand::SelectAll,
e => { e => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "reedline EditCommand".to_string(), "reedline EditCommand".to_string(),
value: e.to_string(), e.to_string(),
span, span,
}) ))
} }
}; };
@ -1302,20 +968,18 @@ fn edit_from_record(
fn extract_char(value: &Value, config: &Config) -> Result<char, ShellError> { fn extract_char(value: &Value, config: &Config) -> Result<char, ShellError> {
let span = value.span(); let span = value.span();
value value
.to_expanded_string("", config) .into_string("", config)
.chars() .chars()
.next() .next()
.ok_or_else(|| ShellError::MissingConfigValue { .ok_or_else(|| ShellError::MissingConfigValue("char to insert".to_string(), span))
missing_value: "char to insert".to_string(),
span,
})
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*;
use nu_protocol::record; use nu_protocol::record;
use super::*;
#[test] #[test]
fn test_send_event() { fn test_send_event() {
let event = record! { let event = record! {
@ -1437,59 +1101,6 @@ mod test {
let span = Span::test_data(); let span = Span::test_data();
let b = EventType::try_from_record(&event, span); let b = EventType::try_from_record(&event, span);
assert!(matches!(b, Err(ShellError::MissingConfigValue { .. }))); assert!(matches!(b, Err(ShellError::MissingConfigValue(_, _))));
}
#[test]
fn test_move_without_optional_select() {
let event = record! {
"edit" => Value::test_string("moveleft")
};
let event = Value::test_record(event);
let config = Config::default();
let parsed_event = parse_event(&event, &config).unwrap();
assert_eq!(
parsed_event,
Some(ReedlineEvent::Edit(vec![EditCommand::MoveLeft {
select: false
}]))
);
}
#[test]
fn test_move_with_select_false() {
let event = record! {
"edit" => Value::test_string("moveleft"),
"select" => Value::test_bool(false)
};
let event = Value::test_record(event);
let config = Config::default();
let parsed_event = parse_event(&event, &config).unwrap();
assert_eq!(
parsed_event,
Some(ReedlineEvent::Edit(vec![EditCommand::MoveLeft {
select: false
}]))
);
}
#[test]
fn test_move_with_select_true() {
let event = record! {
"edit" => Value::test_string("moveleft"),
"select" => Value::test_bool(true)
};
let event = Value::test_record(event);
let config = Config::default();
let parsed_event = parse_event(&event, &config).unwrap();
assert_eq!(
parsed_event,
Some(ReedlineEvent::Edit(vec![EditCommand::MoveLeft {
select: true
}]))
);
} }
} }
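A note on reading the hunks above: in this flattened side-by-side export each line shows the newer code first and the older code second (the `nu-cmd-base` Cargo.toml hunk further down pairs `0.92.2` with `0.87.1` the same way). The change that dominates this file is `ShellError` moving from positional tuple variants to struct variants with named fields. Below is a minimal sketch of the newer shape, assuming `nu_protocol` at the 0.92 revision; the function name is made up for illustration and the older call is kept as a comment for contrast.

```rust
// Sketch only, not part of this changeset.
use nu_protocol::{ShellError, Span};

fn unsupported_config_value(value_repr: String, span: Span) -> ShellError {
    // Older shape (positional tuple variant):
    //     ShellError::UnsupportedConfigValue("list of events".to_string(), value_repr, span)
    // Newer shape (named fields), as used throughout the hunks above:
    ShellError::UnsupportedConfigValue {
        expected: "list of events".to_string(),
        value: value_repr,
        span,
    }
}
```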

File diff suppressed because it is too large

View File

@ -1,19 +1,15 @@
use log::trace; use log::trace;
use nu_ansi_term::Style; use nu_ansi_term::Style;
use nu_color_config::{get_matching_brackets_style, get_shape_color}; use nu_color_config::{get_matching_brackets_style, get_shape_color};
use nu_engine::env;
use nu_parser::{flatten_block, parse, FlatShape}; use nu_parser::{flatten_block, parse, FlatShape};
use nu_protocol::{ use nu_protocol::ast::{Argument, Block, Expr, Expression, PipelineElement};
ast::{Argument, Block, Expr, Expression, PipelineRedirection, RecordItem}, use nu_protocol::engine::{EngineState, StateWorkingSet};
engine::{EngineState, Stack, StateWorkingSet}, use nu_protocol::{Config, Span};
Config, Span,
};
use reedline::{Highlighter, StyledText}; use reedline::{Highlighter, StyledText};
use std::sync::Arc; use std::sync::Arc;
pub struct NuHighlighter { pub struct NuHighlighter {
pub engine_state: Arc<EngineState>, pub engine_state: Arc<EngineState>,
pub stack: Arc<Stack>,
pub config: Config, pub config: Config,
} }
@ -21,37 +17,10 @@ impl Highlighter for NuHighlighter {
fn highlight(&self, line: &str, _cursor: usize) -> StyledText { fn highlight(&self, line: &str, _cursor: usize) -> StyledText {
trace!("highlighting: {}", line); trace!("highlighting: {}", line);
let highlight_resolved_externals =
self.engine_state.get_config().highlight_resolved_externals;
let mut working_set = StateWorkingSet::new(&self.engine_state); let mut working_set = StateWorkingSet::new(&self.engine_state);
let block = parse(&mut working_set, None, line.as_bytes(), false); let block = parse(&mut working_set, None, line.as_bytes(), false);
let (shapes, global_span_offset) = { let (shapes, global_span_offset) = {
let mut shapes = flatten_block(&working_set, &block); let shapes = flatten_block(&working_set, &block);
// Highlighting externals has a config point because of concerns that using which to resolve
// externals may slow down things too much.
if highlight_resolved_externals {
for (span, shape) in shapes.iter_mut() {
if *shape == FlatShape::External {
let str_contents =
working_set.get_span_contents(Span::new(span.start, span.end));
let str_word = String::from_utf8_lossy(str_contents).to_string();
let paths = env::path_str(&self.engine_state, &self.stack, *span).ok();
let res = if let Ok(cwd) =
env::current_dir_str(&self.engine_state, &self.stack)
{
which::which_in(str_word, paths.as_ref(), cwd).ok()
} else {
which::which_in_global(str_word, paths.as_ref())
.ok()
.and_then(|mut i| i.next())
};
if res.is_some() {
*shape = FlatShape::ExternalResolved;
}
}
}
}
(shapes, self.engine_state.next_span_start()) (shapes, self.engine_state.next_span_start())
}; };
@ -122,7 +91,6 @@ impl Highlighter for NuHighlighter {
FlatShape::InternalCall(_) => add_colored_token(&shape.1, next_token), FlatShape::InternalCall(_) => add_colored_token(&shape.1, next_token),
FlatShape::External => add_colored_token(&shape.1, next_token), FlatShape::External => add_colored_token(&shape.1, next_token),
FlatShape::ExternalArg => add_colored_token(&shape.1, next_token), FlatShape::ExternalArg => add_colored_token(&shape.1, next_token),
FlatShape::ExternalResolved => add_colored_token(&shape.1, next_token),
FlatShape::Keyword => add_colored_token(&shape.1, next_token), FlatShape::Keyword => add_colored_token(&shape.1, next_token),
FlatShape::Literal => add_colored_token(&shape.1, next_token), FlatShape::Literal => add_colored_token(&shape.1, next_token),
FlatShape::Operator => add_colored_token(&shape.1, next_token), FlatShape::Operator => add_colored_token(&shape.1, next_token),
@ -264,38 +232,24 @@ fn find_matching_block_end_in_block(
) -> Option<usize> { ) -> Option<usize> {
for p in &block.pipelines { for p in &block.pipelines {
for e in &p.elements { for e in &p.elements {
if e.expr.span.contains(global_cursor_offset) { match e {
PipelineElement::Expression(_, e)
| PipelineElement::Redirection(_, _, e)
| PipelineElement::And(_, e)
| PipelineElement::Or(_, e)
| PipelineElement::SameTargetRedirection { cmd: (_, e), .. }
| PipelineElement::SeparateRedirection { out: (_, e), .. } => {
if e.span.contains(global_cursor_offset) {
if let Some(pos) = find_matching_block_end_in_expr( if let Some(pos) = find_matching_block_end_in_expr(
line, line,
working_set, working_set,
&e.expr, e,
global_span_offset, global_span_offset,
global_cursor_offset, global_cursor_offset,
) { ) {
return Some(pos); return Some(pos);
} }
} }
if let Some(redirection) = e.redirection.as_ref() {
match redirection {
PipelineRedirection::Single { target, .. }
| PipelineRedirection::Separate { out: target, .. }
| PipelineRedirection::Separate { err: target, .. }
if target.span().contains(global_cursor_offset) =>
{
if let Some(pos) = target.expr().and_then(|expr| {
find_matching_block_end_in_expr(
line,
working_set,
expr,
global_span_offset,
global_cursor_offset,
)
}) {
return Some(pos);
}
}
_ => {}
} }
} }
} }
@ -349,18 +303,18 @@ fn find_matching_block_end_in_expr(
Expr::Keyword(..) => None, Expr::Keyword(..) => None,
Expr::ValueWithUnit(..) => None, Expr::ValueWithUnit(..) => None,
Expr::DateTime(_) => None, Expr::DateTime(_) => None,
Expr::Filepath(_, _) => None, Expr::Filepath(_) => None,
Expr::Directory(_, _) => None, Expr::Directory(_) => None,
Expr::GlobPattern(_, _) => None, Expr::GlobPattern(_) => None,
Expr::String(_) => None, Expr::String(_) => None,
Expr::CellPath(_) => None, Expr::CellPath(_) => None,
Expr::ImportPattern(_) => None, Expr::ImportPattern(_) => None,
Expr::Overlay(_) => None, Expr::Overlay(_) => None,
Expr::Signature(_) => None, Expr::Signature(_) => None,
Expr::MatchPattern(_) => None,
Expr::MatchBlock(_) => None, Expr::MatchBlock(_) => None,
Expr::Nothing => None, Expr::Nothing => None,
Expr::Garbage => None, Expr::Garbage => None,
Expr::Spread(_) => None,
Expr::Table(hdr, rows) => { Expr::Table(hdr, rows) => {
if expr_last == global_cursor_offset { if expr_last == global_cursor_offset {
@ -392,17 +346,10 @@ fn find_matching_block_end_in_expr(
Some(expr_last) Some(expr_last)
} else { } else {
// cursor is inside record // cursor is inside record
for expr in exprs { for (k, v) in exprs {
match expr {
RecordItem::Pair(k, v) => {
find_in_expr_or_continue!(k); find_in_expr_or_continue!(k);
find_in_expr_or_continue!(v); find_in_expr_or_continue!(v);
} }
RecordItem::Spread(_, record) => {
find_in_expr_or_continue!(record);
}
}
}
None None
} }
} }
@ -413,7 +360,6 @@ fn find_matching_block_end_in_expr(
Argument::Named((_, _, opt_expr)) => opt_expr.as_ref(), Argument::Named((_, _, opt_expr)) => opt_expr.as_ref(),
Argument::Positional(inner_expr) => Some(inner_expr), Argument::Positional(inner_expr) => Some(inner_expr),
Argument::Unknown(inner_expr) => Some(inner_expr), Argument::Unknown(inner_expr) => Some(inner_expr),
Argument::Spread(inner_expr) => Some(inner_expr),
}; };
if let Some(inner_expr) = opt_expr { if let Some(inner_expr) = opt_expr {

View File

@ -1,11 +1,12 @@
use nu_cmd_base::hook::eval_hook; use nu_cmd_base::hook::eval_hook;
use nu_engine::{eval_block, eval_block_with_early_return}; use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents}; use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
use nu_protocol::engine::StateWorkingSet;
use nu_protocol::{ use nu_protocol::{
debugger::WithoutDebug, engine::{EngineState, Stack},
engine::{EngineState, Stack, StateWorkingSet}, print_if_stream, PipelineData, ShellError, Span, Value,
print_if_stream, report_error, report_error_new, PipelineData, ShellError, Span, Value,
}; };
use nu_protocol::{report_error, report_error_new};
#[cfg(windows)] #[cfg(windows)]
use nu_utils::enable_vt_processing; use nu_utils::enable_vt_processing;
use nu_utils::utils::perf; use nu_utils::utils::perf;
@ -42,13 +43,13 @@ fn gather_env_vars(
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error( report_error(
&working_set, &working_set,
&ShellError::GenericError { &ShellError::GenericError(
error: format!("Environment variable was not captured: {env_str}"), format!("Environment variable was not captured: {env_str}"),
msg: "".into(), "".to_string(),
span: None, None,
help: Some(msg.into()), Some(msg.into()),
inner: vec![], Vec::new(),
}, ),
); );
} }
@ -74,15 +75,15 @@ fn gather_env_vars(
let working_set = StateWorkingSet::new(engine_state); let working_set = StateWorkingSet::new(engine_state);
report_error( report_error(
&working_set, &working_set,
&ShellError::GenericError { &ShellError::GenericError(
error: "Current directory is not a valid utf-8 path".into(), "Current directory is not a valid utf-8 path".to_string(),
msg: "".into(), "".to_string(),
span: None, None,
help: Some(format!( Some(format!(
"Retrieving current directory failed: {init_cwd:?} not a valid utf-8 path" "Retrieving current directory failed: {init_cwd:?} not a valid utf-8 path"
)), )),
inner: vec![], Vec::new(),
}, ),
); );
} }
} }
@ -92,8 +93,8 @@ fn gather_env_vars(
let span_offset = engine_state.next_span_start(); let span_offset = engine_state.next_span_start();
engine_state.add_file( engine_state.add_file(
"Host Environment Variables".into(), "Host Environment Variables".to_string(),
fake_env_file.as_bytes().into(), fake_env_file.as_bytes().to_vec(),
); );
let (tokens, _) = lex(fake_env_file.as_bytes(), span_offset, &[], &[], true); let (tokens, _) = lex(fake_env_file.as_bytes(), span_offset, &[], &[], true);
@ -110,7 +111,7 @@ fn gather_env_vars(
let name = if let Some(Token { let name = if let Some(Token {
contents: TokenContents::Item, contents: TokenContents::Item,
span, span,
}) = parts.first() }) = parts.get(0)
{ {
let mut working_set = StateWorkingSet::new(engine_state); let mut working_set = StateWorkingSet::new(engine_state);
let bytes = working_set.get_span_contents(*span); let bytes = working_set.get_span_contents(*span);
@ -219,10 +220,6 @@ pub fn eval_source(
source, source,
false, false,
); );
if let Some(warning) = working_set.parse_warnings.first() {
report_error(&working_set, warning);
}
if let Some(err) = working_set.parse_errors.first() { if let Some(err) = working_set.parse_errors.first() {
set_last_exit_code(stack, 1); set_last_exit_code(stack, 1);
report_error(&working_set, err); report_error(&working_set, err);
@ -239,9 +236,9 @@ pub fn eval_source(
} }
let b = if allow_return { let b = if allow_return {
eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input) eval_block_with_early_return(engine_state, stack, &block, input, false, false)
} else { } else {
eval_block::<WithoutDebug>(engine_state, stack, &block, input) eval_block(engine_state, stack, &block, input, false, false)
}; };
match b { match b {

View File

@ -1,11 +1,12 @@
pub mod support; pub mod support;
use std::path::PathBuf;
use nu_cli::NuCompleter; use nu_cli::NuCompleter;
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::engine::StateWorkingSet; use nu_protocol::engine::StateWorkingSet;
use reedline::{Completer, Suggestion}; use reedline::{Completer, Suggestion};
use rstest::{fixture, rstest}; use rstest::{fixture, rstest};
use std::path::PathBuf;
use support::{ use support::{
completions_helpers::{new_partial_engine, new_quote_engine}, completions_helpers::{new_partial_engine, new_quote_engine},
file, folder, match_suggestions, new_engine, file, folder, match_suggestions, new_engine,
@ -58,29 +59,6 @@ fn extern_completer() -> NuCompleter {
NuCompleter::new(std::sync::Arc::new(engine), stack) NuCompleter::new(std::sync::Arc::new(engine), stack)
} }
#[fixture]
fn custom_completer() -> NuCompleter {
// Create a new engine
let (dir, _, mut engine, mut stack) = new_engine();
// Add record value as example
let record = r#"
let external_completer = {|spans|
$spans
}
$env.config.completions.external = {
enable: true
max_results: 100
completer: $external_completer
}
"#;
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
// Instantiate a new completer
NuCompleter::new(std::sync::Arc::new(engine), stack)
}
#[test] #[test]
fn variables_dollar_sign_with_varialblecompletion() { fn variables_dollar_sign_with_varialblecompletion() {
let (_, _, engine, stack) = new_engine(); let (_, _, engine, stack) = new_engine();
@ -90,7 +68,7 @@ fn variables_dollar_sign_with_varialblecompletion() {
let target_dir = "$ "; let target_dir = "$ ";
let suggestions = completer.complete(target_dir, target_dir.len()); let suggestions = completer.complete(target_dir, target_dir.len());
assert_eq!(8, suggestions.len()); assert_eq!(7, suggestions.len());
} }
#[rstest] #[rstest]
@ -143,34 +121,15 @@ fn dotnu_completions() {
let completion_str = "source-env ".to_string(); let completion_str = "source-env ".to_string();
let suggestions = completer.complete(&completion_str, completion_str.len()); let suggestions = completer.complete(&completion_str, completion_str.len());
assert_eq!(2, suggestions.len()); assert_eq!(1, suggestions.len());
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value); assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
#[cfg(windows)]
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
#[cfg(not(windows))]
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
// Test use completion // Test use completion
let completion_str = "use ".to_string(); let completion_str = "use ".to_string();
let suggestions = completer.complete(&completion_str, completion_str.len()); let suggestions = completer.complete(&completion_str, completion_str.len());
assert_eq!(2, suggestions.len()); assert_eq!(1, suggestions.len());
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value); assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
#[cfg(windows)]
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
#[cfg(not(windows))]
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
// Test overlay use completion
let completion_str = "overlay use ".to_string();
let suggestions = completer.complete(&completion_str, completion_str.len());
assert_eq!(2, suggestions.len());
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value);
#[cfg(windows)]
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
#[cfg(not(windows))]
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
} }
#[test] #[test]
@ -182,7 +141,7 @@ fn external_completer_trailing_space() {
let suggestions = run_external_completion(block, &input); let suggestions = run_external_completion(block, &input);
assert_eq!(3, suggestions.len()); assert_eq!(3, suggestions.len());
assert_eq!("gh", suggestions.first().unwrap().value); assert_eq!("gh", suggestions.get(0).unwrap().value);
assert_eq!("alias", suggestions.get(1).unwrap().value); assert_eq!("alias", suggestions.get(1).unwrap().value);
assert_eq!("", suggestions.get(2).unwrap().value); assert_eq!("", suggestions.get(2).unwrap().value);
} }
@ -194,7 +153,7 @@ fn external_completer_no_trailing_space() {
let suggestions = run_external_completion(block, &input); let suggestions = run_external_completion(block, &input);
assert_eq!(2, suggestions.len()); assert_eq!(2, suggestions.len());
assert_eq!("gh", suggestions.first().unwrap().value); assert_eq!("gh", suggestions.get(0).unwrap().value);
assert_eq!("alias", suggestions.get(1).unwrap().value); assert_eq!("alias", suggestions.get(1).unwrap().value);
} }
@ -205,7 +164,7 @@ fn external_completer_pass_flags() {
let suggestions = run_external_completion(block, &input); let suggestions = run_external_completion(block, &input);
assert_eq!(3, suggestions.len()); assert_eq!(3, suggestions.len());
assert_eq!("gh", suggestions.first().unwrap().value); assert_eq!("gh", suggestions.get(0).unwrap().value);
assert_eq!("api", suggestions.get(1).unwrap().value); assert_eq!("api", suggestions.get(1).unwrap().value);
assert_eq!("--", suggestions.get(2).unwrap().value); assert_eq!("--", suggestions.get(2).unwrap().value);
} }
@ -226,7 +185,6 @@ fn file_completions() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
folder(dir.join("another")), folder(dir.join("another")),
file(dir.join("custom_completion.nu")), file(dir.join("custom_completion.nu")),
folder(dir.join("directory_completion")),
file(dir.join("nushell")), file(dir.join("nushell")),
folder(dir.join("test_a")), folder(dir.join("test_a")),
folder(dir.join("test_b")), folder(dir.join("test_b")),
@ -246,16 +204,6 @@ fn file_completions() {
// Match the results // Match the results
match_suggestions(expected_paths, suggestions); match_suggestions(expected_paths, suggestions);
// Test completions for hidden files
let target_dir = format!("ls {}/.", folder(dir.join(".hidden_folder")));
let suggestions = completer.complete(&target_dir, target_dir.len());
let expected_paths: Vec<String> =
vec![file(dir.join(".hidden_folder").join(".hidden_subfile"))];
// Match the results
match_suggestions(expected_paths, suggestions);
} }
#[test] #[test]
@ -352,7 +300,6 @@ fn command_ls_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -363,7 +310,6 @@ fn command_ls_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -386,7 +332,6 @@ fn command_open_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -397,7 +342,6 @@ fn command_open_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -421,7 +365,6 @@ fn command_rm_with_globcompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -432,7 +375,6 @@ fn command_rm_with_globcompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -456,7 +398,6 @@ fn command_cp_with_globcompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -467,7 +408,6 @@ fn command_cp_with_globcompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -491,7 +431,6 @@ fn command_save_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -502,7 +441,6 @@ fn command_save_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -526,7 +464,6 @@ fn command_touch_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -537,7 +474,6 @@ fn command_touch_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -561,7 +497,6 @@ fn command_watch_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -572,7 +507,6 @@ fn command_watch_with_filecompletion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -593,7 +527,6 @@ fn file_completion_quoted() {
let suggestions = completer.complete(target_dir, target_dir.len()); let suggestions = completer.complete(target_dir, target_dir.len());
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"\'[a] bc.txt\'".to_string(),
"`--help`".to_string(), "`--help`".to_string(),
"`-42`".to_string(), "`-42`".to_string(),
"`-inf`".to_string(), "`-inf`".to_string(),
@ -669,7 +602,6 @@ fn folder_with_directorycompletions() {
// Create the expected values // Create the expected values
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
folder(dir.join("another")), folder(dir.join("another")),
folder(dir.join("directory_completion")),
folder(dir.join("test_a")), folder(dir.join("test_a")),
folder(dir.join("test_b")), folder(dir.join("test_b")),
folder(dir.join(".hidden_folder")), folder(dir.join(".hidden_folder")),
@ -694,14 +626,13 @@ fn variables_completions() {
// Test completions for $nu // Test completions for $nu
let suggestions = completer.complete("$nu.", 4); let suggestions = completer.complete("$nu.", 4);
assert_eq!(15, suggestions.len()); assert_eq!(14, suggestions.len());
let expected: Vec<String> = vec![ let expected: Vec<String> = vec![
"config-path".into(), "config-path".into(),
"current-exe".into(), "current-exe".into(),
"default-config-dir".into(), "default-config-dir".into(),
"env-path".into(), "env-path".into(),
"history-enabled".into(),
"history-path".into(), "history-path".into(),
"home-path".into(), "home-path".into(),
"is-interactive".into(), "is-interactive".into(),
@ -720,13 +651,9 @@ fn variables_completions() {
// Test completions for $nu.h (filter) // Test completions for $nu.h (filter)
let suggestions = completer.complete("$nu.h", 5); let suggestions = completer.complete("$nu.h", 5);
assert_eq!(3, suggestions.len()); assert_eq!(2, suggestions.len());
let expected: Vec<String> = vec![ let expected: Vec<String> = vec!["history-path".into(), "home-path".into()];
"history-enabled".into(),
"history-path".into(),
"home-path".into(),
];
// Match results // Match results
match_suggestions(expected, suggestions); match_suggestions(expected, suggestions);
@ -889,7 +816,6 @@ fn unknown_command_completion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -900,7 +826,6 @@ fn unknown_command_completion() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -951,7 +876,6 @@ fn filecompletions_triggers_after_cursor() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another\\".to_string(), "another\\".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a\\".to_string(), "test_a\\".to_string(),
"test_b\\".to_string(), "test_b\\".to_string(),
@ -962,7 +886,6 @@ fn filecompletions_triggers_after_cursor() {
let expected_paths: Vec<String> = vec![ let expected_paths: Vec<String> = vec![
"another/".to_string(), "another/".to_string(),
"custom_completion.nu".to_string(), "custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(), "nushell".to_string(),
"test_a/".to_string(), "test_a/".to_string(),
"test_b/".to_string(), "test_b/".to_string(),
@ -1015,34 +938,6 @@ fn extern_complete_flags(mut extern_completer: NuCompleter) {
match_suggestions(expected, suggestions); match_suggestions(expected, suggestions);
} }
#[rstest]
fn custom_completer_triggers_cursor_before_word(mut custom_completer: NuCompleter) {
let suggestions = custom_completer.complete("cmd foo bar", 8);
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "".into()];
match_suggestions(expected, suggestions);
}
#[rstest]
fn custom_completer_triggers_cursor_on_word_left_boundary(mut custom_completer: NuCompleter) {
let suggestions = custom_completer.complete("cmd foo bar", 8);
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "".into()];
match_suggestions(expected, suggestions);
}
#[rstest]
fn custom_completer_triggers_cursor_next_to_word(mut custom_completer: NuCompleter) {
let suggestions = custom_completer.complete("cmd foo bar", 11);
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "bar".into()];
match_suggestions(expected, suggestions);
}
#[rstest]
fn custom_completer_triggers_cursor_after_word(mut custom_completer: NuCompleter) {
let suggestions = custom_completer.complete("cmd foo bar ", 12);
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "bar".into(), "".into()];
match_suggestions(expected, suggestions);
}
#[ignore = "was reverted, still needs fixing"] #[ignore = "was reverted, still needs fixing"]
#[rstest] #[rstest]
fn alias_offset_bug_7648() { fn alias_offset_bug_7648() {

View File

@ -1,15 +1,14 @@
use std::path::PathBuf;
use nu_engine::eval_block; use nu_engine::eval_block;
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::{ use nu_protocol::{
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
eval_const::create_nu_constant, eval_const::create_nu_constant,
PipelineData, ShellError, Span, Value, NU_VARIABLE_ID, PipelineData, ShellError, Span, Value, NU_VARIABLE_ID,
}; };
use nu_test_support::fs; use nu_test_support::fs;
use reedline::Suggestion; use reedline::Suggestion;
use std::path::PathBuf;
const SEP: char = std::path::MAIN_SEPARATOR; const SEP: char = std::path::MAIN_SEPARATOR;
fn create_default_context() -> EngineState { fn create_default_context() -> EngineState {
@ -195,11 +194,13 @@ pub fn merge_input(
engine_state.merge_delta(delta)?; engine_state.merge_delta(delta)?;
assert!(eval_block::<WithoutDebug>( assert!(eval_block(
engine_state, engine_state,
stack, stack,
&block, &block,
PipelineData::Value(Value::nothing(Span::unknown()), None), PipelineData::Value(Value::nothing(Span::unknown(),), None),
false,
false
) )
.is_ok()); .is_ok());

View File

@ -5,17 +5,21 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-base" name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
version = "0.92.2" version = "0.87.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
nu-engine = { path = "../nu-engine", version = "0.92.2" } nu-engine = { path = "../nu-engine", version = "0.87.1" }
nu-parser = { path = "../nu-parser", version = "0.92.2" } nu-glob = { path = "../nu-glob", version = "0.87.1" }
nu-path = { path = "../nu-path", version = "0.92.2" } nu-parser = { path = "../nu-parser", version = "0.87.1" }
nu-protocol = { path = "../nu-protocol", version = "0.92.2" } nu-path = { path = "../nu-path", version = "0.87.1" }
nu-protocol = { path = "../nu-protocol", version = "0.87.1" }
nu-utils = { path = "../nu-utils", version = "0.87.1" }
indexmap = { workspace = true } indexmap = "2.1"
miette = { workspace = true } miette = "5.10.0"
[dev-dependencies] [dev-dependencies]
nu-test-support = { path = "../nu-test-support", version = "0.87.1" }
rstest = "0.18.2"

View File

@ -0,0 +1,207 @@
// utilities for expanding globs in command arguments
use nu_glob::{glob_with_parent, MatchOptions, Paths};
use nu_protocol::{ShellError, Spanned};
use std::fs;
use std::path::{Path, PathBuf};
// standard glob options to use for filesystem command arguments
const GLOB_PARAMS: MatchOptions = MatchOptions {
case_sensitive: true,
require_literal_separator: false,
require_literal_leading_dot: false,
recursive_match_hidden_dir: true,
};
// handle an argument that could be a literal path or a glob.
// if literal path, return just that (whether user can access it or not).
// if glob, expand into matching paths, using GLOB_PARAMS options.
pub fn arg_glob(
pattern: &Spanned<String>, // alleged path or glob
cwd: &Path, // current working directory
) -> Result<Paths, ShellError> {
arg_glob_opt(pattern, cwd, GLOB_PARAMS)
}
// variant of [arg_glob] that requires literal dot prefix in pattern to match dot-prefixed path.
pub fn arg_glob_leading_dot(pattern: &Spanned<String>, cwd: &Path) -> Result<Paths, ShellError> {
arg_glob_opt(
pattern,
cwd,
MatchOptions {
require_literal_leading_dot: true,
..GLOB_PARAMS
},
)
}
fn arg_glob_opt(
pattern: &Spanned<String>,
cwd: &Path,
options: MatchOptions,
) -> Result<Paths, ShellError> {
// remove ansi coloring (?)
let pattern = {
Spanned {
item: nu_utils::strip_ansi_string_unlikely(pattern.item.clone()),
span: pattern.span,
}
};
// if there's a file with same path as the pattern, just return that.
let pp = cwd.join(&pattern.item);
let md = fs::metadata(pp);
#[allow(clippy::single_match)]
match md {
Ok(_metadata) => {
return Ok(Paths::single(&PathBuf::from(pattern.item), cwd));
}
// file not found, but also "invalid chars in file" (e.g * on Windows). Fall through and glob
Err(_) => {}
}
// user wasn't referring to a specific thing in filesystem, try to glob it.
match glob_with_parent(&pattern.item, options, cwd) {
Ok(p) => Ok(p),
Err(pat_err) => {
Err(ShellError::InvalidGlobPattern(
pat_err.msg.into(),
pattern.span, // improve specificity
))
}
}
}
#[cfg(test)]
mod test {
use super::*;
use nu_glob::GlobResult;
use nu_protocol::{Span, Spanned};
use nu_test_support::fs::Stub::EmptyFile;
use nu_test_support::playground::Playground;
use rstest::rstest;
fn spanned_string(str: &str) -> Spanned<String> {
Spanned {
item: str.to_string(),
span: Span::test_data(),
}
}
#[test]
fn does_something() {
let act = arg_glob(&spanned_string("*"), &PathBuf::from("."));
assert!(act.is_ok());
for f in act.expect("checked ok") {
match f {
Ok(p) => {
assert!(!p.to_str().unwrap().is_empty());
}
Err(e) => panic!("unexpected error {:?}", e),
};
}
}
#[test]
fn glob_format_error() {
let act = arg_glob(&spanned_string(r#"ab]c[def"#), &PathBuf::from("."));
assert!(act.is_err());
}
#[rstest]
#[case("*", 4, "no dirs")]
#[case("**/*", 7, "incl dirs")]
fn glob_subdirs(#[case] pat: &str, #[case] exp_count: usize, #[case] case: &str) {
Playground::setup("glob_subdirs", |dirs, sandbox| {
sandbox.with_files(vec![
EmptyFile("yehuda.txt"),
EmptyFile("jttxt"),
EmptyFile("andres.txt"),
]);
sandbox.mkdir(".children");
sandbox.within(".children").with_files(vec![
EmptyFile("timothy.txt"),
EmptyFile("tiffany.txt"),
EmptyFile("trish.txt"),
]);
let p: Vec<GlobResult> = arg_glob(&spanned_string(pat), &dirs.test)
.expect("no error")
.collect();
assert_eq!(
exp_count,
p.iter().filter(|i| i.is_ok()).count(),
" case: {case} ",
);
// expected behavior -- that directories are included in results (if name matches pattern)
let t = p
.iter()
.any(|i| i.as_ref().unwrap().to_string_lossy().contains(".children"));
assert!(t, "check for dir, case {case}");
})
}
#[rstest]
#[case("yehuda.txt", true, 1, "matches literal path")]
#[case("*", false, 3, "matches glob")]
#[case(r#"bad[glob.foo"#, true, 1, "matches literal, would be bad glob pat")]
fn exact_vs_glob(
#[case] pat: &str,
#[case] exp_matches_input: bool,
#[case] exp_count: usize,
#[case] case: &str,
) {
Playground::setup("exact_vs_glob", |dirs, sandbox| {
sandbox.with_files(vec![
EmptyFile("yehuda.txt"),
EmptyFile("jttxt"),
EmptyFile("bad[glob.foo"),
]);
let res = arg_glob(&spanned_string(pat), &dirs.test)
.expect("no error")
.collect::<Vec<GlobResult>>();
eprintln!("res: {:?}", res);
if exp_matches_input {
assert_eq!(
exp_count,
res.len(),
" case {case}: matches input, but count not 1? "
);
assert_eq!(
&res[0].as_ref().unwrap().to_string_lossy(),
pat, // todo: is it OK for glob to return relative paths (not to current cwd, but to arg cwd of arg_glob)?
);
} else {
assert_eq!(exp_count, res.len(), " case: {}: matched glob", case);
}
})
}
#[rstest]
#[case(r#"realbad[glob.foo"#, true, 1, "error, bad glob")]
fn exact_vs_bad_glob(
// if path doesn't exist but pattern is not valid glob, should get error.
#[case] pat: &str,
#[case] _exp_matches_input: bool,
#[case] _exp_count: usize,
#[case] _tag: &str,
) {
Playground::setup("exact_vs_bad_glob", |dirs, sandbox| {
sandbox.with_files(vec![
EmptyFile("yehuda.txt"),
EmptyFile("jttxt"),
EmptyFile("bad[glob.foo"),
]);
let res = arg_glob(&spanned_string(pat), &dirs.test);
assert!(res
.expect_err("expected error")
.to_string()
.contains("Invalid glob pattern"));
})
}
}
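The module comments at the top of `arg_glob.rs` spell out the contract: a pattern that names an existing file is returned as-is, anything else is expanded with the `GLOB_PARAMS` options (or the leading-dot variant). As a rough sketch of how a caller would use it, based only on the signatures shown above; `expand_arg` is a hypothetical helper, not part of this changeset.

```rust
// Hypothetical caller of the new arg_glob helper, assuming the signature
// arg_glob(&Spanned<String>, &Path) -> Result<Paths, ShellError> shown above.
use nu_cmd_base::arg_glob;
use nu_protocol::{ShellError, Span, Spanned};
use std::path::{Path, PathBuf};

fn expand_arg(pattern: &str, cwd: &Path) -> Result<Vec<PathBuf>, ShellError> {
    let spanned = Spanned {
        item: pattern.to_string(),
        span: Span::unknown(), // a real command would carry the argument's original span
    };
    // An existing literal path comes back as a single entry; otherwise the
    // pattern is globbed with GLOB_PARAMS.
    let matches = arg_glob(&spanned, cwd)?;
    Ok(matches.filter_map(|entry| entry.ok()).collect())
}
```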

View File

@ -2,13 +2,9 @@ use crate::util::get_guaranteed_cwd;
use miette::Result; use miette::Result;
use nu_engine::{eval_block, eval_block_with_early_return}; use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::{ use nu_protocol::cli_error::{report_error, report_error_new};
cli_error::{report_error, report_error_new}, use nu_protocol::engine::{EngineState, Stack, StateWorkingSet};
debugger::WithoutDebug, use nu_protocol::{BlockId, PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId};
engine::{EngineState, Stack, StateWorkingSet},
BlockId, PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
};
use std::sync::Arc;
pub fn eval_env_change_hook( pub fn eval_env_change_hook(
env_change_hook: Option<Value>, env_change_hook: Option<Value>,
@ -18,7 +14,7 @@ pub fn eval_env_change_hook(
if let Some(hook) = env_change_hook { if let Some(hook) = env_change_hook {
match hook { match hook {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (env_name, hook_value) in &*val { for (env_name, hook_value) in &val {
let before = engine_state let before = engine_state
.previous_env_vars .previous_env_vars
.get(env_name) .get(env_name)
@ -39,7 +35,8 @@ pub fn eval_env_change_hook(
"env_change", "env_change",
)?; )?;
Arc::make_mut(&mut engine_state.previous_env_vars) engine_state
.previous_env_vars
.insert(env_name.to_string(), after); .insert(env_name.to_string(), after);
} }
} }
@ -93,11 +90,11 @@ pub fn eval_hook(
if let Some(err) = working_set.parse_errors.first() { if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err); report_error(&working_set, err);
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "valid source code".into(), "valid source code".into(),
value: "source code with syntax errors".into(), "source code with syntax errors".into(),
span, span,
}); ));
} }
(output, working_set.render(), vars) (output, working_set.render(), vars)
@ -118,7 +115,7 @@ pub fn eval_hook(
}) })
.collect(); .collect();
match eval_block::<WithoutDebug>(engine_state, stack, &block, input) { match eval_block(engine_state, stack, &block, input, false, false) {
Ok(pipeline_data) => { Ok(pipeline_data) => {
output = pipeline_data; output = pipeline_data;
} }
@ -153,7 +150,7 @@ pub fn eval_hook(
// If it returns true (the default if a condition block is not specified), the hook should be run. // If it returns true (the default if a condition block is not specified), the hook should be run.
let do_run_hook = if let Some(condition) = val.get("condition") { let do_run_hook = if let Some(condition) = val.get("condition") {
let other_span = condition.span(); let other_span = condition.span();
if let Ok(block_id) = condition.coerce_block() { if let Ok(block_id) = condition.as_block() {
match run_hook_block( match run_hook_block(
engine_state, engine_state,
stack, stack,
@ -167,11 +164,11 @@ pub fn eval_hook(
{ {
val val
} else { } else {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "boolean output".to_string(), "boolean output".to_string(),
value: "other PipelineData variant".to_string(), "other PipelineData variant".to_string(),
span: other_span, other_span,
}); ));
} }
} }
Err(err) => { Err(err) => {
@ -179,11 +176,11 @@ pub fn eval_hook(
} }
} }
} else { } else {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "block".to_string(), "block".to_string(),
value: format!("{}", condition.get_type()), format!("{}", condition.get_type()),
span: other_span, other_span,
}); ));
} }
} else { } else {
// always run the hook // always run the hook
@ -225,11 +222,11 @@ pub fn eval_hook(
if let Some(err) = working_set.parse_errors.first() { if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err); report_error(&working_set, err);
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "valid source code".into(), "valid source code".into(),
value: "source code with syntax errors".into(), "source code with syntax errors".into(),
span: source_span, source_span,
}); ));
} }
(output, working_set.render(), vars) (output, working_set.render(), vars)
@ -246,7 +243,7 @@ pub fn eval_hook(
}) })
.collect(); .collect();
match eval_block::<WithoutDebug>(engine_state, stack, &block, input) { match eval_block(engine_state, stack, &block, input, false, false) {
Ok(pipeline_data) => { Ok(pipeline_data) => {
output = pipeline_data; output = pipeline_data;
} }
@ -280,11 +277,11 @@ pub fn eval_hook(
)?; )?;
} }
other => { other => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "block or string".to_string(), "block or string".to_string(),
value: format!("{}", other.get_type()), format!("{}", other.get_type()),
span: source_span, source_span,
}); ));
} }
} }
} }
@ -296,11 +293,11 @@ pub fn eval_hook(
output = run_hook_block(engine_state, stack, val.block_id, input, arguments, span)?; output = run_hook_block(engine_state, stack, val.block_id, input, arguments, span)?;
} }
other => { other => {
return Err(ShellError::UnsupportedConfigValue { return Err(ShellError::UnsupportedConfigValue(
expected: "string, block, record, or list of commands".into(), "string, block, record, or list of commands".into(),
value: format!("{}", other.get_type()), format!("{}", other.get_type()),
span: other.span(), other.span(),
}); ));
} }
} }
@ -322,9 +319,7 @@ fn run_hook_block(
let input = optional_input.unwrap_or_else(PipelineData::empty); let input = optional_input.unwrap_or_else(PipelineData::empty);
let mut callee_stack = stack let mut callee_stack = stack.gather_captures(engine_state, &block.captures);
.gather_captures(engine_state, &block.captures)
.reset_pipes();
for (idx, PositionalArg { var_id, .. }) in for (idx, PositionalArg { var_id, .. }) in
block.signature.required_positional.iter().enumerate() block.signature.required_positional.iter().enumerate()
@ -341,12 +336,8 @@ fn run_hook_block(
} }
} }
let pipeline_data = eval_block_with_early_return::<WithoutDebug>( let pipeline_data =
engine_state, eval_block_with_early_return(engine_state, &mut callee_stack, block, input, false, false)?;
&mut callee_stack,
block,
input,
)?;
if let PipelineData::Value(Value::Error { error, .. }, _) = pipeline_data { if let PipelineData::Value(Value::Error { error, .. }, _) = pipeline_data {
return Err(*error); return Err(*error);
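The evaluator calls in this file follow the same two-sided reading: the newer side passes the debugger mode as a type parameter (`WithoutDebug`) and drops the two trailing boolean flags of the older `eval_block` signature. A minimal sketch of the newer call shape, assuming `nu-engine`/`nu-protocol` at the 0.92 revision; the wrapper function is illustrative only.

```rust
// Sketch only: the newer eval_block call shape, with the older one as a comment.
use nu_engine::eval_block;
use nu_protocol::{
    ast::Block,
    debugger::WithoutDebug,
    engine::{EngineState, Stack},
    PipelineData, ShellError,
};

fn eval(
    engine_state: &EngineState,
    stack: &mut Stack,
    block: &Block,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    // Older: eval_block(engine_state, stack, block, input, false, false)
    eval_block::<WithoutDebug>(engine_state, stack, block, input)
}
```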

View File

@ -1,5 +1,7 @@
use nu_protocol::{ast::CellPath, PipelineData, ShellError, Span, Value}; use nu_protocol::ast::CellPath;
use std::sync::{atomic::AtomicBool, Arc}; use nu_protocol::{PipelineData, ShellError, Span, Value};
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
pub trait CmdArgument { pub trait CmdArgument {
fn take_cell_paths(&mut self) -> Option<Vec<CellPath>>; fn take_cell_paths(&mut self) -> Option<Vec<CellPath>>;

View File

@ -1,4 +1,7 @@
mod arg_glob;
pub mod formats; pub mod formats;
pub mod hook; pub mod hook;
pub mod input_handler; pub mod input_handler;
pub mod util; pub mod util;
pub use arg_glob::arg_glob;
pub use arg_glob::arg_glob_leading_dot;

View File

@ -1,7 +1,8 @@
use nu_protocol::report_error;
use nu_protocol::{ use nu_protocol::{
ast::RangeInclusion, ast::RangeInclusion,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
report_error, Range, ShellError, Span, Value, Range, ShellError, Span, Value,
}; };
use std::path::PathBuf; use std::path::PathBuf;
@ -65,28 +66,28 @@ fn get_editor_commandline(
match value { match value {
Value::String { val, .. } if !val.is_empty() => Ok((val.to_string(), Vec::new())), Value::String { val, .. } if !val.is_empty() => Ok((val.to_string(), Vec::new())),
Value::List { vals, .. } if !vals.is_empty() => { Value::List { vals, .. } if !vals.is_empty() => {
let mut editor_cmd = vals.iter().map(|l| l.coerce_string()); let mut editor_cmd = vals.iter().map(|l| l.as_string());
match editor_cmd.next().transpose()? { match editor_cmd.next().transpose()? {
Some(editor) if !editor.is_empty() => { Some(editor) if !editor.is_empty() => {
let params = editor_cmd.collect::<Result<_, ShellError>>()?; let params = editor_cmd.collect::<Result<_, ShellError>>()?;
Ok((editor, params)) Ok((editor, params))
} }
_ => Err(ShellError::GenericError { _ => Err(ShellError::GenericError(
error: "Editor executable is missing".into(), "Editor executable is missing".into(),
msg: "Set the first element to an executable".into(), "Set the first element to an executable".into(),
span: Some(value.span()), Some(value.span()),
help: Some(HELP_MSG.into()), Some(HELP_MSG.into()),
inner: vec![], vec![],
}), )),
} }
} }
Value::String { .. } | Value::List { .. } => Err(ShellError::GenericError { Value::String { .. } | Value::List { .. } => Err(ShellError::GenericError(
error: format!("{var_name} should be a non-empty string or list<String>"), format!("{var_name} should be a non-empty string or list<String>"),
msg: "Specify an executable here".into(), "Specify an executable here".into(),
span: Some(value.span()), Some(value.span()),
help: Some(HELP_MSG.into()), Some(HELP_MSG.into()),
inner: vec![], vec![],
}), )),
x => Err(ShellError::CantConvert { x => Err(ShellError::CantConvert {
to_type: "string or list<string>".into(), to_type: "string or list<string>".into(),
from_type: x.get_type().to_string(), from_type: x.get_type().to_string(),
@ -98,7 +99,7 @@ fn get_editor_commandline(
pub fn get_editor( pub fn get_editor(
engine_state: &EngineState, engine_state: &EngineState,
stack: &Stack, stack: &mut Stack,
span: Span, span: Span,
) -> Result<(String, Vec<String>), ShellError> { ) -> Result<(String, Vec<String>), ShellError> {
let config = engine_state.get_config(); let config = engine_state.get_config();
@ -113,14 +114,13 @@ pub fn get_editor(
} else if let Some(value) = env_vars.get("VISUAL") { } else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL") get_editor_commandline(value, "$env.VISUAL")
} else { } else {
Err(ShellError::GenericError { Err(ShellError::GenericError(
error: "No editor configured".into(), "No editor configured".into(),
msg:
"Please specify one via `$env.config.buffer_editor` or `$env.EDITOR`/`$env.VISUAL`" "Please specify one via `$env.config.buffer_editor` or `$env.EDITOR`/`$env.VISUAL`"
.into(), .into(),
span: Some(span), Some(span),
help: Some(HELP_MSG.into()), Some(HELP_MSG.into()),
inner: vec![], vec![],
}) ))
} }
} }

View File

@ -5,7 +5,7 @@ edition = "2021"
license = "MIT" license = "MIT"
name = "nu-cmd-dataframe" name = "nu-cmd-dataframe"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe" repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe"
version = "0.92.2" version = "0.87.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -13,24 +13,19 @@ version = "0.92.2"
bench = false bench = false
[dependencies] [dependencies]
nu-engine = { path = "../nu-engine", version = "0.92.2" } nu-engine = { path = "../nu-engine", version = "0.87.1" }
nu-parser = { path = "../nu-parser", version = "0.92.2" } nu-parser = { path = "../nu-parser", version = "0.87.1" }
nu-protocol = { path = "../nu-protocol", version = "0.92.2" } nu-protocol = { path = "../nu-protocol", version = "0.87.1" }
# Potential dependencies for extras # Potential dependencies for extras
chrono = { workspace = true, features = ["std", "unstable-locales"], default-features = false } chrono = { version = "0.4", features = ["std", "unstable-locales"], default-features = false }
chrono-tz = { workspace = true } chrono-tz = "0.8"
fancy-regex = { workspace = true } fancy-regex = "0.11"
indexmap = { workspace = true } indexmap = { version = "2.1" }
num = { version = "0.4", optional = true } num = { version = "0.4", optional = true }
serde = { workspace = true, features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
# keep sqlparser at 0.39.0 until we can update polars sqlparser = { version = "0.36.1", optional = true }
sqlparser = { version = "0.39.0", optional = true } polars-io = { version = "0.33", features = ["avro"], optional = true }
polars-io = { version = "0.37", features = ["avro"], optional = true }
polars-arrow = { version = "0.37", optional = true }
polars-ops = { version = "0.37", optional = true }
polars-plan = { version = "0.37", features = ["regex"], optional = true }
polars-utils = { version = "0.37", optional = true }
[dependencies.polars] [dependencies.polars]
features = [ features = [
@ -40,6 +35,7 @@ features = [
"cross_join", "cross_join",
"csv", "csv",
"cum_agg", "cum_agg",
"default",
"dtype-categorical", "dtype-categorical",
"dtype-datetime", "dtype-datetime",
"dtype-struct", "dtype-struct",
@ -60,16 +56,15 @@ features = [
"serde", "serde",
"serde-lazy", "serde-lazy",
"strings", "strings",
"temporal",
"to_dummies", "to_dummies",
] ]
default-features = false
optional = true optional = true
version = "0.37" version = "0.33"
[features] [features]
dataframe = ["num", "polars", "polars-io", "polars-arrow", "polars-ops", "polars-plan", "polars-utils", "sqlparser"] dataframe = ["num", "polars", "polars-io", "sqlparser"]
default = [] default = []
[dev-dependencies] [dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.92.2" } nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.87.1" }
nu-test-support = { path = "../nu-test-support", version = "0.87.1" }

View File

@ -1,5 +1,11 @@
use crate::dataframe::values::{Axis, Column, NuDataFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use super::super::values::{Axis, Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct AppendDF; pub struct AppendDF;
@ -31,8 +37,7 @@ impl Command for AppendDF {
example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df); example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
$a | dfr append $a"#, $a | dfr append $a"#,
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a".to_string(), "a".to_string(),
vec![Value::test_int(1), Value::test_int(3)], vec![Value::test_int(1), Value::test_int(3)],
@ -49,9 +54,7 @@ impl Command for AppendDF {
"b_x".to_string(), "b_x".to_string(),
vec![Value::test_int(2), Value::test_int(4)], vec![Value::test_int(2), Value::test_int(4)],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -61,8 +64,7 @@ impl Command for AppendDF {
example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df); example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
$a | dfr append $a --col"#, $a | dfr append $a --col"#,
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a".to_string(), "a".to_string(),
vec![ vec![
@ -81,9 +83,7 @@ impl Command for AppendDF {
Value::test_int(4), Value::test_int(4),
], ],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -110,7 +110,7 @@ fn command(
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let other: Value = call.req(engine_state, stack, 0)?; let other: Value = call.req(engine_state, stack, 0)?;
let axis = if call.has_flag(engine_state, stack, "col")? { let axis = if call.has_flag("col") {
Axis::Column Axis::Column
} else { } else {
Axis::Row Axis::Row
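
Another change that shows up in most of the files below: `Call::has_flag` now takes the engine state and the stack and, judging by the trailing `?`, returns a `Result` rather than a bare `bool`. A hedged sketch of the newer lookup, with a plain string standing in for the crate-local `Axis` enum so the snippet stays self-contained; the function name is illustrative only:

```rust
use nu_protocol::{
    ast::Call,
    engine::{EngineState, Stack},
    ShellError,
};

// Decide the append axis from the `--col` switch, as the hunk above does;
// the flag lookup is now fallible and needs the engine state and stack.
fn append_axis(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
) -> Result<&'static str, ShellError> {
    Ok(if call.has_flag(engine_state, stack, "col")? {
        "column"
    } else {
        "row"
    })
}
```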

View File

@ -1,195 +0,0 @@
use crate::dataframe::values::{str_to_dtype, NuDataFrame, NuExpression, NuLazyFrame};
use nu_engine::command_prelude::*;
use polars::prelude::*;
#[derive(Clone)]
pub struct CastDF;
impl Command for CastDF {
fn name(&self) -> &str {
"dfr cast"
}
fn usage(&self) -> &str {
"Cast a column to a different dtype."
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.input_output_types(vec![
(
Type::Custom("expression".into()),
Type::Custom("expression".into()),
),
(
Type::Custom("dataframe".into()),
Type::Custom("dataframe".into()),
),
])
.required(
"dtype",
SyntaxShape::String,
"The dtype to cast the column to",
)
.optional(
"column",
SyntaxShape::String,
"The column to cast. Required when used with a dataframe.",
)
.category(Category::Custom("dataframe".into()))
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "Cast a column in a dataframe to a different dtype",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr cast u8 a | dfr schema",
result: Some(Value::record(
record! {
"a" => Value::string("u8", Span::test_data()),
"b" => Value::string("i64", Span::test_data()),
},
Span::test_data(),
)),
},
Example {
description: "Cast a column in a lazy dataframe to a different dtype",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr into-lazy | dfr cast u8 a | dfr schema",
result: Some(Value::record(
record! {
"a" => Value::string("u8", Span::test_data()),
"b" => Value::string("i64", Span::test_data()),
},
Span::test_data(),
)),
},
Example {
description: "Cast a column in a expression to a different dtype",
example: r#"[[a b]; [1 2] [1 4]] | dfr into-df | dfr group-by a | dfr agg [ (dfr col b | dfr cast u8 | dfr min | dfr as "b_min") ] | dfr schema"#,
result: None
}
]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head);
if NuLazyFrame::can_downcast(&value) {
let (dtype, column_nm) = df_args(engine_state, stack, call)?;
let df = NuLazyFrame::try_from_value(value)?;
command_lazy(call, column_nm, dtype, df)
} else if NuDataFrame::can_downcast(&value) {
let (dtype, column_nm) = df_args(engine_state, stack, call)?;
let df = NuDataFrame::try_from_value(value)?;
command_eager(call, column_nm, dtype, df)
} else {
let dtype: String = call.req(engine_state, stack, 0)?;
let dtype = str_to_dtype(&dtype, call.head)?;
let expr = NuExpression::try_from_value(value)?;
let expr: NuExpression = expr.into_polars().cast(dtype).into();
Ok(PipelineData::Value(
NuExpression::into_value(expr, call.head),
None,
))
}
}
}
fn df_args(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
) -> Result<(DataType, String), ShellError> {
let dtype = dtype_arg(engine_state, stack, call)?;
let column_nm: String =
call.opt(engine_state, stack, 1)?
.ok_or(ShellError::MissingParameter {
param_name: "column_name".into(),
span: call.head,
})?;
Ok((dtype, column_nm))
}
fn dtype_arg(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
) -> Result<DataType, ShellError> {
let dtype: String = call.req(engine_state, stack, 0)?;
str_to_dtype(&dtype, call.head)
}
fn command_lazy(
call: &Call,
column_nm: String,
dtype: DataType,
lazy: NuLazyFrame,
) -> Result<PipelineData, ShellError> {
let column = col(&column_nm).cast(dtype);
let lazy = lazy.into_polars().with_columns(&[column]);
let lazy = NuLazyFrame::new(false, lazy);
Ok(PipelineData::Value(
NuLazyFrame::into_value(lazy, call.head)?,
None,
))
}
fn command_eager(
call: &Call,
column_nm: String,
dtype: DataType,
nu_df: NuDataFrame,
) -> Result<PipelineData, ShellError> {
let mut df = nu_df.df;
let column = df
.column(&column_nm)
.map_err(|e| ShellError::GenericError {
error: format!("{e}"),
msg: "".into(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
let casted = column.cast(&dtype).map_err(|e| ShellError::GenericError {
error: format!("{e}"),
msg: "".into(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
let _ = df
.with_column(casted)
.map_err(|e| ShellError::GenericError {
error: format!("{e}"),
msg: "".into(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
let df = NuDataFrame::new(false, df);
Ok(PipelineData::Value(df.into_value(call.head), None))
}
#[cfg(test)]
mod test {
use super::super::super::test_dataframe::test_dataframe;
use super::*;
#[test]
fn test_examples() {
test_dataframe(vec![Box::new(CastDF {})])
}
}
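
The removed `dfr cast` command above dispatches on the input (lazy frame, eager frame, or expression); in the lazy branch the cast is built as a polars expression and applied with `with_columns`. A small sketch of just that branch in plain polars, assuming a polars version with the lazy feature in the range pinned by the Cargo.toml hunk earlier; the column name and target dtype are placeholders:

```rust
use polars::prelude::*;

// Mirror `command_lazy` above: express the cast as col("a").cast(...) and
// attach it to the lazy plan with with_columns.
fn cast_column_to_u8(lazy: LazyFrame) -> LazyFrame {
    lazy.with_columns([col("a").cast(DataType::UInt8)])
}
```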

View File

@ -1,5 +1,9 @@
use crate::dataframe::values::NuDataFrame; use super::super::values::NuDataFrame;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct ColumnsDF; pub struct ColumnsDF;

View File

@ -1,5 +1,12 @@
use crate::dataframe::values::{utils::convert_columns, Column, NuDataFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use super::super::values::utils::convert_columns;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct DropDF; pub struct DropDF;
@ -28,13 +35,10 @@ impl Command for DropDF {
description: "drop column a", description: "drop column a",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr drop a", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr drop a",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![Column::new(
vec![Column::new(
"b".to_string(), "b".to_string(),
vec![Value::test_int(2), Value::test_int(4)], vec![Value::test_int(2), Value::test_int(4)],
)], )])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -64,23 +68,25 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?; let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let new_df = col_string let new_df = col_string
.first() .get(0)
.ok_or_else(|| ShellError::GenericError { .ok_or_else(|| {
error: "Empty names list".into(), ShellError::GenericError(
msg: "No column names were found".into(), "Empty names list".into(),
span: Some(col_span), "No column names were found".into(),
help: None, Some(col_span),
inner: vec![], None,
Vec::new(),
)
}) })
.and_then(|col| { .and_then(|col| {
df.as_ref() df.as_ref().drop(&col.item).map_err(|e| {
.drop(&col.item) ShellError::GenericError(
.map_err(|e| ShellError::GenericError { "Error dropping column".into(),
error: "Error dropping column".into(), e.to_string(),
msg: e.to_string(), Some(col.span),
span: Some(col.span), None,
help: None, Vec::new(),
inner: vec![], )
}) })
})?; })?;
@ -90,14 +96,14 @@ fn command(
.iter() .iter()
.skip(1) .skip(1)
.try_fold(new_df, |new_df, col| { .try_fold(new_df, |new_df, col| {
new_df new_df.drop(&col.item).map_err(|e| {
.drop(&col.item) ShellError::GenericError(
.map_err(|e| ShellError::GenericError { "Error dropping column".into(),
error: "Error dropping column".into(), e.to_string(),
msg: e.to_string(), Some(col.span),
span: Some(col.span), None,
help: None, Vec::new(),
inner: vec![], )
}) })
}) })
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))

View File

@ -1,8 +1,14 @@
use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::UniqueKeepStrategy; use polars::prelude::UniqueKeepStrategy;
use super::super::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct DropDuplicates; pub struct DropDuplicates;
@ -40,8 +46,7 @@ impl Command for DropDuplicates {
description: "drop duplicates", description: "drop duplicates",
example: "[[a b]; [1 2] [3 4] [1 2]] | dfr into-df | dfr drop-duplicates", example: "[[a b]; [1 2] [3 4] [1 2]] | dfr into-df | dfr drop-duplicates",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a".to_string(), "a".to_string(),
vec![Value::test_int(3), Value::test_int(1)], vec![Value::test_int(3), Value::test_int(1)],
@ -50,9 +55,7 @@ impl Command for DropDuplicates {
"b".to_string(), "b".to_string(),
vec![Value::test_int(4), Value::test_int(2)], vec![Value::test_int(4), Value::test_int(2)],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -89,7 +92,7 @@ fn command(
let subset_slice = subset.as_ref().map(|cols| &cols[..]); let subset_slice = subset.as_ref().map(|cols| &cols[..]);
let keep_strategy = if call.has_flag(engine_state, stack, "last")? { let keep_strategy = if call.has_flag("last") {
UniqueKeepStrategy::Last UniqueKeepStrategy::Last
} else { } else {
UniqueKeepStrategy::First UniqueKeepStrategy::First
@ -97,12 +100,14 @@ fn command(
df.as_ref() df.as_ref()
.unique(subset_slice, keep_strategy, None) .unique(subset_slice, keep_strategy, None)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error dropping duplicates".into(), ShellError::GenericError(
msg: e.to_string(), "Error dropping duplicates".into(),
span: Some(col_span), e.to_string(),
help: None, Some(col_span),
inner: vec![], None,
Vec::new(),
)
}) })
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
} }

View File

@ -1,5 +1,12 @@
use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use super::super::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct DropNulls; pub struct DropNulls;
@ -36,8 +43,7 @@ impl Command for DropNulls {
let a = ($df | dfr with-column $res --name res); let a = ($df | dfr with-column $res --name res);
$a | dfr drop-nulls"#, $a | dfr drop-nulls"#,
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a".to_string(), "a".to_string(),
vec![Value::test_int(1), Value::test_int(1)], vec![Value::test_int(1), Value::test_int(1)],
@ -50,9 +56,7 @@ impl Command for DropNulls {
"res".to_string(), "res".to_string(),
vec![Value::test_int(1), Value::test_int(1)], vec![Value::test_int(1), Value::test_int(1)],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -62,8 +66,7 @@ impl Command for DropNulls {
example: r#"let s = ([1 2 0 0 3 4] | dfr into-df); example: r#"let s = ([1 2 0 0 3 4] | dfr into-df);
($s / $s) | dfr drop-nulls"#, ($s / $s) | dfr drop-nulls"#,
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![Column::new(
vec![Column::new(
"div_0_0".to_string(), "div_0_0".to_string(),
vec![ vec![
Value::test_int(1), Value::test_int(1),
@ -71,9 +74,7 @@ impl Command for DropNulls {
Value::test_int(1), Value::test_int(1),
Value::test_int(1), Value::test_int(1),
], ],
)], )])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -114,12 +115,14 @@ fn command(
df.as_ref() df.as_ref()
.drop_nulls(subset_slice) .drop_nulls(subset_slice)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error dropping nulls".into(), ShellError::GenericError(
msg: e.to_string(), "Error dropping nulls".into(),
span: Some(col_span), e.to_string(),
help: None, Some(col_span),
inner: vec![], None,
Vec::new(),
)
}) })
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
} }

View File

@ -1,5 +1,9 @@
use crate::dataframe::values::{Column, NuDataFrame}; use super::super::values::{Column, NuDataFrame};
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct DataTypes; pub struct DataTypes;
@ -27,8 +31,7 @@ impl Command for DataTypes {
description: "Dataframe dtypes", description: "Dataframe dtypes",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dtypes", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dtypes",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"column".to_string(), "column".to_string(),
vec![Value::test_string("a"), Value::test_string("b")], vec![Value::test_string("a"), Value::test_string("b")],
@ -37,9 +40,7 @@ impl Command for DataTypes {
"dtype".to_string(), "dtype".to_string(),
vec![Value::test_string("i64"), Value::test_string("i64")], vec![Value::test_string("i64"), Value::test_string("i64")],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -78,7 +79,6 @@ fn command(
.dtype(); .dtype();
let dtype_str = dtype.to_string(); let dtype_str = dtype.to_string();
dtypes.push(Value::string(dtype_str, call.head)); dtypes.push(Value::string(dtype_str, call.head));
Value::string(*v, call.head) Value::string(*v, call.head)
@ -88,7 +88,7 @@ fn command(
let names_col = Column::new("column".to_string(), names); let names_col = Column::new("column".to_string(), names);
let dtypes_col = Column::new("dtype".to_string(), dtypes); let dtypes_col = Column::new("dtype".to_string(), dtypes);
NuDataFrame::try_from_columns(vec![names_col, dtypes_col], None) NuDataFrame::try_from_columns(vec![names_col, dtypes_col])
.map(|df| PipelineData::Value(df.into_value(call.head), None)) .map(|df| PipelineData::Value(df.into_value(call.head), None))
} }
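
Every `NuDataFrame::try_from_columns` call site in this comparison gains a trailing argument, always `None` in the examples shown; given the `NuSchema` import added to the open command further down, this looks like an optional schema, but that is an inference from the diff rather than something it states. A sketch of the newer call shape, reusing the columns from the dtypes hunk above; the `crate::` import assumes the snippet lives inside nu-cmd-dataframe itself:

```rust
use crate::dataframe::values::{Column, NuDataFrame};
use nu_protocol::{ShellError, Span, Value};

// Build the two-column frame from the `dfr dtypes` example; the trailing
// None is the newly added argument (no schema supplied here).
fn dtypes_frame() -> Result<Value, ShellError> {
    let names = Column::new(
        "column".to_string(),
        vec![Value::test_string("a"), Value::test_string("b")],
    );
    let dtypes = Column::new(
        "dtype".to_string(),
        vec![Value::test_string("i64"), Value::test_string("i64")],
    );
    NuDataFrame::try_from_columns(vec![names, dtypes], None)
        .map(|df| df.into_value(Span::test_data()))
}
```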

View File

@ -1,6 +1,9 @@
use crate::dataframe::values::NuDataFrame; use super::super::values::NuDataFrame;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type,
};
use polars::{prelude::*, series::Series}; use polars::{prelude::*, series::Series};
#[derive(Clone)] #[derive(Clone)]
@ -75,22 +78,24 @@ impl Command for Dummies {
} }
fn command( fn command(
engine_state: &EngineState, _engine_state: &EngineState,
stack: &mut Stack, _stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let drop_first: bool = call.has_flag(engine_state, stack, "drop-first")?; let drop_first: bool = call.has_flag("drop-first");
let df = NuDataFrame::try_from_pipeline(input, call.head)?; let df = NuDataFrame::try_from_pipeline(input, call.head)?;
df.as_ref() df.as_ref()
.to_dummies(None, drop_first) .to_dummies(None, drop_first)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error calculating dummies".into(), ShellError::GenericError(
msg: e.to_string(), "Error calculating dummies".into(),
span: Some(call.head), e.to_string(),
help: Some("The only allowed column types for dummies are String or Int".into()), Some(call.head),
inner: vec![], Some("The only allowed column types for dummies are String or Int".into()),
Vec::new(),
)
}) })
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
} }
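
The dummies command above is a thin wrapper over polars' `to_dummies`. A sketch of the underlying call with the same argument shape as the hunk, assuming the `to_dummies` feature from the Cargo.toml hunk is enabled; the `None` simply mirrors the original call:

```rust
use polars::prelude::*;

// One-hot encode every column of an eager DataFrame, keeping the first
// category (drop_first = false), as `dfr dummies` does without --drop-first.
fn dummies(df: &DataFrame) -> PolarsResult<DataFrame> {
    df.to_dummies(None, false)
}
```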

View File

@ -1,8 +1,15 @@
use crate::dataframe::values::{Column, NuDataFrame, NuExpression, NuLazyFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::LazyFrame; use polars::prelude::LazyFrame;
use crate::dataframe::values::{NuExpression, NuLazyFrame};
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct FilterWith; pub struct FilterWith;
@ -36,13 +43,10 @@ impl Command for FilterWith {
example: r#"let mask = ([true false] | dfr into-df); example: r#"let mask = ([true false] | dfr into-df);
[[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with $mask"#, [[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with $mask"#,
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new("a".to_string(), vec![Value::test_int(1)]), Column::new("a".to_string(), vec![Value::test_int(1)]),
Column::new("b".to_string(), vec![Value::test_int(2)]), Column::new("b".to_string(), vec![Value::test_int(2)]),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -51,13 +55,10 @@ impl Command for FilterWith {
description: "Filter dataframe using an expression", description: "Filter dataframe using an expression",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with ((dfr col a) > 1)", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with ((dfr col a) > 1)",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new("a".to_string(), vec![Value::test_int(3)]), Column::new("a".to_string(), vec![Value::test_int(3)]),
Column::new("b".to_string(), vec![Value::test_int(4)]), Column::new("b".to_string(), vec![Value::test_int(4)]),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -104,22 +105,26 @@ fn command_eager(
)) ))
} else { } else {
let mask = NuDataFrame::try_from_value(mask_value)?.as_series(mask_span)?; let mask = NuDataFrame::try_from_value(mask_value)?.as_series(mask_span)?;
let mask = mask.bool().map_err(|e| ShellError::GenericError { let mask = mask.bool().map_err(|e| {
error: "Error casting to bool".into(), ShellError::GenericError(
msg: e.to_string(), "Error casting to bool".into(),
span: Some(mask_span), e.to_string(),
help: Some("Perhaps you want to use a series with booleans as mask".into()), Some(mask_span),
inner: vec![], Some("Perhaps you want to use a series with booleans as mask".into()),
Vec::new(),
)
})?; })?;
df.as_ref() df.as_ref()
.filter(mask) .filter(mask)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error filtering dataframe".into(), ShellError::GenericError(
msg: e.to_string(), "Error filtering dataframe".into(),
span: Some(call.head), e.to_string(),
help: Some("The only allowed column types for dummies are String or Int".into()), Some(call.head),
inner: vec![], Some("The only allowed column types for dummies are String or Int".into()),
Vec::new(),
)
}) })
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
} }
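
The eager branch of `filter-with` above casts the mask series to booleans and hands it to `DataFrame::filter`. The same two steps in plain polars, as a sketch:

```rust
use polars::prelude::*;

// Cast the mask to a boolean chunked array, then filter the frame with it;
// a non-boolean mask fails at the bool() step, as in the hunk above.
fn filter_with_mask(df: &DataFrame, mask: &Series) -> PolarsResult<DataFrame> {
    let mask = mask.bool()?;
    df.filter(mask)
}
```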

View File

@ -1,5 +1,10 @@
use crate::dataframe::values::{Column, NuDataFrame, NuExpression}; use super::super::values::{Column, NuDataFrame, NuExpression};
use nu_engine::command_prelude::*; use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct FirstDF; pub struct FirstDF;
@ -39,13 +44,10 @@ impl Command for FirstDF {
description: "Return the first row of a dataframe", description: "Return the first row of a dataframe",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new("a".to_string(), vec![Value::test_int(1)]), Column::new("a".to_string(), vec![Value::test_int(1)]),
Column::new("b".to_string(), vec![Value::test_int(2)]), Column::new("b".to_string(), vec![Value::test_int(2)]),
], ])
None,
)
.expect("should not fail") .expect("should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -54,8 +56,7 @@ impl Command for FirstDF {
description: "Return the first two rows of a dataframe", description: "Return the first two rows of a dataframe",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first 2", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first 2",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a".to_string(), "a".to_string(),
vec![Value::test_int(1), Value::test_int(3)], vec![Value::test_int(1), Value::test_int(3)],
@ -64,9 +65,7 @@ impl Command for FirstDF {
"b".to_string(), "b".to_string(),
vec![Value::test_int(2), Value::test_int(4)], vec![Value::test_int(2), Value::test_int(4)],
), ),
], ])
None,
)
.expect("should not fail") .expect("should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),

View File

@ -1,5 +1,13 @@
use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use crate::dataframe::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct GetDF; pub struct GetDF;
@ -28,13 +36,10 @@ impl Command for GetDF {
description: "Returns the selected column", description: "Returns the selected column",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr get a", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr get a",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![Column::new(
vec![Column::new(
"a".to_string(), "a".to_string(),
vec![Value::test_int(1), Value::test_int(3)], vec![Value::test_int(1), Value::test_int(3)],
)], )])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -65,12 +70,14 @@ fn command(
df.as_ref() df.as_ref()
.select(col_string) .select(col_string)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error selecting columns".into(), ShellError::GenericError(
msg: e.to_string(), "Error selecting columns".into(),
span: Some(col_span), e.to_string(),
help: None, Some(col_span),
inner: vec![], None,
Vec::new(),
)
}) })
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
} }

View File

@ -1,5 +1,10 @@
use crate::dataframe::values::{utils::DEFAULT_ROWS, Column, NuDataFrame, NuExpression}; use super::super::values::{utils::DEFAULT_ROWS, Column, NuDataFrame, NuExpression};
use nu_engine::command_prelude::*; use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct LastDF; pub struct LastDF;
@ -35,13 +40,10 @@ impl Command for LastDF {
description: "Create new dataframe with last rows", description: "Create new dataframe with last rows",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr last 1", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr last 1",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new("a".to_string(), vec![Value::test_int(3)]), Column::new("a".to_string(), vec![Value::test_int(3)]),
Column::new("b".to_string(), vec![Value::test_int(4)]), Column::new("b".to_string(), vec![Value::test_int(4)]),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),

View File

@ -1,5 +1,10 @@
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Value,
};
use crate::dataframe::values::NuDataFrame; use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
#[derive(Clone)] #[derive(Clone)]
pub struct ListDF; pub struct ListDF;

View File

@ -1,5 +1,14 @@
use crate::dataframe::values::{utils::convert_columns_string, Column, NuDataFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape, Type,
Value,
};
use crate::dataframe::values::utils::convert_columns_string;
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct MeltDF; pub struct MeltDF;
@ -97,7 +106,7 @@ impl Command for MeltDF {
Value::test_string("c"), Value::test_string("c"),
], ],
), ),
], None) ])
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -143,33 +152,37 @@ fn command(
let mut res = df let mut res = df
.as_ref() .as_ref()
.melt(&id_col_string, &val_col_string) .melt(&id_col_string, &val_col_string)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error calculating melt".into(), ShellError::GenericError(
msg: e.to_string(), "Error calculating melt".into(),
span: Some(call.head), e.to_string(),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})?; })?;
if let Some(name) = &variable_name { if let Some(name) = &variable_name {
res.rename("variable", &name.item) res.rename("variable", &name.item).map_err(|e| {
.map_err(|e| ShellError::GenericError { ShellError::GenericError(
error: "Error renaming column".into(), "Error renaming column".into(),
msg: e.to_string(), e.to_string(),
span: Some(name.span), Some(name.span),
help: None, None,
inner: vec![], Vec::new(),
)
})?; })?;
} }
if let Some(name) = &value_name { if let Some(name) = &value_name {
res.rename("value", &name.item) res.rename("value", &name.item).map_err(|e| {
.map_err(|e| ShellError::GenericError { ShellError::GenericError(
error: "Error renaming column".into(), "Error renaming column".into(),
msg: e.to_string(), e.to_string(),
span: Some(name.span), Some(name.span),
help: None, None,
inner: vec![], Vec::new(),
)
})?; })?;
} }
@ -185,50 +198,50 @@ fn check_column_datatypes<T: AsRef<str>>(
col_span: Span, col_span: Span,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
if cols.is_empty() { if cols.is_empty() {
return Err(ShellError::GenericError { return Err(ShellError::GenericError(
error: "Merge error".into(), "Merge error".into(),
msg: "empty column list".into(), "empty column list".into(),
span: Some(col_span), Some(col_span),
help: None, None,
inner: vec![], Vec::new(),
}); ));
} }
// Checking if they are same type // Checking if they are same type
if cols.len() > 1 { if cols.len() > 1 {
for w in cols.windows(2) { for w in cols.windows(2) {
let l_series = df let l_series = df.column(w[0].as_ref()).map_err(|e| {
.column(w[0].as_ref()) ShellError::GenericError(
.map_err(|e| ShellError::GenericError { "Error selecting columns".into(),
error: "Error selecting columns".into(), e.to_string(),
msg: e.to_string(), Some(col_span),
span: Some(col_span), None,
help: None, Vec::new(),
inner: vec![], )
})?; })?;
let r_series = df let r_series = df.column(w[1].as_ref()).map_err(|e| {
.column(w[1].as_ref()) ShellError::GenericError(
.map_err(|e| ShellError::GenericError { "Error selecting columns".into(),
error: "Error selecting columns".into(), e.to_string(),
msg: e.to_string(), Some(col_span),
span: Some(col_span), None,
help: None, Vec::new(),
inner: vec![], )
})?; })?;
if l_series.dtype() != r_series.dtype() { if l_series.dtype() != r_series.dtype() {
return Err(ShellError::GenericError { return Err(ShellError::GenericError(
error: "Merge error".into(), "Merge error".into(),
msg: "found different column types in list".into(), "found different column types in list".into(),
span: Some(col_span), Some(col_span),
help: Some(format!( Some(format!(
"datatypes {} and {} are incompatible", "datatypes {} and {} are incompatible",
l_series.dtype(), l_series.dtype(),
r_series.dtype() r_series.dtype()
)), )),
inner: vec![], Vec::new(),
}); ));
} }
} }
} }

View File

@ -1,5 +1,4 @@
mod append; mod append;
mod cast;
mod columns; mod columns;
mod drop; mod drop;
mod drop_duplicates; mod drop_duplicates;
@ -16,7 +15,6 @@ mod open;
mod query_df; mod query_df;
mod rename; mod rename;
mod sample; mod sample;
mod schema;
mod shape; mod shape;
mod slice; mod slice;
mod sql_context; mod sql_context;
@ -36,7 +34,6 @@ use nu_protocol::engine::StateWorkingSet;
pub use self::open::OpenDataFrame; pub use self::open::OpenDataFrame;
pub use append::AppendDF; pub use append::AppendDF;
pub use cast::CastDF;
pub use columns::ColumnsDF; pub use columns::ColumnsDF;
pub use drop::DropDF; pub use drop::DropDF;
pub use drop_duplicates::DropDuplicates; pub use drop_duplicates::DropDuplicates;
@ -52,10 +49,10 @@ pub use melt::MeltDF;
pub use query_df::QueryDf; pub use query_df::QueryDf;
pub use rename::RenameDF; pub use rename::RenameDF;
pub use sample::SampleDF; pub use sample::SampleDF;
pub use schema::SchemaDF;
pub use shape::ShapeDF; pub use shape::ShapeDF;
pub use slice::SliceDF; pub use slice::SliceDF;
pub use sql_context::SQLContext; pub use sql_context::SQLContext;
pub use sql_expr::parse_sql_expr;
pub use summary::Summary; pub use summary::Summary;
pub use take::TakeDF; pub use take::TakeDF;
pub use to_arrow::ToArrow; pub use to_arrow::ToArrow;
@ -80,7 +77,6 @@ pub fn add_eager_decls(working_set: &mut StateWorkingSet) {
// Dataframe commands // Dataframe commands
bind_command!( bind_command!(
AppendDF, AppendDF,
CastDF,
ColumnsDF, ColumnsDF,
DataTypes, DataTypes,
Summary, Summary,
@ -98,7 +94,6 @@ pub fn add_eager_decls(working_set: &mut StateWorkingSet) {
QueryDf, QueryDf,
RenameDF, RenameDF,
SampleDF, SampleDF,
SchemaDF,
ShapeDF, ShapeDF,
SliceDF, SliceDF,
TakeDF, TakeDF,
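
The mod.rs hunks above only add and remove entries (`CastDF`, `SchemaDF`, the `sql_expr` re-export) in the `bind_command!` list; the macro body is not part of this comparison, but it presumably registers each command against the `StateWorkingSet` imported at the top of the hunk. A hypothetical sketch of doing that registration directly, assuming the usual `add_decl` method and that the two command types are importable from the surrounding module:

```rust
use nu_protocol::engine::StateWorkingSet;

use super::{CastDF, SchemaDF}; // adjust to wherever the commands are re-exported

// Register the two commands this comparison adds/removes; the real module
// routes the whole command list through bind_command! instead.
pub fn register_extra(working_set: &mut StateWorkingSet) {
    working_set.add_decl(Box::new(CastDF));
    working_set.add_decl(Box::new(SchemaDF));
}
```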

View File

@ -1,12 +1,19 @@
use crate::dataframe::values::{NuDataFrame, NuLazyFrame, NuSchema}; use super::super::values::{NuDataFrame, NuLazyFrame};
use nu_engine::command_prelude::*; use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use std::{fs::File, io::BufReader, path::PathBuf};
use polars::prelude::{ use polars::prelude::{
CsvEncoding, CsvReader, IpcReader, JsonFormat, JsonReader, LazyCsvReader, LazyFileListReader, CsvEncoding, CsvReader, IpcReader, JsonFormat, JsonReader, LazyCsvReader, LazyFileListReader,
LazyFrame, ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader, LazyFrame, ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
}; };
use polars_io::avro::AvroReader; use polars_io::avro::AvroReader;
use std::{fs::File, io::BufReader, path::PathBuf};
#[derive(Clone)] #[derive(Clone)]
pub struct OpenDataFrame; pub struct OpenDataFrame;
@ -63,12 +70,6 @@ impl Command for OpenDataFrame {
"Columns to be selected from csv file. CSV and Parquet file", "Columns to be selected from csv file. CSV and Parquet file",
None, None,
) )
.named(
"schema",
SyntaxShape::Record(vec![]),
r#"Polars Schema in format [{name: str}]. CSV, JSON, and JSONL files"#,
Some('s')
)
.input_output_type(Type::Any, Type::Custom("dataframe".into())) .input_output_type(Type::Any, Type::Custom("dataframe".into()))
.category(Category::Custom("dataframe".into())) .category(Category::Custom("dataframe".into()))
} }
@ -120,15 +121,15 @@ fn command(
"json" => from_json(engine_state, stack, call), "json" => from_json(engine_state, stack, call),
"jsonl" => from_jsonl(engine_state, stack, call), "jsonl" => from_jsonl(engine_state, stack, call),
"avro" => from_avro(engine_state, stack, call), "avro" => from_avro(engine_state, stack, call),
_ => Err(ShellError::FileNotFoundCustom { _ => Err(ShellError::FileNotFoundCustom(
msg: format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"), format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"),
span: blamed, blamed,
}), )),
}, },
None => Err(ShellError::FileNotFoundCustom { None => Err(ShellError::FileNotFoundCustom(
msg: "File without extension".into(), "File without extension".into(),
span: file.span, file.span,
}), )),
} }
.map(|value| PipelineData::Value(value, None)) .map(|value| PipelineData::Value(value, None))
} }
@ -138,27 +139,28 @@ fn from_parquet(
stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
if call.has_flag(engine_state, stack, "lazy")? { if call.has_flag("lazy") {
let file: String = call.req(engine_state, stack, 0)?; let file: String = call.req(engine_state, stack, 0)?;
let args = ScanArgsParquet { let args = ScanArgsParquet {
n_rows: None, n_rows: None,
cache: true, cache: true,
parallel: ParallelStrategy::Auto, parallel: ParallelStrategy::Auto,
rechunk: false, rechunk: false,
row_index: None, row_count: None,
low_memory: false, low_memory: false,
cloud_options: None, cloud_options: None,
use_statistics: false, use_statistics: false,
hive_partitioning: false,
}; };
let df: NuLazyFrame = LazyFrame::scan_parquet(file, args) let df: NuLazyFrame = LazyFrame::scan_parquet(file, args)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Parquet reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "Parquet reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
@ -167,12 +169,14 @@ fn from_parquet(
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?; let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?; let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
let r = File::open(&file.item).map_err(|e| ShellError::GenericError { let r = File::open(&file.item).map_err(|e| {
error: "Error opening file".into(), ShellError::GenericError(
msg: e.to_string(), "Error opening file".into(),
span: Some(file.span), e.to_string(),
help: None, Some(file.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
let reader = ParquetReader::new(r); let reader = ParquetReader::new(r);
@ -183,12 +187,14 @@ fn from_parquet(
let df: NuDataFrame = reader let df: NuDataFrame = reader
.finish() .finish()
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Parquet reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "Parquet reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
@ -204,12 +210,14 @@ fn from_avro(
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?; let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?; let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
let r = File::open(&file.item).map_err(|e| ShellError::GenericError { let r = File::open(&file.item).map_err(|e| {
error: "Error opening file".into(), ShellError::GenericError(
msg: e.to_string(), "Error opening file".into(),
span: Some(file.span), e.to_string(),
help: None, Some(file.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
let reader = AvroReader::new(r); let reader = AvroReader::new(r);
@ -220,12 +228,14 @@ fn from_avro(
let df: NuDataFrame = reader let df: NuDataFrame = reader
.finish() .finish()
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Avro reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "Avro reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
@ -237,23 +247,25 @@ fn from_ipc(
stack: &mut Stack, stack: &mut Stack,
call: &Call, call: &Call,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
if call.has_flag(engine_state, stack, "lazy")? { if call.has_flag("lazy") {
let file: String = call.req(engine_state, stack, 0)?; let file: String = call.req(engine_state, stack, 0)?;
let args = ScanArgsIpc { let args = ScanArgsIpc {
n_rows: None, n_rows: None,
cache: true, cache: true,
rechunk: false, rechunk: false,
row_index: None, row_count: None,
memmap: true, memmap: true,
}; };
let df: NuLazyFrame = LazyFrame::scan_ipc(file, args) let df: NuLazyFrame = LazyFrame::scan_ipc(file, args)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "IPC reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "IPC reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
@ -262,12 +274,14 @@ fn from_ipc(
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?; let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?; let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
let r = File::open(&file.item).map_err(|e| ShellError::GenericError { let r = File::open(&file.item).map_err(|e| {
error: "Error opening file".into(), ShellError::GenericError(
msg: e.to_string(), "Error opening file".into(),
span: Some(file.span), e.to_string(),
help: None, Some(file.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
let reader = IpcReader::new(r); let reader = IpcReader::new(r);
@ -278,12 +292,14 @@ fn from_ipc(
let df: NuDataFrame = reader let df: NuDataFrame = reader
.finish() .finish()
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "IPC reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "IPC reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
@ -297,34 +313,29 @@ fn from_json(
call: &Call, call: &Call,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?; let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let file = File::open(&file.item).map_err(|e| ShellError::GenericError { let file = File::open(&file.item).map_err(|e| {
error: "Error opening file".into(), ShellError::GenericError(
msg: e.to_string(), "Error opening file".into(),
span: Some(file.span), e.to_string(),
help: None, Some(file.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
let maybe_schema = call
.get_flag(engine_state, stack, "schema")?
.map(|schema| NuSchema::try_from(&schema))
.transpose()?;
let buf_reader = BufReader::new(file); let buf_reader = BufReader::new(file);
let reader = JsonReader::new(buf_reader); let reader = JsonReader::new(buf_reader);
let reader = match maybe_schema {
Some(schema) => reader.with_schema(schema.into()),
None => reader,
};
let df: NuDataFrame = reader let df: NuDataFrame = reader
.finish() .finish()
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Json reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "Json reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
@ -337,17 +348,15 @@ fn from_jsonl(
call: &Call, call: &Call,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?; let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
let maybe_schema = call
.get_flag(engine_state, stack, "schema")?
.map(|schema| NuSchema::try_from(&schema))
.transpose()?;
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?; let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let file = File::open(&file.item).map_err(|e| ShellError::GenericError { let file = File::open(&file.item).map_err(|e| {
error: "Error opening file".into(), ShellError::GenericError(
msg: e.to_string(), "Error opening file".into(),
span: Some(file.span), e.to_string(),
help: None, Some(file.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
let buf_reader = BufReader::new(file); let buf_reader = BufReader::new(file);
@ -355,19 +364,16 @@ fn from_jsonl(
.with_json_format(JsonFormat::JsonLines) .with_json_format(JsonFormat::JsonLines)
.infer_schema_len(infer_schema); .infer_schema_len(infer_schema);
let reader = match maybe_schema {
Some(schema) => reader.with_schema(schema.into()),
None => reader,
};
let df: NuDataFrame = reader let df: NuDataFrame = reader
.finish() .finish()
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Json lines reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "Json lines reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
@ -380,17 +386,12 @@ fn from_csv(
call: &Call, call: &Call,
) -> Result<Value, ShellError> { ) -> Result<Value, ShellError> {
let delimiter: Option<Spanned<String>> = call.get_flag(engine_state, stack, "delimiter")?; let delimiter: Option<Spanned<String>> = call.get_flag(engine_state, stack, "delimiter")?;
let no_header: bool = call.has_flag(engine_state, stack, "no-header")?; let no_header: bool = call.has_flag("no-header");
let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?; let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
let skip_rows: Option<usize> = call.get_flag(engine_state, stack, "skip-rows")?; let skip_rows: Option<usize> = call.get_flag(engine_state, stack, "skip-rows")?;
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?; let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
let maybe_schema = call if call.has_flag("lazy") {
.get_flag(engine_state, stack, "schema")?
.map(|schema| NuSchema::try_from(&schema))
.transpose()?;
if call.has_flag(engine_state, stack, "lazy")? {
let file: String = call.req(engine_state, stack, 0)?; let file: String = call.req(engine_state, stack, 0)?;
let csv_reader = LazyCsvReader::new(file); let csv_reader = LazyCsvReader::new(file);
@ -398,30 +399,25 @@ fn from_csv(
None => csv_reader, None => csv_reader,
Some(d) => { Some(d) => {
if d.item.len() != 1 { if d.item.len() != 1 {
return Err(ShellError::GenericError { return Err(ShellError::GenericError(
error: "Incorrect delimiter".into(), "Incorrect delimiter".into(),
msg: "Delimiter has to be one character".into(), "Delimiter has to be one character".into(),
span: Some(d.span), Some(d.span),
help: None, None,
inner: vec![], Vec::new(),
}); ));
} else { } else {
let delimiter = match d.item.chars().next() { let delimiter = match d.item.chars().next() {
Some(d) => d as u8, Some(d) => d as u8,
None => unreachable!(), None => unreachable!(),
}; };
csv_reader.with_separator(delimiter) csv_reader.with_delimiter(delimiter)
} }
} }
}; };
let csv_reader = csv_reader.has_header(!no_header); let csv_reader = csv_reader.has_header(!no_header);
let csv_reader = match maybe_schema {
Some(schema) => csv_reader.with_schema(Some(schema.into())),
None => csv_reader,
};
let csv_reader = match infer_schema { let csv_reader = match infer_schema {
None => csv_reader, None => csv_reader,
Some(r) => csv_reader.with_infer_schema_length(Some(r)), Some(r) => csv_reader.with_infer_schema_length(Some(r)),
@ -434,12 +430,14 @@ fn from_csv(
let df: NuLazyFrame = csv_reader let df: NuLazyFrame = csv_reader
.finish() .finish()
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Parquet reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "Parquet reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
@ -447,12 +445,14 @@ fn from_csv(
} else { } else {
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?; let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let csv_reader = CsvReader::from_path(&file.item) let csv_reader = CsvReader::from_path(&file.item)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error creating CSV reader".into(), ShellError::GenericError(
msg: e.to_string(), "Error creating CSV reader".into(),
span: Some(file.span), e.to_string(),
help: None, Some(file.span),
inner: vec![], None,
Vec::new(),
)
})? })?
.with_encoding(CsvEncoding::LossyUtf8); .with_encoding(CsvEncoding::LossyUtf8);
@ -460,30 +460,25 @@ fn from_csv(
None => csv_reader, None => csv_reader,
Some(d) => { Some(d) => {
if d.item.len() != 1 { if d.item.len() != 1 {
return Err(ShellError::GenericError { return Err(ShellError::GenericError(
error: "Incorrect delimiter".into(), "Incorrect delimiter".into(),
msg: "Delimiter has to be one character".into(), "Delimiter has to be one character".into(),
span: Some(d.span), Some(d.span),
help: None, None,
inner: vec![], Vec::new(),
}); ));
} else { } else {
let delimiter = match d.item.chars().next() { let delimiter = match d.item.chars().next() {
Some(d) => d as u8, Some(d) => d as u8,
None => unreachable!(), None => unreachable!(),
}; };
csv_reader.with_separator(delimiter) csv_reader.with_delimiter(delimiter)
} }
} }
}; };
let csv_reader = csv_reader.has_header(!no_header); let csv_reader = csv_reader.has_header(!no_header);
let csv_reader = match maybe_schema {
Some(schema) => csv_reader.with_schema(Some(schema.into())),
None => csv_reader,
};
let csv_reader = match infer_schema { let csv_reader = match infer_schema {
None => csv_reader, None => csv_reader,
Some(r) => csv_reader.infer_schema(Some(r)), Some(r) => csv_reader.infer_schema(Some(r)),
@ -501,12 +496,14 @@ fn from_csv(
let df: NuDataFrame = csv_reader let df: NuDataFrame = csv_reader
.finish() .finish()
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Parquet reader error".into(), ShellError::GenericError(
msg: format!("{e:?}"), "Parquet reader error".into(),
span: Some(call.head), format!("{e:?}"),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
})? })?
.into(); .into();
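
Most of the open command above is error plumbing around polars readers; the eager CSV branch reduces to a short builder chain. A sketch of that chain on its own, using only methods that appear in the hunk (`with_separator` is the newer name for `with_delimiter` on the right-hand side); the path and settings are placeholders:

```rust
use polars::prelude::*;

// Eager CSV read following `from_csv` above: lossy UTF-8, explicit header
// handling, a custom separator, and a bounded schema-inference window.
fn read_csv(path: &str) -> PolarsResult<DataFrame> {
    CsvReader::from_path(path)?
        .with_encoding(CsvEncoding::LossyUtf8)
        .has_header(true)
        .with_separator(b',')
        .infer_schema(Some(100))
        .finish()
}
```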

View File

@ -1,8 +1,12 @@
use crate::dataframe::{ use super::super::values::NuDataFrame;
eager::SQLContext, use crate::dataframe::values::Column;
values::{Column, NuDataFrame, NuLazyFrame}, use crate::dataframe::{eager::SQLContext, values::NuLazyFrame};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
}; };
use nu_engine::command_prelude::*;
// attribution: // attribution:
// sql_context.rs, and sql_expr.rs were copied from polars-sql. thank you. // sql_context.rs, and sql_expr.rs were copied from polars-sql. thank you.
@ -40,13 +44,10 @@ impl Command for QueryDf {
description: "Query dataframe using SQL", description: "Query dataframe using SQL",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr query 'select a from df'", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr query 'select a from df'",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![Column::new(
vec![Column::new(
"a".to_string(), "a".to_string(),
vec![Value::test_int(1), Value::test_int(3)], vec![Value::test_int(1), Value::test_int(3)],
)], )])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -75,19 +76,19 @@ fn command(
let mut ctx = SQLContext::new(); let mut ctx = SQLContext::new();
ctx.register("df", &df.df); ctx.register("df", &df.df);
let df_sql = ctx let df_sql = ctx.execute(&sql_query).map_err(|e| {
.execute(&sql_query) ShellError::GenericError(
.map_err(|e| ShellError::GenericError { "Dataframe Error".into(),
error: "Dataframe Error".into(), e.to_string(),
msg: e.to_string(), Some(call.head),
span: Some(call.head), None,
help: None, Vec::new(),
inner: vec![], )
})?; })?;
let lazy = NuLazyFrame::new(false, df_sql); let lazy = NuLazyFrame::new(false, df_sql);
let eager = lazy.collect(call.head)?; let eager = lazy.collect(call.head)?;
let value = Value::custom(Box::new(eager), call.head); let value = Value::custom_value(Box::new(eager), call.head);
Ok(PipelineData::Value(value, None)) Ok(PipelineData::Value(value, None))
} }
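
The query command above registers the eager frame under the table name `df` and runs the SQL through the crate-local `SQLContext` (the copy of polars-sql credited in the attribution comment). A sketch of those steps, collecting the resulting lazy plan; the error type of `execute` is not visible in the diff, so it is stringified here:

```rust
use crate::dataframe::eager::SQLContext;
use polars::prelude::*;

// Register, execute, collect: the same sequence `dfr query` performs above,
// minus the NuLazyFrame/PipelineData wrapping.
fn query(df: &DataFrame, sql: &str) -> Result<DataFrame, String> {
    let mut ctx = SQLContext::new();
    ctx.register("df", df);
    let lazy = ctx.execute(sql).map_err(|e| e.to_string())?;
    lazy.collect().map_err(|e| e.to_string())
}
```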

View File

@ -1,8 +1,13 @@
use crate::dataframe::{ use nu_engine::CallExt;
utils::extract_strings, use nu_protocol::{
values::{Column, NuDataFrame, NuLazyFrame}, ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
}; };
use nu_engine::command_prelude::*;
use crate::dataframe::{utils::extract_strings, values::NuLazyFrame};
use super::super::values::{Column, NuDataFrame};
#[derive(Clone)] #[derive(Clone)]
pub struct RenameDF; pub struct RenameDF;
@ -41,8 +46,7 @@ impl Command for RenameDF {
description: "Renames a series", description: "Renames a series",
example: "[5 6 7 8] | dfr into-df | dfr rename '0' new_name", example: "[5 6 7 8] | dfr into-df | dfr rename '0' new_name",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![Column::new(
vec![Column::new(
"new_name".to_string(), "new_name".to_string(),
vec![ vec![
Value::test_int(5), Value::test_int(5),
@ -50,9 +54,7 @@ impl Command for RenameDF {
Value::test_int(7), Value::test_int(7),
Value::test_int(8), Value::test_int(8),
], ],
)], )])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -61,8 +63,7 @@ impl Command for RenameDF {
description: "Renames a dataframe column", description: "Renames a dataframe column",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename a a_new", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename a a_new",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a_new".to_string(), "a_new".to_string(),
vec![Value::test_int(1), Value::test_int(3)], vec![Value::test_int(1), Value::test_int(3)],
@ -71,9 +72,7 @@ impl Command for RenameDF {
"b".to_string(), "b".to_string(),
vec![Value::test_int(2), Value::test_int(4)], vec![Value::test_int(2), Value::test_int(4)],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -82,8 +81,7 @@ impl Command for RenameDF {
description: "Renames two dataframe columns", description: "Renames two dataframe columns",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename [a b] [a_new b_new]", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename [a b] [a_new b_new]",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a_new".to_string(), "a_new".to_string(),
vec![Value::test_int(1), Value::test_int(3)], vec![Value::test_int(1), Value::test_int(3)],
@ -92,9 +90,7 @@ impl Command for RenameDF {
"b_new".to_string(), "b_new".to_string(),
vec![Value::test_int(2), Value::test_int(4)], vec![Value::test_int(2), Value::test_int(4)],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -134,14 +130,14 @@ fn command_eager(
let new_names = extract_strings(new_names)?; let new_names = extract_strings(new_names)?;
for (from, to) in columns.iter().zip(new_names.iter()) { for (from, to) in columns.iter().zip(new_names.iter()) {
df.as_mut() df.as_mut().rename(from, to).map_err(|e| {
.rename(from, to) ShellError::GenericError(
.map_err(|e| ShellError::GenericError { "Error renaming".into(),
error: "Error renaming".into(), e.to_string(),
msg: e.to_string(), Some(call.head),
span: Some(call.head), None,
help: None, Vec::new(),
inner: vec![], )
})?; })?;
} }

View File

@ -1,7 +1,11 @@
use crate::dataframe::values::NuDataFrame; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type,
};
use polars::{prelude::NamedFrom, series::Series}; use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct SampleDF; pub struct SampleDF;
@ -77,51 +81,55 @@ fn command(
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let rows: Option<Spanned<i64>> = call.get_flag(engine_state, stack, "n-rows")?; let rows: Option<Spanned<usize>> = call.get_flag(engine_state, stack, "n-rows")?;
let fraction: Option<Spanned<f64>> = call.get_flag(engine_state, stack, "fraction")?; let fraction: Option<Spanned<f64>> = call.get_flag(engine_state, stack, "fraction")?;
let seed: Option<u64> = call let seed: Option<u64> = call
.get_flag::<i64>(engine_state, stack, "seed")? .get_flag::<i64>(engine_state, stack, "seed")?
.map(|val| val as u64); .map(|val| val as u64);
let replace: bool = call.has_flag(engine_state, stack, "replace")?; let replace: bool = call.has_flag("replace");
let shuffle: bool = call.has_flag(engine_state, stack, "shuffle")?; let shuffle: bool = call.has_flag("shuffle");
let df = NuDataFrame::try_from_pipeline(input, call.head)?; let df = NuDataFrame::try_from_pipeline(input, call.head)?;
match (rows, fraction) { match (rows, fraction) {
(Some(rows), None) => df (Some(rows), None) => df
.as_ref() .as_ref()
.sample_n(&Series::new("s", &[rows.item]), replace, shuffle, seed) .sample_n(rows.item, replace, shuffle, seed)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error creating sample".into(), ShellError::GenericError(
msg: e.to_string(), "Error creating sample".into(),
span: Some(rows.span), e.to_string(),
help: None, Some(rows.span),
inner: vec![], None,
Vec::new(),
)
}), }),
(None, Some(frac)) => df (None, Some(frac)) => df
.as_ref() .as_ref()
.sample_frac(&Series::new("frac", &[frac.item]), replace, shuffle, seed) .sample_frac(frac.item, replace, shuffle, seed)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error creating sample".into(), ShellError::GenericError(
msg: e.to_string(), "Error creating sample".into(),
span: Some(frac.span), e.to_string(),
help: None, Some(frac.span),
inner: vec![], None,
}), Vec::new(),
(Some(_), Some(_)) => Err(ShellError::GenericError { )
error: "Incompatible flags".into(),
msg: "Only one selection criterion allowed".into(),
span: Some(call.head),
help: None,
inner: vec![],
}),
(None, None) => Err(ShellError::GenericError {
error: "No selection".into(),
msg: "No selection criterion was found".into(),
span: Some(call.head),
help: Some("Perhaps you want to use the flag -n or -f".into()),
inner: vec![],
}), }),
(Some(_), Some(_)) => Err(ShellError::GenericError(
"Incompatible flags".into(),
"Only one selection criterion allowed".into(),
Some(call.head),
None,
Vec::new(),
)),
(None, None) => Err(ShellError::GenericError(
"No selection".into(),
"No selection criterion was found".into(),
Some(call.head),
Some("Perhaps you want to use the flag -n or -f".into()),
Vec::new(),
)),
} }
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
} }

View File

@ -1,112 +0,0 @@
use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
#[derive(Clone)]
pub struct SchemaDF;
impl Command for SchemaDF {
fn name(&self) -> &str {
"dfr schema"
}
fn usage(&self) -> &str {
"Show schema for a dataframe."
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.switch("datatype-list", "creates a lazy dataframe", Some('l'))
.input_output_type(
Type::Custom("dataframe".into()),
Type::Custom("dataframe".into()),
)
.category(Category::Custom("dataframe".into()))
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Dataframe schema",
example: r#"[[a b]; [1 "foo"] [3 "bar"]] | dfr into-df | dfr schema"#,
result: Some(Value::record(
record! {
"a" => Value::string("i64", Span::test_data()),
"b" => Value::string("str", Span::test_data()),
},
Span::test_data(),
)),
}]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
if call.has_flag(engine_state, stack, "datatype-list")? {
Ok(PipelineData::Value(datatype_list(Span::unknown()), None))
} else {
command(engine_state, stack, call, input)
}
}
}
fn command(
_engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let schema = df.schema();
let value: Value = schema.into();
Ok(PipelineData::Value(value, None))
}
fn datatype_list(span: Span) -> Value {
let types: Vec<Value> = [
("null", ""),
("bool", ""),
("u8", ""),
("u16", ""),
("u32", ""),
("u64", ""),
("i8", ""),
("i16", ""),
("i32", ""),
("i64", ""),
("f32", ""),
("f64", ""),
("str", ""),
("binary", ""),
("date", ""),
("datetime<time_unit: (ms, us, ns) timezone (optional)>", "Time Unit can be: milliseconds: ms, microseconds: us, nanoseconds: ns. Timezone wildcard is *. Other Timezone examples: UTC, America/Los_Angeles."),
("duration<time_unit: (ms, us, ns)>", "Time Unit can be: milliseconds: ms, microseconds: us, nanoseconds: ns."),
("time", ""),
("object", ""),
("unknown", ""),
("list<dtype>", ""),
]
.iter()
.map(|(dtype, note)| {
Value::record(record! {
"dtype" => Value::string(*dtype, span),
"note" => Value::string(*note, span),
},
span)
})
.collect();
Value::list(types, span)
}
#[cfg(test)]
mod test {
use super::super::super::test_dataframe::test_dataframe;
use super::*;
#[test]
fn test_examples() {
test_dataframe(vec![Box::new(SchemaDF {})])
}
}

View File

@ -1,5 +1,12 @@
use crate::dataframe::values::{Column, NuDataFrame}; use nu_protocol::{
use nu_engine::command_prelude::*; ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
use crate::dataframe::values::Column;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ShapeDF; pub struct ShapeDF;
@ -27,13 +34,10 @@ impl Command for ShapeDF {
description: "Shows row and column shape", description: "Shows row and column shape",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr shape", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr shape",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new("rows".to_string(), vec![Value::test_int(2)]), Column::new("rows".to_string(), vec![Value::test_int(2)]),
Column::new("columns".to_string(), vec![Value::test_int(2)]), Column::new("columns".to_string(), vec![Value::test_int(2)]),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -66,7 +70,7 @@ fn command(
let rows_col = Column::new("rows".to_string(), vec![rows]); let rows_col = Column::new("rows".to_string(), vec![rows]);
let cols_col = Column::new("columns".to_string(), vec![cols]); let cols_col = Column::new("columns".to_string(), vec![cols]);
NuDataFrame::try_from_columns(vec![rows_col, cols_col], None) NuDataFrame::try_from_columns(vec![rows_col, cols_col])
.map(|df| PipelineData::Value(df.into_value(call.head), None)) .map(|df| PipelineData::Value(df.into_value(call.head), None))
} }

View File

@ -1,5 +1,13 @@
use crate::dataframe::values::{Column, NuDataFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use crate::dataframe::values::Column;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct SliceDF; pub struct SliceDF;
@ -29,13 +37,10 @@ impl Command for SliceDF {
description: "Create new dataframe from a slice of the rows", description: "Create new dataframe from a slice of the rows",
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr slice 0 1", example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr slice 0 1",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new("a".to_string(), vec![Value::test_int(1)]), Column::new("a".to_string(), vec![Value::test_int(1)]),
Column::new("b".to_string(), vec![Value::test_int(2)]), Column::new("b".to_string(), vec![Value::test_int(2)]),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),

View File

@ -2,8 +2,7 @@ use crate::dataframe::eager::sql_expr::parse_sql_expr;
use polars::error::{ErrString, PolarsError}; use polars::error::{ErrString, PolarsError};
use polars::prelude::{col, DataFrame, DataType, IntoLazy, LazyFrame}; use polars::prelude::{col, DataFrame, DataType, IntoLazy, LazyFrame};
use sqlparser::ast::{ use sqlparser::ast::{
Expr as SqlExpr, GroupByExpr, Select, SelectItem, SetExpr, Statement, TableFactor, Expr as SqlExpr, Select, SelectItem, SetExpr, Statement, TableFactor, Value as SQLValue,
Value as SQLValue,
}; };
use sqlparser::dialect::GenericDialect; use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser; use sqlparser::parser::Parser;
@ -30,7 +29,7 @@ impl SQLContext {
fn execute_select(&self, select_stmt: &Select) -> Result<LazyFrame, PolarsError> { fn execute_select(&self, select_stmt: &Select) -> Result<LazyFrame, PolarsError> {
// Determine involved dataframe // Determine involved dataframe
// Implicit join require some more work in query parsers, Explicit join are preferred for now. // Implicit join require some more work in query parsers, Explicit join are preferred for now.
let tbl = select_stmt.from.first().ok_or_else(|| { let tbl = select_stmt.from.get(0).ok_or_else(|| {
PolarsError::ComputeError(ErrString::from("No table found in select statement")) PolarsError::ComputeError(ErrString::from("No table found in select statement"))
})?; })?;
let mut alias_map = HashMap::new(); let mut alias_map = HashMap::new();
@ -38,7 +37,7 @@ impl SQLContext {
TableFactor::Table { name, alias, .. } => { TableFactor::Table { name, alias, .. } => {
let tbl_name = name let tbl_name = name
.0 .0
.first() .get(0)
.ok_or_else(|| { .ok_or_else(|| {
PolarsError::ComputeError(ErrString::from( PolarsError::ComputeError(ErrString::from(
"No table found in select statement", "No table found in select statement",
@ -97,13 +96,8 @@ impl SQLContext {
.collect::<Result<Vec<_>, PolarsError>>()?; .collect::<Result<Vec<_>, PolarsError>>()?;
// Check for group by // Check for group by
// After projection since there might be number. // After projection since there might be number.
let group_by = match &select_stmt.group_by { let group_by = select_stmt
GroupByExpr::All => .group_by
Err(
PolarsError::ComputeError("Group-By Error: Only positive number or expression are supported, not all".into())
)?,
GroupByExpr::Expressions(expressions) => expressions
}
.iter() .iter()
.map( .map(
|e|match e { |e|match e {
@ -188,7 +182,7 @@ impl SQLContext {
)) ))
} else { } else {
let ast = ast let ast = ast
.first() .get(0)
.ok_or_else(|| PolarsError::ComputeError(ErrString::from("No statement found")))?; .ok_or_else(|| PolarsError::ComputeError(ErrString::from("No statement found")))?;
Ok(match ast { Ok(match ast {
Statement::Query(query) => { Statement::Query(query) => {

View File

@ -2,8 +2,8 @@ use polars::error::PolarsError;
use polars::prelude::{col, lit, DataType, Expr, LiteralValue, PolarsResult as Result, TimeUnit}; use polars::prelude::{col, lit, DataType, Expr, LiteralValue, PolarsResult as Result, TimeUnit};
use sqlparser::ast::{ use sqlparser::ast::{
ArrayElemTypeDef, BinaryOperator as SQLBinaryOperator, DataType as SQLDataType, BinaryOperator as SQLBinaryOperator, DataType as SQLDataType, Expr as SqlExpr,
Expr as SqlExpr, Function as SQLFunction, Value as SqlValue, WindowType, Function as SQLFunction, Value as SqlValue, WindowType,
}; };
fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> { fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
@ -13,7 +13,7 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
| SQLDataType::Uuid | SQLDataType::Uuid
| SQLDataType::Clob(_) | SQLDataType::Clob(_)
| SQLDataType::Text | SQLDataType::Text
| SQLDataType::String(_) => DataType::String, | SQLDataType::String => DataType::Utf8,
SQLDataType::Float(_) => DataType::Float32, SQLDataType::Float(_) => DataType::Float32,
SQLDataType::Real => DataType::Float32, SQLDataType::Real => DataType::Float32,
SQLDataType::Double => DataType::Float64, SQLDataType::Double => DataType::Float64,
@ -31,12 +31,9 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
SQLDataType::Time(_, _) => DataType::Time, SQLDataType::Time(_, _) => DataType::Time,
SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Microseconds, None), SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Microseconds, None),
SQLDataType::Interval => DataType::Duration(TimeUnit::Microseconds), SQLDataType::Interval => DataType::Duration(TimeUnit::Microseconds),
SQLDataType::Array(array_type_def) => match array_type_def { SQLDataType::Array(inner_type) => match inner_type {
ArrayElemTypeDef::AngleBracket(inner_type) Some(inner_type) => DataType::List(Box::new(map_sql_polars_datatype(inner_type)?)),
| ArrayElemTypeDef::SquareBracket(inner_type) => { None => {
DataType::List(Box::new(map_sql_polars_datatype(inner_type)?))
}
_ => {
return Err(PolarsError::ComputeError( return Err(PolarsError::ComputeError(
"SQL Datatype Array(None) was not supported in polars-sql yet!".into(), "SQL Datatype Array(None) was not supported in polars-sql yet!".into(),
)) ))
@ -62,9 +59,7 @@ fn binary_op_(left: Expr, right: Expr, op: &SQLBinaryOperator) -> Result<Expr> {
SQLBinaryOperator::Multiply => left * right, SQLBinaryOperator::Multiply => left * right,
SQLBinaryOperator::Divide => left / right, SQLBinaryOperator::Divide => left / right,
SQLBinaryOperator::Modulo => left % right, SQLBinaryOperator::Modulo => left % right,
SQLBinaryOperator::StringConcat => { SQLBinaryOperator::StringConcat => left.cast(DataType::Utf8) + right.cast(DataType::Utf8),
left.cast(DataType::String) + right.cast(DataType::String)
}
SQLBinaryOperator::Gt => left.gt(right), SQLBinaryOperator::Gt => left.gt(right),
SQLBinaryOperator::Lt => left.lt(right), SQLBinaryOperator::Lt => left.lt(right),
SQLBinaryOperator::GtEq => left.gt_eq(right), SQLBinaryOperator::GtEq => left.gt_eq(right),
@ -119,11 +114,7 @@ pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
binary_op_(left, right, op)? binary_op_(left, right, op)?
} }
SqlExpr::Function(sql_function) => parse_sql_function(sql_function)?, SqlExpr::Function(sql_function) => parse_sql_function(sql_function)?,
SqlExpr::Cast { SqlExpr::Cast { expr, data_type } => cast_(parse_sql_expr(expr)?, data_type)?,
expr,
data_type,
format: _,
} => cast_(parse_sql_expr(expr)?, data_type)?,
SqlExpr::Nested(expr) => parse_sql_expr(expr)?, SqlExpr::Nested(expr) => parse_sql_expr(expr)?,
SqlExpr::Value(value) => literal_expr(value)?, SqlExpr::Value(value) => literal_expr(value)?,
_ => { _ => {

View File

@ -1,11 +1,16 @@
use crate::dataframe::values::{Column, NuDataFrame}; use super::super::values::{Column, NuDataFrame};
use nu_engine::command_prelude::*;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::{ use polars::{
chunked_array::ChunkedArray, chunked_array::ChunkedArray,
prelude::{ prelude::{
AnyValue, DataFrame, DataType, Float64Type, IntoSeries, NewChunkedArray, AnyValue, DataFrame, DataType, Float64Type, IntoSeries, NewChunkedArray,
QuantileInterpolOptions, Series, StringType, QuantileInterpolOptions, Series, Utf8Type,
}, },
}; };
@ -41,8 +46,7 @@ impl Command for Summary {
description: "list dataframe descriptives", description: "list dataframe descriptives",
example: "[[a b]; [1 1] [1 1]] | dfr into-df | dfr summary", example: "[[a b]; [1 1] [1 1]] | dfr into-df | dfr summary",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"descriptor".to_string(), "descriptor".to_string(),
vec![ vec![
@ -88,9 +92,7 @@ impl Command for Summary {
Value::test_float(1.0), Value::test_float(1.0),
], ],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -125,23 +127,23 @@ fn command(
if (&0.0..=&1.0).contains(&val) { if (&0.0..=&1.0).contains(&val) {
Ok(*val) Ok(*val)
} else { } else {
Err(ShellError::GenericError { Err(ShellError::GenericError(
error: "Incorrect value for quantile".into(), "Incorrect value for quantile".to_string(),
msg: "value should be between 0 and 1".into(), "value should be between 0 and 1".to_string(),
span: Some(span), Some(span),
help: None, None,
inner: vec![], Vec::new(),
}) ))
} }
} }
Value::Error { error, .. } => Err(*error.clone()), Value::Error { error, .. } => Err(*error.clone()),
_ => Err(ShellError::GenericError { _ => Err(ShellError::GenericError(
error: "Incorrect value for quantile".into(), "Incorrect value for quantile".to_string(),
msg: "value should be a float".into(), "value should be a float".to_string(),
span: Some(span), Some(span),
help: None, None,
inner: vec![], Vec::new(),
}), )),
} }
}) })
.collect::<Result<Vec<f64>, ShellError>>() .collect::<Result<Vec<f64>, ShellError>>()
@ -169,7 +171,7 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?; let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let names = ChunkedArray::<StringType>::from_slice_options("descriptor", &labels).into_series(); let names = ChunkedArray::<Utf8Type>::from_slice_options("descriptor", &labels).into_series();
let head = std::iter::once(names); let head = std::iter::once(names);
@ -177,18 +179,17 @@ fn command(
.as_ref() .as_ref()
.get_columns() .get_columns()
.iter() .iter()
.filter(|col| !matches!(col.dtype(), &DataType::Object("object", _))) .filter(|col| col.dtype() != &DataType::Object("object"))
.map(|col| { .map(|col| {
let count = col.len() as f64; let count = col.len() as f64;
let sum = col.sum_as_series().ok().and_then(|series| { let sum = col
series .sum_as_series()
.cast(&DataType::Float64) .cast(&DataType::Float64)
.ok() .ok()
.and_then(|ca| match ca.get(0) { .and_then(|ca| match ca.get(0) {
Ok(AnyValue::Float64(v)) => Some(v), Ok(AnyValue::Float64(v)) => Some(v),
_ => None, _ => None,
})
}); });
let mean = match col.mean_as_series().get(0) { let mean = match col.mean_as_series().get(0) {
@ -196,30 +197,23 @@ fn command(
_ => None, _ => None,
}; };
let median = match col.median_as_series() { let median = match col.median_as_series().get(0) {
Ok(v) => match v.get(0) {
Ok(AnyValue::Float64(v)) => Some(v), Ok(AnyValue::Float64(v)) => Some(v),
_ => None, _ => None,
},
_ => None,
}; };
let std = match col.std_as_series(0) { let std = match col.std_as_series(0).get(0) {
Ok(v) => match v.get(0) {
Ok(AnyValue::Float64(v)) => Some(v), Ok(AnyValue::Float64(v)) => Some(v),
_ => None, _ => None,
},
_ => None,
}; };
let min = col.min_as_series().ok().and_then(|series| { let min = col
series .min_as_series()
.cast(&DataType::Float64) .cast(&DataType::Float64)
.ok() .ok()
.and_then(|ca| match ca.get(0) { .and_then(|ca| match ca.get(0) {
Ok(AnyValue::Float64(v)) => Some(v), Ok(AnyValue::Float64(v)) => Some(v),
_ => None, _ => None,
})
}); });
let mut quantiles = quantiles let mut quantiles = quantiles
@ -236,14 +230,13 @@ fn command(
}) })
.collect::<Vec<Option<f64>>>(); .collect::<Vec<Option<f64>>>();
let max = col.max_as_series().ok().and_then(|series| { let max = col
series .max_as_series()
.cast(&DataType::Float64) .cast(&DataType::Float64)
.ok() .ok()
.and_then(|ca| match ca.get(0) { .and_then(|ca| match ca.get(0) {
Ok(AnyValue::Float64(v)) => Some(v), Ok(AnyValue::Float64(v)) => Some(v),
_ => None, _ => None,
})
}); });
let mut descriptors = vec![Some(count), sum, mean, median, std, min]; let mut descriptors = vec![Some(count), sum, mean, median, std, min];
@ -257,12 +250,14 @@ fn command(
let res = head.chain(tail).collect::<Vec<Series>>(); let res = head.chain(tail).collect::<Vec<Series>>();
DataFrame::new(res) DataFrame::new(res)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Dataframe Error".into(), ShellError::GenericError(
msg: e.to_string(), "Dataframe Error".into(),
span: Some(call.head), e.to_string(),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
}) })
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
} }

View File

@ -1,8 +1,15 @@
use crate::dataframe::values::{Column, NuDataFrame}; use nu_engine::CallExt;
use nu_engine::command_prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars::prelude::DataType; use polars::prelude::DataType;
use crate::dataframe::values::Column;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct TakeDF; pub struct TakeDF;
@ -37,8 +44,7 @@ impl Command for TakeDF {
let indices = ([0 2] | dfr into-df); let indices = ([0 2] | dfr into-df);
$df | dfr take $indices"#, $df | dfr take $indices"#,
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a".to_string(), "a".to_string(),
vec![Value::test_int(4), Value::test_int(4)], vec![Value::test_int(4), Value::test_int(4)],
@ -47,9 +53,7 @@ impl Command for TakeDF {
"b".to_string(), "b".to_string(),
vec![Value::test_int(1), Value::test_int(3)], vec![Value::test_int(1), Value::test_int(3)],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -60,13 +64,10 @@ impl Command for TakeDF {
let indices = ([0 2] | dfr into-df); let indices = ([0 2] | dfr into-df);
$series | dfr take $indices"#, $series | dfr take $indices"#,
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![Column::new(
vec![Column::new(
"0".to_string(), "0".to_string(),
vec![Value::test_int(4), Value::test_int(5)], vec![Value::test_int(4), Value::test_int(5)],
)], )])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -96,41 +97,47 @@ fn command(
let index = NuDataFrame::try_from_value(index_value)?.as_series(index_span)?; let index = NuDataFrame::try_from_value(index_value)?.as_series(index_span)?;
let casted = match index.dtype() { let casted = match index.dtype() {
DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => index DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => {
.cast(&DataType::UInt32) index.cast(&DataType::UInt32).map_err(|e| {
.map_err(|e| ShellError::GenericError { ShellError::GenericError(
error: "Error casting index list".into(), "Error casting index list".into(),
msg: e.to_string(), e.to_string(),
span: Some(index_span), Some(index_span),
help: None, None,
inner: vec![], Vec::new(),
}), )
_ => Err(ShellError::GenericError { })
error: "Incorrect type".into(), }
msg: "Series with incorrect type".into(), _ => Err(ShellError::GenericError(
span: Some(call.head), "Incorrect type".into(),
help: Some("Consider using a Series with type int type".into()), "Series with incorrect type".into(),
inner: vec![], Some(call.head),
}), Some("Consider using a Series with type int type".into()),
Vec::new(),
)),
}?; }?;
let indices = casted.u32().map_err(|e| ShellError::GenericError { let indices = casted.u32().map_err(|e| {
error: "Error casting index list".into(), ShellError::GenericError(
msg: e.to_string(), "Error casting index list".into(),
span: Some(index_span), e.to_string(),
help: None, Some(index_span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
NuDataFrame::try_from_pipeline(input, call.head).and_then(|df| { NuDataFrame::try_from_pipeline(input, call.head).and_then(|df| {
df.as_ref() df.as_ref()
.take(indices) .take(indices)
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error taking values".into(), ShellError::GenericError(
msg: e.to_string(), "Error taking values".into(),
span: Some(call.head), e.to_string(),
help: None, Some(call.head),
inner: vec![], None,
Vec::new(),
)
}) })
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None)) .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}) })

View File

@ -1,9 +1,15 @@
use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
use polars::prelude::{IpcWriter, SerWriter};
use std::{fs::File, path::PathBuf}; use std::{fs::File, path::PathBuf};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use polars::prelude::{IpcWriter, SerWriter};
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToArrow; pub struct ToArrow;
@ -52,22 +58,24 @@ fn command(
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?; let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let mut file = File::create(&file_name.item).map_err(|e| ShellError::GenericError { let mut file = File::create(&file_name.item).map_err(|e| {
error: "Error with file name".into(), ShellError::GenericError(
msg: e.to_string(), "Error with file name".into(),
span: Some(file_name.span), e.to_string(),
help: None, Some(file_name.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
IpcWriter::new(&mut file) IpcWriter::new(&mut file).finish(df.as_mut()).map_err(|e| {
.finish(df.as_mut()) ShellError::GenericError(
.map_err(|e| ShellError::GenericError { "Error saving file".into(),
error: "Error saving file".into(), e.to_string(),
msg: e.to_string(), Some(file_name.span),
span: Some(file_name.span), None,
help: None, Vec::new(),
inner: vec![], )
})?; })?;
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span); let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

View File

@ -1,12 +1,16 @@
use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
use polars_io::{
avro::{AvroCompression, AvroWriter},
SerWriter,
};
use std::{fs::File, path::PathBuf}; use std::{fs::File, path::PathBuf};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use polars_io::avro::{AvroCompression, AvroWriter};
use polars_io::SerWriter;
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToAvro; pub struct ToAvro;
@ -81,23 +85,27 @@ fn command(
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?; let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let file = File::create(&file_name.item).map_err(|e| ShellError::GenericError { let file = File::create(&file_name.item).map_err(|e| {
error: "Error with file name".into(), ShellError::GenericError(
msg: e.to_string(), "Error with file name".into(),
span: Some(file_name.span), e.to_string(),
help: None, Some(file_name.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
AvroWriter::new(file) AvroWriter::new(file)
.with_compression(compression) .with_compression(compression)
.finish(df.as_mut()) .finish(df.as_mut())
.map_err(|e| ShellError::GenericError { .map_err(|e| {
error: "Error saving file".into(), ShellError::GenericError(
msg: e.to_string(), "Error saving file".into(),
span: Some(file_name.span), e.to_string(),
help: None, Some(file_name.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span); let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

View File

@ -1,9 +1,15 @@
use crate::dataframe::values::NuDataFrame;
use nu_engine::command_prelude::*;
use polars::prelude::{CsvWriter, SerWriter};
use std::{fs::File, path::PathBuf}; use std::{fs::File, path::PathBuf};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
};
use polars::prelude::{CsvWriter, SerWriter};
use super::super::values::NuDataFrame;
#[derive(Clone)] #[derive(Clone)]
pub struct ToCSV; pub struct ToCSV;
@ -64,56 +70,58 @@ fn command(
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let file_name: Spanned<PathBuf> = call.req(engine_state, stack, 0)?; let file_name: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let delimiter: Option<Spanned<String>> = call.get_flag(engine_state, stack, "delimiter")?; let delimiter: Option<Spanned<String>> = call.get_flag(engine_state, stack, "delimiter")?;
let no_header: bool = call.has_flag(engine_state, stack, "no-header")?; let no_header: bool = call.has_flag("no-header");
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?; let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let mut file = File::create(&file_name.item).map_err(|e| ShellError::GenericError { let mut file = File::create(&file_name.item).map_err(|e| {
error: "Error with file name".into(), ShellError::GenericError(
msg: e.to_string(), "Error with file name".into(),
span: Some(file_name.span), e.to_string(),
help: None, Some(file_name.span),
inner: vec![], None,
Vec::new(),
)
})?; })?;
let writer = CsvWriter::new(&mut file); let writer = CsvWriter::new(&mut file);
let writer = if no_header { let writer = if no_header {
writer.include_header(false) writer.has_header(false)
} else { } else {
writer.include_header(true) writer.has_header(true)
}; };
let mut writer = match delimiter { let mut writer = match delimiter {
None => writer, None => writer,
Some(d) => { Some(d) => {
if d.item.len() != 1 { if d.item.len() != 1 {
return Err(ShellError::GenericError { return Err(ShellError::GenericError(
error: "Incorrect delimiter".into(), "Incorrect delimiter".into(),
msg: "Delimiter has to be one char".into(), "Delimiter has to be one char".into(),
span: Some(d.span), Some(d.span),
help: None, None,
inner: vec![], Vec::new(),
}); ));
} else { } else {
let delimiter = match d.item.chars().next() { let delimiter = match d.item.chars().next() {
Some(d) => d as u8, Some(d) => d as u8,
None => unreachable!(), None => unreachable!(),
}; };
writer.with_separator(delimiter) writer.with_delimiter(delimiter)
} }
} }
}; };
writer writer.finish(df.as_mut()).map_err(|e| {
.finish(df.as_mut()) ShellError::GenericError(
.map_err(|e| ShellError::GenericError { "Error writing to file".into(),
error: "Error writing to file".into(), e.to_string(),
msg: e.to_string(), Some(file_name.span),
span: Some(file_name.span), None,
help: None, Vec::new(),
inner: vec![], )
})?; })?;
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span); let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

View File

@ -1,7 +1,10 @@
use crate::dataframe::values::{Column, NuDataFrame, NuSchema}; use super::super::values::{Column, NuDataFrame};
use nu_engine::command_prelude::*;
use polars::prelude::*; use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};
#[derive(Clone)] #[derive(Clone)]
pub struct ToDataFrame; pub struct ToDataFrame;
@ -17,12 +20,6 @@ impl Command for ToDataFrame {
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build(self.name()) Signature::build(self.name())
.named(
"schema",
SyntaxShape::Record(vec![]),
r#"Polars Schema in format [{name: str}]. CSV, JSON, and JSONL files"#,
Some('s'),
)
.input_output_type(Type::Any, Type::Custom("dataframe".into())) .input_output_type(Type::Any, Type::Custom("dataframe".into()))
.category(Category::Custom("dataframe".into())) .category(Category::Custom("dataframe".into()))
} }
@ -33,8 +30,7 @@ impl Command for ToDataFrame {
description: "Takes a dictionary and creates a dataframe", description: "Takes a dictionary and creates a dataframe",
example: "[[a b];[1 2] [3 4]] | dfr into-df", example: "[[a b];[1 2] [3 4]] | dfr into-df",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"a".to_string(), "a".to_string(),
vec![Value::test_int(1), Value::test_int(3)], vec![Value::test_int(1), Value::test_int(3)],
@ -43,9 +39,7 @@ impl Command for ToDataFrame {
"b".to_string(), "b".to_string(),
vec![Value::test_int(2), Value::test_int(4)], vec![Value::test_int(2), Value::test_int(4)],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -54,8 +48,7 @@ impl Command for ToDataFrame {
description: "Takes a list of tables and creates a dataframe", description: "Takes a list of tables and creates a dataframe",
example: "[[1 2 a] [3 4 b] [5 6 c]] | dfr into-df", example: "[[1 2 a] [3 4 b] [5 6 c]] | dfr into-df",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![
vec![
Column::new( Column::new(
"0".to_string(), "0".to_string(),
vec![Value::test_int(1), Value::test_int(3), Value::test_int(5)], vec![Value::test_int(1), Value::test_int(3), Value::test_int(5)],
@ -72,9 +65,7 @@ impl Command for ToDataFrame {
Value::test_string("c"), Value::test_string("c"),
], ],
), ),
], ])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -83,17 +74,14 @@ impl Command for ToDataFrame {
description: "Takes a list and creates a dataframe", description: "Takes a list and creates a dataframe",
example: "[a b c] | dfr into-df", example: "[a b c] | dfr into-df",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![Column::new(
vec![Column::new(
"0".to_string(), "0".to_string(),
vec![ vec![
Value::test_string("a"), Value::test_string("a"),
Value::test_string("b"), Value::test_string("b"),
Value::test_string("c"), Value::test_string("c"),
], ],
)], )])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
@ -102,78 +90,30 @@ impl Command for ToDataFrame {
description: "Takes a list of booleans and creates a dataframe", description: "Takes a list of booleans and creates a dataframe",
example: "[true true false] | dfr into-df", example: "[true true false] | dfr into-df",
result: Some( result: Some(
NuDataFrame::try_from_columns( NuDataFrame::try_from_columns(vec![Column::new(
vec![Column::new(
"0".to_string(), "0".to_string(),
vec![ vec![
Value::test_bool(true), Value::test_bool(true),
Value::test_bool(true), Value::test_bool(true),
Value::test_bool(false), Value::test_bool(false),
], ],
)], )])
None,
)
.expect("simple df for test should not fail") .expect("simple df for test should not fail")
.into_value(Span::test_data()), .into_value(Span::test_data()),
), ),
}, },
Example {
description: "Convert to a dataframe and provide a schema",
example: "{a: 1, b: {a: [1 2 3]}, c: [a b c]}| dfr into-df -s {a: u8, b: {a: list<u64>}, c: list<str>}",
result: Some(
NuDataFrame::try_from_series(vec![
Series::new("a", &[1u8]),
{
let dtype = DataType::Struct(vec![Field::new("a", DataType::List(Box::new(DataType::UInt64)))]);
let vals = vec![AnyValue::StructOwned(
Box::new((vec![AnyValue::List(Series::new("a", &[1u64, 2, 3]))], vec![Field::new("a", DataType::String)]))); 1];
Series::from_any_values_and_dtype("b", &vals, &dtype, false)
.expect("Struct series should not fail")
},
{
let dtype = DataType::List(Box::new(DataType::String));
let vals = vec![AnyValue::List(Series::new("c", &["a", "b", "c"]))];
Series::from_any_values_and_dtype("c", &vals, &dtype, false)
.expect("List series should not fail")
}
], Span::test_data())
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),
},
Example {
description: "Convert to a dataframe and provide a schema that adds a new column",
example: r#"[[a b]; [1 "foo"] [2 "bar"]] | dfr into-df -s {a: u8, b:str, c:i64} | dfr fill-null 3"#,
result: Some(NuDataFrame::try_from_series(vec![
Series::new("a", [1u8, 2]),
Series::new("b", ["foo", "bar"]),
Series::new("c", [3i64, 3]),
], Span::test_data())
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),
}
] ]
} }
fn run( fn run(
&self, &self,
engine_state: &EngineState, _engine_state: &EngineState,
stack: &mut Stack, _stack: &mut Stack,
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let maybe_schema = call NuDataFrame::try_from_iter(input.into_iter())
.get_flag(engine_state, stack, "schema")? .map(|df| PipelineData::Value(NuDataFrame::into_value(df, call.head), None))
.map(|schema| NuSchema::try_from(&schema))
.transpose()?;
let df = NuDataFrame::try_from_iter(input.into_iter(), maybe_schema.clone())?;
Ok(PipelineData::Value(
NuDataFrame::into_value(df, call.head),
None,
))
} }
} }

Some files were not shown because too many files have changed in this diff