Compare commits
136 Commits
Commits in this comparison (SHA1):

6c07bc10e2, 6365ba0286, 545b1dcd94, fb89f2f48c, f6ee21f76b, d69a4db2e7, d4bfbb5eaf, 507f24d029,
230c36f2fb, 219c719e98, 50146bdef3, 2042f7f769, 0594f9e7aa, 3b8deb9ec7, 727ff5f2d4, 3d62528d8c,
a42d419b66, 9602e82029, 8e98df8b28, 2daf8ec72d, afcacda35f, 06cf3fa5ad, 9a482ce284, 8018ae3286,
ef322a24c5, a8db4f0b0e, 98a4280c41, 0e1bfae13d, 6ff717c0ba, d534a89867, 5bc9246f0f, 1e89cc3578,
06f5199570, 9e5e9819d6, 1f8ccd8e5e, e9d8b19d4d, 7c63ce15d8, a3a9571dac, 2cc5952c37, aa88449f29,
06199d731b, 0ba86d7eb8, 6efd1bcb3f, 0d06b6259f, 8fdc272bcc, 0ea7a38c21, 1999e0dcf3, ac30b3d108,
2b1e05aad0, 6c56829976, 2c58beec13, 9c779b071b, 1e94793df5, 7d9a77f179, bb079608dd, 5fa42eeb8c,
3e09158afc, 7a78171b34, 633ebc7e43, f0cb2f38df, f26d3bf8d7, 498672f5e5, 038391519b, 8004e8e2a0,
e192684612, 5d40fc2726, a22d70718f, 24a49f1b0a, 04473a5593, d1e7884d19, 2b96c93b8d, fc41a0f96b,
8bd68416e3, 2062e33c37, c6383874e9, d90b25c633, 44bcfb3403, c047fd4778, 16bd7b6d0d, 3cef94ba39,
f818193b53, 1aec4a343a, 852de79212, 06f40405fe, 65bac77e8a, 32d1939a95, 53e35670ea, a92567489f,
2145feff5d, 0b95465ea1, ec804f4568, 4717ac70fd, 9969fbfbb1, 5f39267a80, 94a9380e8b, 1d64863585,
8218f72eea, c0b99b7131, 75c033e4d1, d88d057bf6, b00098ccc6, 7e5e9c28dd, 8ffffe9bcc, 8030f7e9f0,
e4959d2f9f, f311da9623, 14d80d54fe, 23b467061b, 8d8f25b210, 7ee22603ac, 4052a99ff5, ccfa35289b,
54fc164e1c, 3a35bf7d4e, a61d09222f, 07ac3c3aab, 061e9294b3, 374757f286, ca75cd7c0a, d08c072f19,
9b99b2f6ac, 1cb449b2d1, 6cc66c8afd, 08e495ea67, b0647f780d, 2dfd975940, fbdb125141, c2ea993f7e,
e14e60dd2c, 768ff47d28, 78a1879e36, 0b9c0fea9d, 02a3430ef0, 6623ed9061, 48cf103439, 1bcb87c48d
.github/pull_request_template.md (4 changed lines)

```diff
@@ -7,5 +7,5 @@
 Make sure you've run and fixed any issues with these commands:
 
 - [ ] `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
-- [ ] `cargo clippy --all --all-features -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect` to check that you're using the standard code style
-- [ ] `cargo build; cargo test --all --all-features` to check that all the tests pass
+- [ ] `cargo clippy --workspace --features=extra -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect` to check that you're using the standard code style
+- [ ] `cargo test --workspace --features=extra` to check that all the tests pass
```
.github/workflows/ci.yml (96 changed lines)

```diff
@@ -7,26 +7,17 @@ on:
 name: continuous-integration
 
 jobs:
-  build-clippy:
+  nu-fmt-clippy:
     strategy:
-      fail-fast: false
+      fail-fast: true
       matrix:
         platform: [windows-latest, macos-latest, ubuntu-latest]
-        style: [all, default]
         rust:
           - stable
-        include:
-          - style: all
-            flags: "--all-features"
-          - style: default
-            flags: ""
-        exclude:
-          - platform: windows-latest
-            style: default
-          - platform: macos-latest
-            style: default
 
     runs-on: ${{ matrix.platform }}
+    env:
+      NUSHELL_CARGO_TARGET: ci
 
     steps:
       - uses: actions/checkout@v2
@@ -41,7 +32,7 @@ jobs:
 
       - uses: Swatinem/rust-cache@v1
         with:
-          key: ${{ matrix.style }}v1 # increment this to bust the cache if needed
+          key: "v2" # increment this to bust the cache if needed
 
       - name: Rustfmt
         uses: actions-rs/cargo@v1
@@ -49,29 +40,26 @@ jobs:
           command: fmt
           args: --all -- --check
 
-      - name: Build Nushell
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --workspace ${{ matrix.flags }}
-
       - name: Clippy
         uses: actions-rs/cargo@v1
         with:
           command: clippy
-          args: --workspace ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
+          args: --features=extra --workspace --exclude nu_plugin_* -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
 
-  test:
+  nu-tests:
+    env:
+      NUSHELL_CARGO_TARGET: ci
+
     strategy:
-      fail-fast: false
+      fail-fast: true
       matrix:
         platform: [windows-latest, macos-latest, ubuntu-latest]
-        style: [all, default]
+        style: [extra, default]
         rust:
           - stable
         include:
-          - style: all
-            flags: "--all-features"
+          - style: extra
+            flags: "--features=extra"
           - style: default
             flags: ""
         exclude:
@@ -91,29 +79,23 @@ jobs:
           profile: minimal
           toolchain: ${{ matrix.rust }}
           override: true
-          components: rustfmt, clippy
 
       - uses: Swatinem/rust-cache@v1
         with:
-          key: ${{ matrix.style }}v1 # increment this to bust the cache if needed
+          key: ${{ matrix.style }}v3 # increment this to bust the cache if needed
 
-      - uses: taiki-e/install-action@nextest
-
       - name: Tests
         uses: actions-rs/cargo@v1
         with:
-          command: nextest
-          args: run --all ${{ matrix.flags }}
-
-      - name: Doctests
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --workspace --doc ${{ matrix.flags }}
+          command: test
+          args: --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}
 
   python-virtualenv:
+    env:
+      NUSHELL_CARGO_TARGET: ci
+
     strategy:
-      fail-fast: false
+      fail-fast: true
       matrix:
         platform: [ubuntu-latest, macos-latest, windows-latest]
         rust:
@@ -135,13 +117,13 @@ jobs:
 
       - uses: Swatinem/rust-cache@v1
         with:
-          key: "1" # increment this to bust the cache if needed
+          key: "2" # increment this to bust the cache if needed
 
       - name: Install Nushell
         uses: actions-rs/cargo@v1
         with:
           command: install
-          args: --path=. --no-default-features --debug
+          args: --path=. --profile ci --no-default-features
 
       - name: Setup Python
         uses: actions/setup-python@v2
@@ -161,9 +143,14 @@ jobs:
         run: cd virtualenv && tox -e ${{ matrix.py }} -- -k nushell
         shell: bash
 
+  # Build+test plugins on their own, without the rest of Nu. This helps with CI parallelization and
+  # also helps test that the plugins build without any feature unification shenanigans
   plugins:
+    env:
+      NUSHELL_CARGO_TARGET: ci
+
     strategy:
-      fail-fast: false
+      fail-fast: true
       matrix:
         platform: [windows-latest, macos-latest, ubuntu-latest]
         rust:
@@ -181,29 +168,14 @@ jobs:
           toolchain: ${{ matrix.rust }}
           override: true
 
-      # This job does not use rust-cache because 1) we have limited cache space, 2) even
-      # without caching, it's not the slowest job. Revisit if those facts change.
-
-      - name: Build nu_plugin_example
+      - name: Clippy
         uses: actions-rs/cargo@v1
         with:
-          command: build
-          args: --package nu_plugin_example
+          command: clippy
+          args: --package nu_plugin_* ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
 
-      - name: Build nu_plugin_gstat
+      - name: Tests
         uses: actions-rs/cargo@v1
         with:
-          command: build
-          args: --package nu_plugin_gstat
-
-      - name: Build nu_plugin_inc
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --package nu_plugin_inc
-
-      - name: Build nu_plugin_query
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --package nu_plugin_query
+          command: test
+          args: --profile ci --package nu_plugin_*
```
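Taken together, the reworked jobs boil down to roughly the following local commands. This is a sketch assembled from the step arguments above, not part of the diff itself; the quotes around the `nu_plugin_*` globs are added here so the shell passes them through to cargo unexpanded.

```nu
# Approximate local equivalents of the new CI steps (run from the repo root).
cargo fmt --all -- --check
cargo clippy --features=extra --workspace --exclude "nu_plugin_*" -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
cargo test --workspace --profile ci --exclude "nu_plugin_*" --features=extra
cargo clippy --package "nu_plugin_*" -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
cargo test --profile ci --package "nu_plugin_*"
```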
.github/workflows/release-pkg.nu (new executable file, 155 lines)

```nu
#!/usr/bin/env nu

# Created: 2022/05/26 19:05:20
# Description:
#   A script to do the github release task, need nushell to be installed.
# REF:
#   1. https://github.com/volks73/cargo-wix

# The main binary file to be released
let bin = 'nu'
let os = $env.OS
let target = $env.TARGET
# Repo source dir like `/home/runner/work/nushell/nushell`
let src = $env.GITHUB_WORKSPACE
let flags = $env.TARGET_RUSTFLAGS
let dist = $'($env.GITHUB_WORKSPACE)/output'
let version = (open Cargo.toml | get package.version)

# $env

$'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
if not ('Cargo.lock' | path exists) { cargo generate-lockfile }

$'Start building ($bin)...'; hr-line

# ----------------------------------------------------------------------------
# Build for Ubuntu and macOS
# ----------------------------------------------------------------------------
if $os in ['ubuntu-latest', 'macos-latest'] {
    if $os == 'ubuntu-latest' {
        sudo apt-get install libxcb-composite0-dev -y
    }
    if $target == 'aarch64-unknown-linux-gnu' {
        sudo apt-get install gcc-aarch64-linux-gnu -y
        let-env CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
        cargo-build-nu $flags
    } else if $target == 'armv7-unknown-linux-gnueabihf' {
        sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
        let-env CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
        cargo-build-nu $flags
    } else {
        # musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
        # Actually just for x86_64-unknown-linux-musl target
        sudo apt install musl-tools -y
        cargo-build-nu $flags
    }
}

# ----------------------------------------------------------------------------
# Build for Windows without static-link-openssl feature
# ----------------------------------------------------------------------------
if $os in ['windows-latest'] {
    if ($flags | str trim | empty?) {
        cargo build --release --all --target $target --features=extra
    } else {
        cargo build --release --all --target $target --features=extra $flags
    }
}

# ----------------------------------------------------------------------------
# Prepare for the release archive
# ----------------------------------------------------------------------------
let suffix = if $os == 'windows-latest' { '.exe' }
# nu, nu_plugin_* were all included
let executable = $'target/($target)/release/($bin)*($suffix)'
$'Current executable file: ($executable)'

cd $src; mkdir $dist;
rm -rf $'target/($target)/release/*.d' $'target/($target)/release/nu_pretty_hex*'
$'(char nl)All executable files:'; hr-line
ls -f $executable

$'(char nl)Copying release files...'; hr-line
cp -v README.release.txt $'($dist)/README.txt'
[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten

$'(char nl)Check binary release version detail:'; hr-line
let ver = if $os == 'windows-latest' {
    (do -i { ./output/nu.exe -c 'version' }) | str collect
} else {
    (do -i { ./output/nu -c 'version' }) | str collect
}
if ($ver | str trim | empty?) {
    $'(ansi r)Incompatible nu binary...(ansi reset)'
} else { $ver }

# ----------------------------------------------------------------------------
# Create a release archive and send it to output for the following steps
# ----------------------------------------------------------------------------
cd $dist; $'(char nl)Creating release archive...'; hr-line
if $os in ['ubuntu-latest', 'macos-latest'] {

    $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls

    let archive = $'($dist)/($bin)-($version)-($target).tar.gz'
    tar czf $archive *
    print $'archive: ---> ($archive)'; ls $archive
    echo $'::set-output name=archive::($archive)'

} else if $os == 'windows-latest' {

    let releaseStem = $'($bin)-($version)-($target)'

    $'(char nl)Download less related stuffs...'; hr-line
    curl https://github.com/jftuga/less-Windows/releases/download/less-v590/less.exe -o $'($dist)\less.exe'
    curl https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o $'($dist)\LICENSE-for-less.txt'

    # Create Windows msi release package
    if (get-env _EXTRA_) == 'msi' {

        let wixRelease = $'($src)/target/wix/($releaseStem).msi'
        $'(char nl)Start creating Windows msi package...'
        cd $src; hr-line
        # Wix need the binaries be stored in target/release/
        cp -r $'($dist)/*' target/release/
        cargo install cargo-wix --version 0.3.2
        cargo wix --no-build --nocapture --package nu --output $wixRelease
        echo $'::set-output name=archive::($wixRelease)'

    } else {

        $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
        let archive = $'($dist)/($releaseStem).zip'
        7z a $archive *
        print $'archive: ---> ($archive)';
        let pkg = (ls -f $archive | get name)
        if not ($pkg | empty?) {
            echo $'::set-output name=archive::($pkg | get 0)'
        }
    }
}

def 'cargo-build-nu' [ options: string ] {
    if ($options | str trim | empty?) {
        cargo build --release --all --target $target --features=extra,static-link-openssl
    } else {
        cargo build --release --all --target $target --features=extra,static-link-openssl $options
    }
}

# Print a horizontal line marker
def 'hr-line' [
    --blank-line(-b): bool
] {
    print $'(ansi g)---------------------------------------------------------------------------->(ansi reset)'
    if $blank-line { char nl }
}

# Get the specified env key's value or ''
def 'get-env' [
    key: string           # The key to get it's env value
    default: string = ''  # The default value for an empty env
] {
    $env | get -i $key | default $default
}
```
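A minimal sketch of driving this script by hand on a Linux host. In the workflow the variables are injected from the job matrix (see the release.yml changes below); the values used here are placeholders, not part of this diff.

```nu
# Hypothetical local invocation of the packaging script.
let-env OS = 'ubuntu-latest'
let-env TARGET = 'x86_64-unknown-linux-gnu'
let-env GITHUB_WORKSPACE = $env.PWD
let-env TARGET_RUSTFLAGS = ''
let-env _EXTRA_ = 'bin'
nu .github/workflows/release-pkg.nu
```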
.github/workflows/release.yml (517 changed lines)

```diff
@@ -1,3 +1,7 @@
+#
+# REF:
+#   1. https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstrategymatrixinclude
+#
 name: Create Release Draft
 
 on:
@@ -5,434 +9,89 @@ on:
   push:
     tags: ["[0-9]+.[0-9]+.[0-9]+*"]
 
+defaults:
+  run:
+    shell: bash
+
 jobs:
-  linux:
-    name: Build Linux
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out code
-        uses: actions/checkout@v2
-
-      - name: Install libxcb
-        run: sudo apt-get install libxcb-composite0-dev
-
-      - name: Set up cargo
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Build
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --release --all --features=extra,static-link-openssl
-
-      # - name: Strip binaries (nu)
-      #   run: strip target/release/nu
-
-      # - name: Strip binaries (nu_plugin_inc)
-      #   run: strip target/release/nu_plugin_inc
-
-      # - name: Strip binaries (nu_plugin_match)
-      #   run: strip target/release/nu_plugin_match
-
-      # - name: Strip binaries (nu_plugin_textview)
-      #   run: strip target/release/nu_plugin_textview
-
-      # - name: Strip binaries (nu_plugin_binaryview)
-      #   run: strip target/release/nu_plugin_binaryview
-
-      # - name: Strip binaries (nu_plugin_chart_bar)
-      #   run: strip target/release/nu_plugin_chart_bar
-
-      # - name: Strip binaries (nu_plugin_chart_line)
-      #   run: strip target/release/nu_plugin_chart_line
-
-      # - name: Strip binaries (nu_plugin_from_bson)
-      #   run: strip target/release/nu_plugin_from_bson
-
-      # - name: Strip binaries (nu_plugin_from_sqlite)
-      #   run: strip target/release/nu_plugin_from_sqlite
-
-      # - name: Strip binaries (nu_plugin_from_mp4)
-      #   run: strip target/release/nu_plugin_from_mp4
-
-      # - name: Strip binaries (nu_plugin_query_json)
-      #   run: strip target/release/nu_plugin_query_json
-
-      # - name: Strip binaries (nu_plugin_s3)
-      #   run: strip target/release/nu_plugin_s3
-
-      # - name: Strip binaries (nu_plugin_selector)
-      #   run: strip target/release/nu_plugin_selector
-
-      # - name: Strip binaries (nu_plugin_start)
-      #   run: strip target/release/nu_plugin_start
-
-      # - name: Strip binaries (nu_plugin_to_bson)
-      #   run: strip target/release/nu_plugin_to_bson
-
-      # - name: Strip binaries (nu_plugin_to_sqlite)
-      #   run: strip target/release/nu_plugin_to_sqlite
-
-      # - name: Strip binaries (nu_plugin_tree)
-      #   run: strip target/release/nu_plugin_tree
-
-      # - name: Strip binaries (nu_plugin_xpath)
-      #   run: strip target/release/nu_plugin_xpath
-
-      - name: Create output directory
-        run: mkdir output
-
-      - name: Copy files to output
-        run: |
-          cp target/release/nu target/release/nu_plugin_* output/
-          cp README.release.txt output/README.txt
-          cp LICENSE output/LICENSE
-          rm output/*.d
-
-      - name: Upload artifact
-        uses: actions/upload-artifact@v2
-        with:
-          name: linux
-          path: output/*
-
-  macos:
-    name: Build macOS
-    runs-on: macos-latest
-    steps:
-      - name: Check out code
-        uses: actions/checkout@v2
-
-      - name: Set up cargo
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Build
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --release --all --features=extra,static-link-openssl
-
-      # - name: Strip binaries (nu)
-      #   run: strip target/release/nu
-
-      # - name: Strip binaries (nu_plugin_inc)
-      #   run: strip target/release/nu_plugin_inc
-
-      # - name: Strip binaries (nu_plugin_match)
-      #   run: strip target/release/nu_plugin_match
-
-      # - name: Strip binaries (nu_plugin_textview)
-      #   run: strip target/release/nu_plugin_textview
-
-      # - name: Strip binaries (nu_plugin_binaryview)
-      #   run: strip target/release/nu_plugin_binaryview
-
-      # - name: Strip binaries (nu_plugin_chart_bar)
-      #   run: strip target/release/nu_plugin_chart_bar
-
-      # - name: Strip binaries (nu_plugin_chart_line)
-      #   run: strip target/release/nu_plugin_chart_line
-
-      # - name: Strip binaries (nu_plugin_from_bson)
-      #   run: strip target/release/nu_plugin_from_bson
-
-      # - name: Strip binaries (nu_plugin_from_sqlite)
-      #   run: strip target/release/nu_plugin_from_sqlite
-
-      # - name: Strip binaries (nu_plugin_from_mp4)
-      #   run: strip target/release/nu_plugin_from_mp4
-
-      # - name: Strip binaries (nu_plugin_query_json)
-      #   run: strip target/release/nu_plugin_query_json
-
-      # - name: Strip binaries (nu_plugin_s3)
-      #   run: strip target/release/nu_plugin_s3
-
-      # - name: Strip binaries (nu_plugin_selector)
-      #   run: strip target/release/nu_plugin_selector
-
-      # - name: Strip binaries (nu_plugin_start)
-      #   run: strip target/release/nu_plugin_start
-
-      # - name: Strip binaries (nu_plugin_to_bson)
-      #   run: strip target/release/nu_plugin_to_bson
-
-      # - name: Strip binaries (nu_plugin_to_sqlite)
-      #   run: strip target/release/nu_plugin_to_sqlite
-
-      # - name: Strip binaries (nu_plugin_tree)
-      #   run: strip target/release/nu_plugin_tree
-
-      # - name: Strip binaries (nu_plugin_xpath)
-      #   run: strip target/release/nu_plugin_xpath
-
-      - name: Create output directory
-        run: mkdir output
-
-      - name: Copy files to output
-        run: |
-          cp target/release/nu target/release/nu_plugin_* output/
-          cp README.release.txt output/README.txt
-          cp LICENSE output/LICENSE
-          rm output/*.d
-
-      - name: Upload artifact
-        uses: actions/upload-artifact@v2
-        with:
-          name: macos
-          path: output/*
-
-  windows:
-    name: Build Windows
-    runs-on: windows-latest
-    steps:
-      - name: Check out code
-        uses: actions/checkout@v2
-
-      - name: Set up cargo
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Add cargo-wix subcommand
-        uses: actions-rs/cargo@v1
-        with:
-          command: install
-          args: cargo-wix --version 0.3.1
-
-      - name: Build
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --release --all --features=extra,static-link-openssl
-
-      # - name: Strip binaries (nu.exe)
-      #   run: strip target/release/nu.exe
-
-      # - name: Strip binaries (nu_plugin_inc.exe)
-      #   run: strip target/release/nu_plugin_inc.exe
-
-      # - name: Strip binaries (nu_plugin_match.exe)
-      #   run: strip target/release/nu_plugin_match.exe
-
-      # - name: Strip binaries (nu_plugin_textview.exe)
-      #   run: strip target/release/nu_plugin_textview.exe
-
-      # - name: Strip binaries (nu_plugin_binaryview.exe)
-      #   run: strip target/release/nu_plugin_binaryview.exe
-
-      # - name: Strip binaries (nu_plugin_chart_bar.exe)
-      #   run: strip target/release/nu_plugin_chart_bar.exe
-
-      # - name: Strip binaries (nu_plugin_chart_line.exe)
-      #   run: strip target/release/nu_plugin_chart_line.exe
-
-      # - name: Strip binaries (nu_plugin_from_bson.exe)
-      #   run: strip target/release/nu_plugin_from_bson.exe
-
-      # - name: Strip binaries (nu_plugin_from_sqlite.exe)
-      #   run: strip target/release/nu_plugin_from_sqlite.exe
-
-      # - name: Strip binaries (nu_plugin_from_mp4.exe)
-      #   run: strip target/release/nu_plugin_from_mp4.exe
-
-      # - name: Strip binaries (nu_plugin_query_json.exe)
-      #   run: strip target/release/nu_plugin_query_json.exe
-
-      # - name: Strip binaries (nu_plugin_s3.exe)
-      #   run: strip target/release/nu_plugin_s3.exe
-
-      # - name: Strip binaries (nu_plugin_selector.exe)
-      #   run: strip target/release/nu_plugin_selector.exe
-
-      # - name: Strip binaries (nu_plugin_start.exe)
-      #   run: strip target/release/nu_plugin_start.exe
-
-      # - name: Strip binaries (nu_plugin_to_bson.exe)
-      #   run: strip target/release/nu_plugin_to_bson.exe
-
-      # - name: Strip binaries (nu_plugin_to_sqlite.exe)
-      #   run: strip target/release/nu_plugin_to_sqlite.exe
-
-      # - name: Strip binaries (nu_plugin_tree.exe)
-      #   run: strip target/release/nu_plugin_tree.exe
-
-      # - name: Strip binaries (nu_plugin_xpath.exe)
-      #   run: strip target/release/nu_plugin_xpath.exe
-
-      - name: Create output directory
-        run: mkdir output
-
-      - name: Download Less Binary
-        run: Invoke-WebRequest -Uri "https://github.com/jftuga/less-Windows/releases/download/less-v562.0/less.exe" -OutFile "target\release\less.exe"
-
-      - name: Download Less License
-        run: Invoke-WebRequest -Uri "https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE" -OutFile "target\release\LICENSE-for-less.txt"
-
-      - name: Copy files to output
-        run: |
-          cp target\release\nu.exe output\
-          cp LICENSE output\
-          cp target\release\LICENSE-for-less.txt output\
-          cp target\release\nu_plugin_*.exe output\
-          cp README.release.txt output\README.txt
-          cp target\release\less.exe output\
-          # Note: If the version of `less.exe` needs to be changed, update this URL
-          # Similarly, if `less.exe` is checked into the repo, copy from the local path here
-          # moved this stuff down to create wix after we download less
-
-      - name: Create msi with wix
-        uses: actions-rs/cargo@v1
-        with:
-          command: wix
-          args: --no-build --nocapture --output target\wix\nushell-windows.msi
-
-      - name: Upload installer
-        uses: actions/upload-artifact@v2
-        with:
-          name: windows-installer
-          path: target\wix\nushell-windows.msi
-
-      - name: Upload zip
-        uses: actions/upload-artifact@v2
-        with:
-          name: windows-zip
-          path: output\*
-
-  release:
-    name: Publish Release
-    runs-on: ubuntu-latest
-    needs:
-      - linux
-      - macos
-      - windows
-    steps:
-      - name: Check out code
-        uses: actions/checkout@v2
-
-      - name: Determine Release Info
-        id: info
-        env:
-          GITHUB_REF: ${{ github.ref }}
-        run: |
-          VERSION=${GITHUB_REF##*/}
-          MAJOR=${VERSION%%.*}
-          MINOR=${VERSION%.*}
-          MINOR=${MINOR#*.}
-          PATCH=${VERSION##*.}
-          echo "::set-output name=version::${VERSION}"
-          echo "::set-output name=linuxdir::nu_${MAJOR}_${MINOR}_${PATCH}_linux"
-          echo "::set-output name=macosdir::nu_${MAJOR}_${MINOR}_${PATCH}_macOS"
-          echo "::set-output name=windowsdir::nu_${MAJOR}_${MINOR}_${PATCH}_windows"
-          echo "::set-output name=innerdir::nushell-${VERSION}"
-
-      - name: Create Release Draft
-        id: create_release
-        uses: actions/create-release@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          tag_name: ${{ github.ref }}
-          release_name: ${{ steps.info.outputs.version }} Release
-          draft: true
-
-      - name: Create Linux Directory
-        run: mkdir -p ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}
-
-      - name: Download Linux Artifacts
-        uses: actions/download-artifact@v2
-        with:
-          name: linux
-          path: ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}
-
-      - name: Restore Linux File Modes
-        run: |
-          chmod 755 ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}/nu*
-
-      - name: Create Linux tarball
-        run: tar -zcvf ${{ steps.info.outputs.linuxdir }}.tar.gz ${{ steps.info.outputs.linuxdir }}
-
-      - name: Upload Linux Artifact
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: ./${{ steps.info.outputs.linuxdir }}.tar.gz
-          asset_name: ${{ steps.info.outputs.linuxdir }}.tar.gz
-          asset_content_type: application/gzip
-
-      - name: Create macOS Directory
-        run: mkdir -p ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}
-
-      - name: Download macOS Artifacts
-        uses: actions/download-artifact@v2
-        with:
-          name: macos
-          path: ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}
-
-      - name: Restore macOS File Modes
-        run: chmod 755 ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}/nu*
-
-      - name: Create macOS Archive
-        run: zip -r ${{ steps.info.outputs.macosdir }}.zip ${{ steps.info.outputs.macosdir }}
-
-      - name: Upload macOS Artifact
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: ./${{ steps.info.outputs.macosdir }}.zip
-          asset_name: ${{ steps.info.outputs.macosdir }}.zip
-          asset_content_type: application/zip
-
-      - name: Create Windows Directory
-        run: mkdir -p ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}
-
-      - name: Download Windows zip
-        uses: actions/download-artifact@v2
-        with:
-          name: windows-zip
-          path: ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}
-
-      - name: Show Windows Artifacts
-        run: ls -la ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}
-
-      - name: Create macOS Archive
-        run: zip -r ${{ steps.info.outputs.windowsdir }}.zip ${{ steps.info.outputs.windowsdir }}
-
-      - name: Upload Windows zip
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: ./${{ steps.info.outputs.windowsdir }}.zip
-          asset_name: ${{ steps.info.outputs.windowsdir }}.zip
-          asset_content_type: application/zip
-
-      - name: Download Windows installer
-        uses: actions/download-artifact@v2
-        with:
-          name: windows-installer
-          path: ./
-
-      - name: Upload Windows installer
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: ./nushell-windows.msi
-          asset_name: ${{ steps.info.outputs.windowsdir }}.msi
-          asset_content_type: application/x-msi
+  all:
+    name: All
+
+    strategy:
+      matrix:
+        target:
+        - aarch64-apple-darwin
+        - x86_64-apple-darwin
+        - x86_64-pc-windows-msvc
+        - x86_64-unknown-linux-gnu
+        - x86_64-unknown-linux-musl
+        - aarch64-unknown-linux-gnu
+        - armv7-unknown-linux-gnueabihf
+        extra: ['bin']
+        include:
+        - target: aarch64-apple-darwin
+          os: macos-latest
+          target_rustflags: ''
+        - target: x86_64-apple-darwin
+          os: macos-latest
+          target_rustflags: ''
+        - target: x86_64-pc-windows-msvc
+          extra: 'bin'
+          os: windows-latest
+          target_rustflags: ''
+        - target: x86_64-pc-windows-msvc
+          extra: msi
+          os: windows-latest
+          target_rustflags: ''
+        - target: x86_64-unknown-linux-gnu
+          os: ubuntu-latest
+          target_rustflags: ''
+        - target: x86_64-unknown-linux-musl
+          os: ubuntu-latest
+          target_rustflags: ''
+        - target: aarch64-unknown-linux-gnu
+          os: ubuntu-latest
+          target_rustflags: ''
+        - target: armv7-unknown-linux-gnueabihf
+          os: ubuntu-latest
+          target_rustflags: ''
+
+    runs-on: ${{matrix.os}}
+
+    steps:
+    - uses: actions/checkout@v3.0.2
+
+    - name: Install Rust Toolchain Components
+      uses: actions-rs/toolchain@v1.0.6
+      with:
+        override: true
+        profile: minimal
+        toolchain: stable
+        target: ${{ matrix.target }}
+
+    - name: Setup Nushell
+      uses: hustcer/setup-nu@v1
+      with:
+        version: 0.63.0
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+    - name: Release Nu Binary
+      id: nu
+      run: nu .github/workflows/release-pkg.nu
+      env:
+        OS: ${{ matrix.os }}
+        REF: ${{ github.ref }}
+        TARGET: ${{ matrix.target }}
+        _EXTRA_: ${{ matrix.extra }}
+        TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}
+
+    # REF: https://github.com/marketplace/actions/gh-release
+    - name: Publish Archive
+      uses: softprops/action-gh-release@v1
+      if: ${{ startsWith(github.ref, 'refs/tags/') }}
+      with:
+        draft: true
+        files: ${{ steps.nu.outputs.archive }}
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
.gitignore (5 changed lines)

```diff
@@ -23,4 +23,9 @@ debian/nu/
 .vscode/*
 
 # Helix configuration folder
+.helix/*
 .helix
+
+# Coverage tools
+lcov.info
+tarpaulin-report.html
```
Cargo.lock (generated): 815 changed lines, diff not shown.
Cargo.toml (49 changed lines)

```diff
@@ -10,8 +10,8 @@ license = "MIT"
 name = "nu"
 readme = "README.md"
 repository = "https://github.com/nushell/nushell"
-rust-version = "1.59"
-version = "0.62.0"
+rust-version = "1.60"
+version = "0.63.1"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
@@ -38,27 +38,31 @@ ctrlc = "3.2.1"
 log = "0.4"
 miette = "4.5.0"
 nu-ansi-term = "0.45.1"
-nu-cli = { path="./crates/nu-cli", version = "0.62.0" }
-nu-color-config = { path = "./crates/nu-color-config", version = "0.62.0" }
-nu-command = { path="./crates/nu-command", version = "0.62.0" }
-nu-engine = { path="./crates/nu-engine", version = "0.62.0" }
-nu-json = { path="./crates/nu-json", version = "0.62.0" }
-nu-parser = { path="./crates/nu-parser", version = "0.62.0" }
-nu-path = { path="./crates/nu-path", version = "0.62.0" }
-nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.62.0" }
-nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.62.0" }
-nu-protocol = { path = "./crates/nu-protocol", version = "0.62.0" }
-nu-system = { path = "./crates/nu-system", version = "0.62.0" }
-nu-table = { path = "./crates/nu-table", version = "0.62.0" }
-nu-term-grid = { path = "./crates/nu-term-grid", version = "0.62.0" }
-openssl = { version = "0.10.38", features = ["vendored"], optional = true }
+nu-cli = { path="./crates/nu-cli", version = "0.63.1" }
+nu-color-config = { path = "./crates/nu-color-config", version = "0.63.1" }
+nu-command = { path="./crates/nu-command", version = "0.63.1" }
+nu-engine = { path="./crates/nu-engine", version = "0.63.1" }
+nu-json = { path="./crates/nu-json", version = "0.63.1" }
+nu-parser = { path="./crates/nu-parser", version = "0.63.1" }
+nu-path = { path="./crates/nu-path", version = "0.63.1" }
+nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.63.1" }
+nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.63.1" }
+nu-protocol = { path = "./crates/nu-protocol", version = "0.63.1" }
+nu-system = { path = "./crates/nu-system", version = "0.63.1" }
+nu-table = { path = "./crates/nu-table", version = "0.63.1" }
+nu-term-grid = { path = "./crates/nu-term-grid", version = "0.63.1" }
+nu-utils = { path = "./crates/nu-utils", version = "0.63.1" }
 pretty_env_logger = "0.4.0"
 rayon = "1.5.1"
-reedline = { version = "0.5.0", features = ["bashisms"]}
+reedline = { git = "https://github.com/nushell/reedline", branch = "main", features = ["bashisms"]}
 is_executable = "1.0.1"
 
+[target.'cfg(not(target_os = "windows"))'.dependencies]
+# Our dependencies don't use OpenSSL on Windows
+openssl = { version = "0.10.38", features = ["vendored"], optional = true }
+
 [dev-dependencies]
-nu-test-support = { path="./crates/nu-test-support", version = "0.62.0" }
+nu-test-support = { path="./crates/nu-test-support", version = "0.63.1" }
 tempfile = "3.2.0"
 assert_cmd = "2.0.2"
 pretty_assertions = "1.0.0"
@@ -68,7 +72,7 @@ rstest = "0.12.0"
 itertools = "0.10.3"
 
 [target.'cfg(windows)'.build-dependencies]
-embed-resource = "1"
+winres = "0.1"
 
 [features]
 plugin = ["nu-plugin", "nu-cli/plugin", "nu-parser/plugin", "nu-command/plugin", "nu-protocol/plugin", "nu-engine/plugin"]
@@ -103,6 +107,13 @@ inherits = "release"
 strip = false
 debug = true
 
+# build with `cargo build --profile ci`
+# to analyze performance with tooling like linux perf
+[profile.ci]
+inherits = "dev"
+strip = false
+debug = false
+
 # Main nu binary
 [[bin]]
 name = "nu"
```
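The new `ci` profile inherits `dev` but disables debug info, which is what the reworked workflows invoke through `--profile ci` (the jobs also export `NUSHELL_CARGO_TARGET: ci`, presumably so the test harness looks under that target directory). A brief usage sketch, not part of the diff:

```nu
# Build with the custom profile; cargo places the artifacts under target/ci/.
cargo build --profile ci
```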
README.md

```diff
@@ -43,7 +43,7 @@ You can also find information on more specific topics in our [cookbook](https://
 
 Up-to-date installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
 
-To build Nu, you will need to use the **latest stable (1.59 or later)** version of the compiler.
+To build Nu, you will need to use the **latest stable (1.60 or later)** version of the compiler.
 
 Required dependencies:
 
```
12 image files deleted (binary assets, 1.6 KiB to 206 KiB; contents not shown).
assets/nushell.rc (deleted file, 49 lines)

```diff
@@ -1,49 +0,0 @@
-#include <winver.h>
-
-#define VER_FILEVERSION 0,59,1,0
-#define VER_FILEVERSION_STR "0.59.1"
-
-#define VER_PRODUCTVERSION 0,59,1,0
-#define VER_PRODUCTVERSION_STR "0.59.1"
-
-#ifdef RC_INVOKED
-
-#ifdef DEBUG // TODO: Actually define DEBUG
-#define VER_DEBUG VS_FF_DEBUG
-#else
-#define VER_DEBUG 0
-#endif
-
-VS_VERSION_INFO VERSIONINFO
-FILEVERSION VER_FILEVERSION
-PRODUCTVERSION VER_PRODUCTVERSION
-FILEFLAGSMASK VS_FFI_FILEFLAGSMASK
-FILEFLAGS VER_DEBUG
-FILEOS VOS__WINDOWS32
-FILETYPE VFT_APP
-FILESUBTYPE VFT2_UNKNOWN
-BEGIN
-    BLOCK "StringFileInfo"
-    BEGIN
-        BLOCK "040904b0"
-        BEGIN
-            VALUE "CompanyName", "nushell"
-            VALUE "FileDescription", "Nushell"
-            VALUE "FileVersion", VER_FILEVERSION_STR
-            VALUE "InternalName", "nu.exe"
-            VALUE "LegalCopyright", "Copyright (C) 2022"
-            VALUE "OriginalFilename", "nu.exe"
-            VALUE "ProductName", "Nushell"
-            VALUE "ProductVersion", VER_PRODUCTVERSION_STR
-        END
-    END
-
-    BLOCK "VarFileInfo"
-    BEGIN
-        VALUE "Translation", 0x409, 1200
-    END
-END
-
-#define IDI_ICON 0x101
-IDI_ICON ICON "assets/nu_logo.ico"
-#endif
```
build.rs (8 changed lines)

```diff
@@ -1,6 +1,12 @@
 #[cfg(windows)]
 fn main() {
-    embed_resource::compile_for("assets/nushell.rc", &["nu"])
+    let mut res = winres::WindowsResource::new();
+    res.set("ProductName", "Nushell");
+    res.set("FileDescription", "Nushell");
+    res.set("LegalCopyright", "Copyright (C) 2022");
+    res.set_icon("assets/nu_logo.ico");
+    res.compile()
+        .expect("Failed to run the Windows resource compiler (rc.exe)");
 }
 
 #[cfg(not(windows))]
```
crates/nu-cli/Cargo.toml

```diff
@@ -4,21 +4,21 @@ description = "CLI-related functionality for Nushell"
 edition = "2021"
 license = "MIT"
 name = "nu-cli"
-version = "0.62.0"
+version = "0.63.1"
 
 [dev-dependencies]
-nu-test-support = { path="../nu-test-support", version = "0.62.0" }
-nu-command = { path = "../nu-command", version = "0.62.0" }
+nu-test-support = { path="../nu-test-support", version = "0.63.1" }
+nu-command = { path = "../nu-command", version = "0.63.1" }
 
 [dependencies]
-nu-engine = { path = "../nu-engine", version = "0.62.0" }
-nu-path = { path = "../nu-path", version = "0.62.0" }
-nu-parser = { path = "../nu-parser", version = "0.62.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
-nu-utils = { path = "../nu-utils", version = "0.62.0" }
+nu-engine = { path = "../nu-engine", version = "0.63.1" }
+nu-path = { path = "../nu-path", version = "0.63.1" }
+nu-parser = { path = "../nu-parser", version = "0.63.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.63.1" }
+nu-utils = { path = "../nu-utils", version = "0.63.1" }
 nu-ansi-term = "0.45.1"
-nu-color-config = { path = "../nu-color-config", version = "0.62.0" }
-reedline = { version = "0.5.0", features = ["bashisms"]}
+reedline = { git = "https://github.com/nushell/reedline", branch = "main", features = ["bashisms"]}
+nu-color-config = { path = "../nu-color-config", version = "0.63.1" }
 crossterm = "0.23.0"
 miette = { version = "4.5.0", features = ["fancy"] }
 thiserror = "1.0.29"
```
crates/nu-cli: `evaluate_commands`

```diff
@@ -6,7 +6,7 @@ use nu_parser::parse;
 use nu_protocol::engine::Stack;
 use nu_protocol::{
     engine::{EngineState, StateDelta, StateWorkingSet},
-    PipelineData, Spanned,
+    PipelineData, Spanned, Value,
 };
 use std::path::Path;
 
@@ -17,9 +17,16 @@ pub fn evaluate_commands(
     stack: &mut Stack,
     input: PipelineData,
     is_perf_true: bool,
+    table_mode: Option<Value>,
 ) -> Result<()> {
     // Run a command (or commands) given to us by the user
     let (block, delta) = {
+        if let Some(ref t_mode) = table_mode {
+            let mut config = engine_state.get_config().clone();
+            config.table_mode = t_mode.as_string()?;
+            engine_state.set_config(&config);
+        }
+
         let mut working_set = StateWorkingSet::new(engine_state);
 
         let (output, err) = parse(&mut working_set, None, commands.item.as_bytes(), false, &[]);
@@ -37,12 +44,17 @@ pub fn evaluate_commands(
         report_error(&working_set, &err);
     }
 
-    let config = engine_state.get_config().clone();
+    let mut config = engine_state.get_config().clone();
+    if let Some(t_mode) = table_mode {
+        config.table_mode = t_mode.as_string()?;
+    }
 
     // Merge the delta in case env vars changed in the config
     match nu_engine::env::current_dir(engine_state, stack) {
         Ok(cwd) => {
-            if let Err(e) = engine_state.merge_delta(StateDelta::new(), Some(stack), cwd) {
+            if let Err(e) =
+                engine_state.merge_delta(StateDelta::new(engine_state), Some(stack), cwd)
+            {
                 let working_set = StateWorkingSet::new(engine_state);
                 report_error(&working_set, &e);
                 std::process::exit(1);
@@ -64,7 +76,7 @@ pub fn evaluate_commands(
 
     match eval_block(engine_state, stack, &block, input, false, false) {
         Ok(pipeline_data) => {
-            crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &config)
+            crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &mut config)
         }
         Err(err) => {
             let working_set = StateWorkingSet::new(engine_state);
```
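The new `table_mode: Option<Value>` argument lets a one-off `-c` invocation override `config.table_mode` before the command runs. Presumably this is surfaced as a CLI flag; the flag itself is not part of this diff, so the invocation below is an assumption:

```nu
# Hypothetical usage, assuming the new parameter is wired to a --table-mode flag.
nu --table-mode basic -c 'ls'
```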
crates/nu-cli: `CommandCompletion`

```diff
@@ -1,7 +1,5 @@
-use crate::completions::{
-    file_completions::file_path_completion, Completer, CompletionOptions, MatchAlgorithm, SortBy,
-};
-use nu_parser::{unescape_unquote_string, FlatShape};
+use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
+use nu_parser::FlatShape;
 use nu_protocol::{
     engine::{EngineState, StateWorkingSet},
     Span,
@@ -12,7 +10,6 @@ use std::sync::Arc;
 pub struct CommandCompletion {
     engine_state: Arc<EngineState>,
     flattened: Vec<(Span, FlatShape)>,
-    flat_idx: usize,
     flat_shape: FlatShape,
 }
 
@@ -21,13 +18,11 @@ impl CommandCompletion {
         engine_state: Arc<EngineState>,
         _: &StateWorkingSet,
         flattened: Vec<(Span, FlatShape)>,
-        flat_idx: usize,
         flat_shape: FlatShape,
     ) -> Self {
         Self {
             engine_state,
             flattened,
-            flat_idx,
             flat_shape,
         }
     }
@@ -39,7 +34,7 @@ impl CommandCompletion {
     ) -> Vec<String> {
         let mut executables = vec![];
 
-        let paths = self.engine_state.env_vars.get("PATH");
+        let paths = self.engine_state.get_env_var("PATH");
 
         if let Some(paths) = paths {
             if let Ok(paths) = paths.as_list() {
@@ -161,7 +156,7 @@ impl Completer for CommandCompletion {
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
-        prefix: Vec<u8>,
+        _prefix: Vec<u8>,
         span: Span,
         offset: usize,
         pos: usize,
@@ -214,66 +209,8 @@ impl Completer for CommandCompletion {
             vec![]
         };
 
-        let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
-            match d.as_string() {
-                Ok(s) => s,
-                Err(_) => "".to_string(),
-            }
-        } else {
-            "".to_string()
-        };
-
-        let preceding_byte = if span.start > offset {
-            working_set
-                .get_span_contents(Span {
-                    start: span.start - 1,
-                    end: span.start,
-                })
-                .to_vec()
-        } else {
-            vec![]
-        };
-        // let prefix = working_set.get_span_contents(flat.0);
-        let prefix = String::from_utf8_lossy(&prefix).to_string();
-
-        file_path_completion(span, &prefix, &cwd, options.match_algorithm)
-            .into_iter()
-            .map(move |x| {
-                if self.flat_idx == 0 {
-                    // We're in the command position
-                    if (x.1.starts_with('"') || x.1.starts_with('\'') || x.1.starts_with('`'))
-                        && !matches!(preceding_byte.get(0), Some(b'^'))
-                    {
-                        let (trimmed, _) = unescape_unquote_string(x.1.as_bytes(), span);
-                        let expanded = nu_path::canonicalize_with(trimmed, &cwd);
-
-                        if let Ok(expanded) = expanded {
-                            if is_executable::is_executable(expanded) {
-                                (x.0, format!("^{}", x.1))
-                            } else {
-                                (x.0, x.1)
-                            }
-                        } else {
-                            (x.0, x.1)
-                        }
-                    } else {
-                        (x.0, x.1)
-                    }
-                } else {
-                    (x.0, x.1)
-                }
-            })
-            .map(move |x| Suggestion {
-                value: x.1,
-                description: None,
-                extra: None,
-                span: reedline::Span {
-                    start: x.0.start - offset,
-                    end: x.0.end - offset,
-                },
-                append_whitespace: false,
-            })
-            .chain(subcommands.into_iter())
+        subcommands
+            .into_iter()
             .chain(commands.into_iter())
             .collect::<Vec<_>>()
     }
```
@ -37,7 +37,10 @@ impl NuCompleter {
|
|||||||
) -> Vec<Suggestion> {
|
) -> Vec<Suggestion> {
|
||||||
let config = self.engine_state.get_config();
|
let config = self.engine_state.get_config();
|
||||||
|
|
||||||
let mut options = CompletionOptions::default();
|
let mut options = CompletionOptions {
|
||||||
|
case_sensitive: config.case_sensitive_completions,
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
if config.completion_algorithm == "fuzzy" {
|
if config.completion_algorithm == "fuzzy" {
|
||||||
options.match_algorithm = MatchAlgorithm::Fuzzy;
|
options.match_algorithm = MatchAlgorithm::Fuzzy;
|
||||||
@ -56,6 +59,7 @@ impl NuCompleter {
|
|||||||
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
|
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
|
||||||
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
||||||
let offset = working_set.next_span_start();
|
let offset = working_set.next_span_start();
|
||||||
|
let initial_line = line.to_string();
|
||||||
let mut line = line.to_string();
|
let mut line = line.to_string();
|
||||||
line.insert(pos, 'a');
|
line.insert(pos, 'a');
|
||||||
let pos = offset + pos;
|
let pos = offset + pos;
|
||||||
@ -150,7 +154,7 @@ impl NuCompleter {
|
|||||||
self.engine_state.clone(),
|
self.engine_state.clone(),
|
||||||
self.stack.clone(),
|
self.stack.clone(),
|
||||||
*decl_id,
|
*decl_id,
|
||||||
line,
|
initial_line,
|
||||||
);
|
);
|
||||||
|
|
||||||
return self.process_completion(
|
return self.process_completion(
|
||||||
@@ -175,37 +179,39 @@ impl NuCompleter {
                         pos,
                     );
                 }
-                FlatShape::Filepath
-                | FlatShape::GlobPattern
-                | FlatShape::ExternalArg => {
-                    let mut completer = FileCompletion::new(self.engine_state.clone());
-
-                    return self.process_completion(
-                        &mut completer,
-                        &working_set,
-                        prefix,
-                        new_span,
-                        offset,
-                        pos,
-                    );
-                }
                 flat_shape => {
                     let mut completer = CommandCompletion::new(
                         self.engine_state.clone(),
                         &working_set,
                         flattened.clone(),
-                        flat_idx,
+                        // flat_idx,
                         flat_shape.clone(),
                     );

-                    return self.process_completion(
+                    let out: Vec<_> = self.process_completion(
                         &mut completer,
                         &working_set,
-                        prefix,
+                        prefix.clone(),
                         new_span,
                         offset,
                         pos,
                     );
+
+                    if out.is_empty() {
+                        let mut completer =
+                            FileCompletion::new(self.engine_state.clone());
+
+                        return self.process_completion(
+                            &mut completer,
+                            &working_set,
+                            prefix,
+                            new_span,
+                            offset,
+                            pos,
+                        );
+                    }
+
+                    return out;
                 }
             };
         }
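The hunk above replaces the dedicated `Filepath`/`GlobPattern`/`ExternalArg` arm with a fallback: command completion runs first and file completion is only consulted when it returns nothing. A minimal standalone sketch of that pattern, with simplified stand-ins for the nu-cli completer types:

```rust
// Simplified stand-ins; only the "primary completer, then fallback" logic is illustrated.
trait Completer {
    fn fetch(&mut self, prefix: &str) -> Vec<String>;
}

struct CommandCompletion;
struct FileCompletion;

impl Completer for CommandCompletion {
    fn fetch(&mut self, prefix: &str) -> Vec<String> {
        ["ls", "open", "print"]
            .iter()
            .filter(|c| c.starts_with(prefix))
            .map(|c| c.to_string())
            .collect()
    }
}

impl Completer for FileCompletion {
    fn fetch(&mut self, prefix: &str) -> Vec<String> {
        // A real implementation would read the directory; a fixed list keeps the sketch runnable.
        ["notes.txt", "nushell.toml"]
            .iter()
            .filter(|f| f.starts_with(prefix))
            .map(|f| f.to_string())
            .collect()
    }
}

fn complete(prefix: &str) -> Vec<String> {
    // Try the primary completer first...
    let out = CommandCompletion.fetch(prefix);
    if out.is_empty() {
        // ...and only fall back to file completion when it produced nothing.
        return FileCompletion.fetch(prefix);
    }
    out
}

fn main() {
    assert_eq!(complete("op"), vec!["open".to_string()]);
    assert_eq!(complete("no").len(), 2);
}
```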
@@ -4,10 +4,11 @@ use nu_protocol::{
     levenshtein_distance, Span,
 };
 use reedline::Suggestion;
+use std::fs;
 use std::path::Path;
 use std::sync::Arc;

-use super::{partial_from, prepend_base_dir, MatchAlgorithm};
+use super::{partial_from, prepend_base_dir};

 const SEP: char = std::path::MAIN_SEPARATOR;
@@ -32,7 +33,7 @@ impl Completer for DirectoryCompletion {
     ) -> Vec<Suggestion> {
-        let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
+        let cwd = if let Some(d) = self.engine_state.get_env_var("PWD") {
             match d.as_string() {
@@ -43,7 +44,7 @@ impl Completer for DirectoryCompletion {
         // Filter only the folders
-        let output: Vec<_> = directory_completion(span, &partial, &cwd, options.match_algorithm)
+        let output: Vec<_> = directory_completion(span, &partial, &cwd, options)
             .into_iter()
@@ -102,7 +103,7 @@ pub fn directory_completion(
     span: nu_protocol::Span,
     partial: &str,
     cwd: &str,
-    match_algorithm: MatchAlgorithm,
+    options: &CompletionOptions,
 ) -> Vec<(nu_protocol::Span, String)> {
@@ -120,10 +121,10 @@ pub fn directory_completion(
         return result
             .filter_map(|entry| {
                 entry.ok().and_then(|entry| {
-                    if let Ok(metadata) = entry.metadata() {
+                    if let Ok(metadata) = fs::metadata(entry.path()) {
                         if metadata.is_dir() {
                             let mut file_name = entry.file_name().to_string_lossy().into_owned();
-                            if matches(&partial, &file_name, match_algorithm) {
+                            if matches(&partial, &file_name, options) {
                                 let mut path = if prepend_base_dir(original_input, &base_dir_name) {
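The switch from `entry.metadata()` to `fs::metadata(entry.path())` changes how symlinks are handled: `DirEntry::metadata` reports on the link itself, while `fs::metadata` follows it, so a symlink to a directory now counts as a directory. A small directory-only listing under that behavior, standard library only:

```rust
use std::fs;
use std::io;
use std::path::Path;

/// List names of entries that resolve to directories, following symlinks.
fn list_dirs(base: &Path) -> io::Result<Vec<String>> {
    let mut dirs = Vec::new();
    for entry in fs::read_dir(base)? {
        let entry = entry?;
        // fs::metadata follows symlinks; entry.metadata() would describe the link itself.
        if let Ok(metadata) = fs::metadata(entry.path()) {
            if metadata.is_dir() {
                dirs.push(entry.file_name().to_string_lossy().into_owned());
            }
        }
    }
    Ok(dirs)
}

fn main() -> io::Result<()> {
    for name in list_dirs(Path::new("."))? {
        println!("{name}/");
    }
    Ok(())
}
```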
@@ -37,7 +37,7 @@ impl Completer for DotNuCompletion {
         // Fetch the lib dirs
         let lib_dirs: Vec<String> =
-            if let Some(lib_dirs) = self.engine_state.env_vars.get("NU_LIB_DIRS") {
+            if let Some(lib_dirs) = self.engine_state.get_env_var("NU_LIB_DIRS") {
@@ -58,7 +58,7 @@ impl Completer for DotNuCompletion {
         // Check if the base_dir is a folder
-        if base_dir != "./" {
+        if base_dir != format!(".{}", SEP) {
             // Add the base dir into the directories to be searched
@@ -70,7 +70,7 @@ impl Completer for DotNuCompletion {
         } else {
             // Fetch the current folder
-            let current_folder = if let Some(d) = self.engine_state.env_vars.get("PWD") {
+            let current_folder = if let Some(d) = self.engine_state.get_env_var("PWD") {
@@ -91,7 +91,7 @@ impl Completer for DotNuCompletion {
             .flat_map(|it| {
-                file_path_completion(span, &partial, &it, options.match_algorithm)
+                file_path_completion(span, &partial, &it, options)
                     .into_iter()
@@ -1,4 +1,4 @@
-use crate::completions::{Completer, CompletionOptions, MatchAlgorithm};
+use crate::completions::{Completer, CompletionOptions};
 use nu_protocol::{
     engine::{EngineState, StateWorkingSet},
     levenshtein_distance, Span,
@@ -30,7 +30,7 @@ impl Completer for FileCompletion {
     ) -> Vec<Suggestion> {
-        let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
+        let cwd = if let Some(d) = self.engine_state.get_env_var("PWD") {
             match d.as_string() {
@@ -39,7 +39,7 @@ impl Completer for FileCompletion {
         let prefix = String::from_utf8_lossy(&prefix).to_string();
-        let output: Vec<_> = file_path_completion(span, &prefix, &cwd, options.match_algorithm)
+        let output: Vec<_> = file_path_completion(span, &prefix, &cwd, options)
             .into_iter()
@@ -112,7 +112,7 @@ pub fn file_path_completion(
     span: nu_protocol::Span,
     partial: &str,
     cwd: &str,
-    match_algorithm: MatchAlgorithm,
+    options: &CompletionOptions,
 ) -> Vec<(nu_protocol::Span, String)> {
@@ -129,7 +129,7 @@ pub fn file_path_completion(
                 let mut file_name = entry.file_name().to_string_lossy().into_owned();
-                if matches(&partial, &file_name, match_algorithm) {
+                if matches(&partial, &file_name, options) {
                     let mut path = if prepend_base_dir(original_input, &base_dir_name) {
@@ -158,8 +158,15 @@ pub fn file_path_completion(
     Vec::new()
 }

-pub fn matches(partial: &str, from: &str, match_algorithm: MatchAlgorithm) -> bool {
-    match_algorithm.matches_str(&from.to_ascii_lowercase(), &partial.to_ascii_lowercase())
+pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
+    // Check for case sensitive
+    if !options.case_sensitive {
+        return options
+            .match_algorithm
+            .matches_str(&from.to_ascii_lowercase(), &partial.to_ascii_lowercase());
+    }
+
+    options.match_algorithm.matches_str(from, partial)
 }

 /// Returns whether the base_dir should be prepended to the file path
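The new `matches` helper only lowercases both sides when case-insensitive matching is requested; otherwise it compares the strings as typed. The same idea in a self-contained sketch, with simple prefix matching standing in for the real match algorithm (the struct here is illustrative, not the nu-cli type):

```rust
#[derive(Default)]
struct CompletionOptions {
    case_sensitive: bool,
}

/// Prefix matching that respects the case-sensitivity option.
fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
    if !options.case_sensitive {
        // Normalize both sides before comparing.
        return from
            .to_ascii_lowercase()
            .starts_with(&partial.to_ascii_lowercase());
    }
    from.starts_with(partial)
}

fn main() {
    let insensitive = CompletionOptions { case_sensitive: false };
    let sensitive = CompletionOptions { case_sensitive: true };
    assert!(matches("rea", "README.md", &insensitive));
    assert!(!matches("rea", "README.md", &sensitive));
}
```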
@@ -11,7 +11,7 @@ use std::sync::Arc;
 #[derive(Clone)]
 pub struct VariableCompletion {
-    engine_state: Arc<EngineState>,
+    engine_state: Arc<EngineState>, // TODO: Is engine state necessary? It's already a part of working set in fetch()
     stack: Stack,
     var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
 }
@@ -70,7 +70,18 @@ impl Completer for VariableCompletion {
             if let Some(val) = env_vars.get(&target_var_str) {
-                return nested_suggestions(val.clone(), nested_levels, current_span);
+                for suggestion in
+                    nested_suggestions(val.clone(), nested_levels, current_span)
+                {
+                    if options
+                        .match_algorithm
+                        .matches_u8(suggestion.value.as_bytes(), &prefix)
+                    {
+                        output.push(suggestion);
+                    }
+                }
+
+                return output;
             }
         } else {
             // No nesting provided, return all env vars
@@ -105,7 +116,18 @@ impl Completer for VariableCompletion {
                     end: current_span.end,
                 },
             ) {
-                return nested_suggestions(nuval, self.var_context.1.clone(), current_span);
+                for suggestion in
+                    nested_suggestions(nuval, self.var_context.1.clone(), current_span)
+                {
+                    if options
+                        .match_algorithm
+                        .matches_u8(suggestion.value.as_bytes(), &prefix)
+                    {
+                        output.push(suggestion);
+                    }
+                }
+
+                return output;
             }
@@ -122,7 +144,18 @@ impl Completer for VariableCompletion {
             // If the value exists and it's of type Record
             if let Ok(value) = var {
-                return nested_suggestions(value, self.var_context.1.clone(), current_span);
+                for suggestion in
+                    nested_suggestions(value, self.var_context.1.clone(), current_span)
+                {
+                    if options
+                        .match_algorithm
+                        .matches_u8(suggestion.value.as_bytes(), &prefix)
+                    {
+                        output.push(suggestion);
+                    }
+                }
+
+                return output;
             }
@@ -143,24 +176,39 @@ impl Completer for VariableCompletion {
         }
     }

+    // TODO: The following can be refactored (see find_commands_by_predicate() used in
+    // command_completions).
+    let mut removed_overlays = vec![];
     // Working set scope vars
-    for scope in &working_set.delta.scope {
-        for v in &scope.vars {
-            if options.match_algorithm.matches_u8(v.0, &prefix) {
-                output.push(Suggestion {
-                    value: String::from_utf8_lossy(v.0).to_string(),
-                    description: None,
-                    extra: None,
-                    span: current_span,
-                    append_whitespace: false,
-                });
+    for scope_frame in working_set.delta.scope.iter().rev() {
+        for overlay_frame in scope_frame
+            .active_overlays(&mut removed_overlays)
+            .iter()
+            .rev()
+        {
+            for v in &overlay_frame.vars {
+                if options.match_algorithm.matches_u8(v.0, &prefix) {
+                    output.push(Suggestion {
+                        value: String::from_utf8_lossy(v.0).to_string(),
+                        description: None,
+                        extra: None,
+                        span: current_span,
+                        append_whitespace: false,
+                    });
+                }
             }
         }
     }

     // Permanent state vars
-    for scope in &self.engine_state.scope {
-        for v in &scope.vars {
+    // for scope in &self.engine_state.scope {
+    for overlay_frame in self
+        .engine_state
+        .active_overlays(&removed_overlays)
+        .iter()
+        .rev()
+    {
+        for v in &overlay_frame.vars {
             if options.match_algorithm.matches_u8(v.0, &prefix) {
                 output.push(Suggestion {
                     value: String::from_utf8_lossy(v.0).to_string(),
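The rewritten loops walk scope frames (and their active overlays) from innermost to outermost and keep only variable names that match the typed prefix. A reduced sketch of that traversal over plain byte-keyed maps, with the frame type as a stand-in for the engine's scope structures:

```rust
use std::collections::BTreeMap;

// name -> variable id, standing in for one scope frame's variables
type Vars = BTreeMap<Vec<u8>, u32>;

/// Walk frames innermost-first and collect names matching the prefix.
fn complete_vars(frames: &[Vars], prefix: &[u8]) -> Vec<String> {
    let mut output = Vec::new();
    for frame in frames.iter().rev() {
        for name in frame.keys() {
            if name.starts_with(prefix) {
                output.push(String::from_utf8_lossy(name).to_string());
            }
        }
    }
    output
}

fn main() {
    let mut global = Vars::new();
    global.insert(b"nu".to_vec(), 0);
    global.insert(b"nums".to_vec(), 1);
    let mut local = Vars::new();
    local.insert(b"nu".to_vec(), 2);

    // The innermost frame (local) is visited first, so its `nu` comes first.
    assert_eq!(complete_vars(&[global, local], b"nu"), ["nu", "nu", "nums"]);
}
```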
@@ -173,7 +221,7 @@ impl Completer for VariableCompletion {
-        output.dedup();
+        output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?

         output
     }
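The `// TODO` added above points at a real subtlety: `Vec::dedup` only removes *consecutive* duplicates, so equal suggestions coming from different scopes survive unless the vector is sorted first. A short illustration in plain Rust, unrelated to the nu-cli types:

```rust
fn main() {
    let mut suggestions = vec!["nu", "path", "nu"];

    // dedup alone only collapses neighbours, so the second "nu" stays.
    let mut only_dedup = suggestions.clone();
    only_dedup.dedup();
    assert_eq!(only_dedup, ["nu", "path", "nu"]);

    // Sorting first makes duplicates adjacent, so dedup removes them all.
    suggestions.sort();
    suggestions.dedup();
    assert_eq!(suggestions, ["nu", "path"]);
}
```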
@@ -69,7 +69,9 @@ pub fn eval_config_contents(
     // Merge the delta in case env vars changed in the config
     match nu_engine::env::current_dir(engine_state, stack) {
         Ok(cwd) => {
-            if let Err(e) = engine_state.merge_delta(StateDelta::new(), Some(stack), cwd) {
+            if let Err(e) =
+                engine_state.merge_delta(StateDelta::new(engine_state), Some(stack), cwd)
+            {
                 let working_set = StateWorkingSet::new(engine_state);
                 report_error(&working_set, &e);
             }
@@ -9,7 +9,7 @@ use nu_protocol::{
     engine::{EngineState, Stack, StateWorkingSet},
     Config, PipelineData, Span, Value,
 };
-use std::io::Write;
+use nu_utils::stdout_write_all_and_flush;

 /// Main function used when a file path is found as argument for nu
 pub fn evaluate_file(
@@ -61,17 +61,20 @@ pub fn evaluate_file(
 pub fn print_table_or_error(
-    engine_state: &EngineState,
+    engine_state: &mut EngineState,
     stack: &mut Stack,
     mut pipeline_data: PipelineData,
-    config: &Config,
+    config: &mut Config,
 ) {
     let exit_code = match &mut pipeline_data {
         PipelineData::ExternalStream { exit_code, .. } => exit_code.take(),
         _ => None,
     };

-    match engine_state.find_decl("table".as_bytes()) {
+    // Change the engine_state config to use the passed in configuration
+    engine_state.set_config(config);
+
+    match engine_state.find_decl("table".as_bytes(), &[]) {
         Some(decl_id) => {
             let table = engine_state.get_decl(decl_id).run(
                 engine_state,
@@ -83,8 +86,6 @@ pub fn print_table_or_error(
             match table {
                 Ok(table) => {
                     for item in table {
-                        let stdout = std::io::stdout();
-
                         if let Value::Error { error } = item {
                             let working_set = StateWorkingSet::new(engine_state);
@@ -96,10 +97,7 @@ pub fn print_table_or_error(
                         let mut out = item.into_string("\n", config);
                         out.push('\n');

-                        match stdout.lock().write_all(out.as_bytes()) {
-                            Ok(_) => (),
-                            Err(err) => eprintln!("{}", err),
-                        };
+                        let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{}", err));
                     }
                 }
                 Err(error) => {
@@ -113,8 +111,6 @@ pub fn print_table_or_error(
         None => {
             for item in pipeline_data {
-                let stdout = std::io::stdout();
-
                 if let Value::Error { error } = item {
                     let working_set = StateWorkingSet::new(engine_state);
@@ -126,10 +122,7 @@ pub fn print_table_or_error(
                 let mut out = item.into_string("\n", config);
                 out.push('\n');

-                match stdout.lock().write_all(out.as_bytes()) {
-                    Ok(_) => (),
-                    Err(err) => eprintln!("{}", err),
-                };
+                let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{}", err));
             }
         }
     };
@@ -586,7 +586,7 @@ impl Menu for DescriptionMenu {
             } else {
                 self.example_index = Some(self.examples.len().saturating_sub(1));
             }
-        } else {
+        } else if !self.examples.is_empty() {
             self.example_index = Some(0);
         }
     }
@@ -598,7 +598,7 @@ impl Menu for DescriptionMenu {
             } else {
                 self.example_index = Some(0);
             }
-        } else {
+        } else if !self.examples.is_empty() {
            self.example_index = Some(0);
         }
     }
@@ -16,6 +16,11 @@ impl Command for Print {
     fn signature(&self) -> Signature {
         Signature::build("print")
             .rest("rest", SyntaxShape::Any, "the values to print")
+            .switch(
+                "no_newline",
+                "print without inserting a newline for the line ending",
+                Some('n'),
+            )
             .category(Category::Strings)
     }
@@ -31,10 +36,12 @@ impl Command for Print {
         _input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
         let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
+        let no_newline = call.has_flag("no_newline");
         let head = call.head;

         for arg in args {
-            arg.into_pipeline_data().print(engine_state, stack)?;
+            arg.into_pipeline_data()
+                .print(engine_state, stack, no_newline)?;
         }

         Ok(PipelineData::new(head))
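The new `--no_newline` switch only controls whether a trailing newline is written after each value. The flag-gated behavior in isolation, using just the standard library (the helper name is made up for the example):

```rust
use std::io::{self, Write};

/// Write each value, optionally suppressing the trailing newline (hypothetical helper).
fn print_values(values: &[&str], no_newline: bool) -> io::Result<()> {
    let mut stdout = io::stdout().lock();
    for value in values {
        stdout.write_all(value.as_bytes())?;
        if !no_newline {
            stdout.write_all(b"\n")?;
        }
    }
    stdout.flush()
}

fn main() -> io::Result<()> {
    print_values(&["hello", "world"], false)?; // one value per line
    print_values(&["no newline here"], true) // the caller decides when to break the line
}
```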
@@ -7,9 +7,6 @@ use {
     std::borrow::Cow,
 };

-const PROMPT_MARKER_BEFORE_PS1: &str = "\x1b]133;A\x1b\\"; // OSC 133;A ST
-const PROMPT_MARKER_BEFORE_PS2: &str = "\x1b]133;A;k=s\x1b\\"; // OSC 133;A;k=s ST
-
 /// Nushell prompt definition
 #[derive(Clone)]
 pub struct NushellPrompt {
@@ -19,7 +16,6 @@ pub struct NushellPrompt {
     default_vi_insert_prompt_indicator: Option<String>,
     default_vi_normal_prompt_indicator: Option<String>,
     default_multiline_indicator: Option<String>,
-    shell_integration: bool,
 }

 impl Default for NushellPrompt {
@@ -37,7 +33,6 @@ impl NushellPrompt {
             default_vi_insert_prompt_indicator: None,
             default_vi_normal_prompt_indicator: None,
             default_multiline_indicator: None,
-            shell_integration: false,
         }
     }
@@ -87,34 +82,20 @@ impl NushellPrompt {
     fn default_wrapped_custom_string(&self, str: String) -> String {
         format!("({})", str)
     }
-
-    pub(crate) fn enable_shell_integration(&mut self) {
-        self.shell_integration = true
-    }
 }

 impl Prompt for NushellPrompt {
     fn render_prompt_left(&self) -> Cow<str> {
-        // Just before starting to draw the PS1 prompt send the escape code (see
-        // https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
-        let mut prompt = if self.shell_integration {
-            String::from(PROMPT_MARKER_BEFORE_PS1)
-        } else {
-            String::new()
-        };
-
-        prompt.push_str(&match &self.left_prompt_string {
-            Some(prompt_string) => prompt_string.replace('\n', "\r\n"),
-            None => {
-                let default = DefaultPrompt::new();
-                default
-                    .render_prompt_left()
-                    .to_string()
-                    .replace('\n', "\r\n")
-            }
-        });
-
-        prompt.into()
+        if let Some(prompt_string) = &self.left_prompt_string {
+            prompt_string.replace('\n', "\r\n").into()
+        } else {
+            let default = DefaultPrompt::new();
+            default
+                .render_prompt_left()
+                .to_string()
+                .replace('\n', "\r\n")
+                .into()
+        }
     }

     fn render_prompt_right(&self) -> Cow<str> {
@@ -155,21 +136,10 @@ impl Prompt for NushellPrompt {
     fn render_prompt_multiline_indicator(&self) -> Cow<str> {
-        // Just before starting to draw the PS1 prompt send the escape code (see
-        // https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
-        let mut prompt = if self.shell_integration {
-            String::from(PROMPT_MARKER_BEFORE_PS2)
-        } else {
-            String::new()
-        };
-
-        prompt.push_str(
-            self.default_multiline_indicator
-                .as_ref()
-                .unwrap_or(&String::from("::: ")),
-        );
-
-        prompt.into()
+        match &self.default_multiline_indicator {
+            Some(indicator) => indicator.as_str().into(),
+            None => "::: ".into(),
+        }
     }

     fn render_prompt_history_search_indicator(
|
@ -147,10 +147,6 @@ pub(crate) fn update_prompt<'prompt>(
|
|||||||
(prompt_vi_insert_string, prompt_vi_normal_string),
|
(prompt_vi_insert_string, prompt_vi_normal_string),
|
||||||
);
|
);
|
||||||
|
|
||||||
if config.shell_integration {
|
|
||||||
nu_prompt.enable_shell_integration();
|
|
||||||
}
|
|
||||||
|
|
||||||
let ret_val = nu_prompt as &dyn Prompt;
|
let ret_val = nu_prompt as &dyn Prompt;
|
||||||
if is_perf_true {
|
if is_perf_true {
|
||||||
info!("update_prompt {}:{}:{}", file!(), line!(), column!());
|
info!("update_prompt {}:{}:{}", file!(), line!(), column!());
|
||||||
|
@@ -501,14 +501,16 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
         ReedlineEvent::MenuPrevious,
     );

-    // History menu keybinding
+    keybindings.add_binding(
+        KeyModifiers::CONTROL,
+        KeyCode::Char('r'),
+        ReedlineEvent::Menu("history_menu".to_string()),
+    );
+
     keybindings.add_binding(
         KeyModifiers::CONTROL,
         KeyCode::Char('x'),
-        ReedlineEvent::UntilFound(vec![
-            ReedlineEvent::Menu("history_menu".to_string()),
-            ReedlineEvent::MenuPageNext,
-        ]),
+        ReedlineEvent::MenuPageNext,
     );

     keybindings.add_binding(
@@ -522,8 +524,8 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
     // Help menu keybinding
     keybindings.add_binding(
-        KeyModifiers::CONTROL,
-        KeyCode::Char('q'),
+        KeyModifiers::NONE,
+        KeyCode::F(1),
         ReedlineEvent::Menu("help_menu".to_string()),
     );
 }
@@ -1,28 +1,26 @@
-use crate::reedline_config::add_menus;
-use crate::{completions::NuCompleter, NuHighlighter, NuValidator, NushellPrompt};
-use crate::{prompt_update, reedline_config};
 use crate::{
-    reedline_config::KeybindingsMode,
+    completions::NuCompleter,
+    prompt_update,
+    reedline_config::{add_menus, create_keybindings, KeybindingsMode},
     util::{eval_source, report_error},
+    NuHighlighter, NuValidator, NushellPrompt,
 };
-use log::info;
-use log::trace;
+use log::{info, trace};
 use miette::{IntoDiagnostic, Result};
 use nu_color_config::get_color_config;
-use nu_engine::convert_env_values;
+use nu_engine::{convert_env_values, eval_block};
 use nu_parser::lex;
-use nu_protocol::engine::Stack;
-use nu_protocol::PipelineData;
 use nu_protocol::{
-    engine::{EngineState, StateWorkingSet},
-    ShellError, Span, Value,
+    engine::{EngineState, Stack, StateWorkingSet},
+    BlockId, PipelineData, PositionalArg, ShellError, Span, Value,
 };
 use reedline::{DefaultHinter, Emacs, Vi};
 use std::io::{self, Write};
 use std::path::PathBuf;
 use std::{sync::atomic::Ordering, time::Instant};

-const PROMPT_MARKER_BEFORE_CMD: &str = "\x1b]133;C\x1b\\"; // OSC 133;C ST
+const PRE_EXECUTE_MARKER: &str = "\x1b]133;A\x1b\\";
+const PRE_PROMPT_MARKER: &str = "\x1b]133;C\x1b\\";
 const RESET_APPLICATION_MODE: &str = "\x1b[?1l";

 pub fn evaluate_repl(
@@ -161,7 +159,26 @@ pub fn evaluate_repl(
         };

-        line_editor = line_editor.with_buffer_editor(config.buffer_editor.clone(), "nu".into());
+        let buffer_editor = if !config.buffer_editor.is_empty() {
+            Some(config.buffer_editor.clone())
+        } else {
+            stack
+                .get_env_var(engine_state, "EDITOR")
+                .map(|v| v.as_string().unwrap_or_default())
+                .filter(|v| !v.is_empty())
+                .or_else(|| {
+                    stack
+                        .get_env_var(engine_state, "VISUAL")
+                        .map(|v| v.as_string().unwrap_or_default())
+                        .filter(|v| !v.is_empty())
+                })
+        };
+
+        line_editor = if let Some(buffer_editor) = buffer_editor {
+            line_editor.with_buffer_editor(buffer_editor, "nu".into())
+        } else {
+            line_editor
+        };

         if config.sync_history_on_enter {
             if is_perf_true {
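The buffer-editor change above prefers the explicitly configured editor and only then falls back to `$EDITOR` and `$VISUAL`, skipping empty values. The same resolution order can be sketched with `std::env` alone (the config argument stands in for the real config lookup):

```rust
use std::env;

/// Resolve the external editor: explicit config first, then $EDITOR, then $VISUAL.
fn resolve_buffer_editor(configured: &str) -> Option<String> {
    if !configured.is_empty() {
        return Some(configured.to_string());
    }
    env::var("EDITOR")
        .ok()
        .filter(|v| !v.is_empty())
        .or_else(|| env::var("VISUAL").ok().filter(|v| !v.is_empty()))
}

fn main() {
    match resolve_buffer_editor("") {
        Some(editor) => println!("would launch: {editor}"),
        None => println!("no editor configured"),
    }
}
```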
@@ -175,7 +192,7 @@ pub fn evaluate_repl(
         // Changing the line editor based on the found keybindings
-        line_editor = match reedline_config::create_keybindings(config) {
+        line_editor = match create_keybindings(config) {
             Ok(keybindings) => match keybindings {
                 KeybindingsMode::Emacs(keybindings) => {
                     let edit_mode = Box::new(Emacs::new(keybindings));
@@ -200,6 +217,65 @@ pub fn evaluate_repl(
             info!("prompt_update {}:{}:{}", file!(), line!(), column!());
         }

+        // Right before we start our prompt and take input from the user,
+        // fire the "pre_prompt" hook
+        if let Some(hook) = &config.hooks.pre_prompt {
+            if let Err(err) = run_hook(engine_state, stack, vec![], hook) {
+                let working_set = StateWorkingSet::new(engine_state);
+                report_error(&working_set, &err);
+            }
+        }
+
+        // Next, check all the environment variables they ask for
+        // fire the "env_change" hook
+        if let Some(hook) = config.hooks.env_change.clone() {
+            match hook {
+                Value::Record {
+                    cols, vals: blocks, ..
+                } => {
+                    for (idx, env_var) in cols.iter().enumerate() {
+                        let before = engine_state
+                            .previous_env_vars
+                            .get(env_var)
+                            .cloned()
+                            .unwrap_or_default();
+                        let after = stack.get_env_var(engine_state, env_var).unwrap_or_default();
+                        if before != after {
+                            if let Err(err) = run_hook(
+                                engine_state,
+                                stack,
+                                vec![before, after.clone()],
+                                &blocks[idx],
+                            ) {
+                                let working_set = StateWorkingSet::new(engine_state);
+                                report_error(&working_set, &err);
+                            }
+
+                            engine_state
+                                .previous_env_vars
+                                .insert(env_var.to_string(), after);
+                        }
+                    }
+                }
+                x => {
+                    let working_set = StateWorkingSet::new(engine_state);
+                    report_error(
+                        &working_set,
+                        &ShellError::TypeMismatch(
+                            "record for 'env_change' hook".to_string(),
+                            x.span().unwrap_or_else(|_| Span::new(0, 0)),
+                        ),
+                    )
+                }
+            }
+        }
+
+        config = engine_state.get_config();
+
+        if config.shell_integration {
+            run_ansi_sequence(PRE_EXECUTE_MARKER)?;
+        }
+
         let prompt =
             prompt_update::update_prompt(config, engine_state, stack, &mut nu_prompt, is_perf_true);

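The `env_change` hook works by remembering the previously seen value of each watched variable, firing the hook only when the current value differs, and then updating the snapshot. Stripped of the engine types, the bookkeeping looks roughly like this (the callback stands in for running the hook block):

```rust
use std::collections::HashMap;

/// Fire `on_change(name, before, after)` for every watched variable whose value changed,
/// then update the stored snapshot so the hook does not fire again for the same value.
fn check_env_changes(
    previous: &mut HashMap<String, String>,
    current: &HashMap<String, String>,
    watched: &[&str],
    mut on_change: impl FnMut(&str, &str, &str),
) {
    for name in watched {
        let before = previous.get(*name).cloned().unwrap_or_default();
        let after = current.get(*name).cloned().unwrap_or_default();
        if before != after {
            on_change(name, &before, &after);
            previous.insert((*name).to_string(), after);
        }
    }
}

fn main() {
    let mut previous = HashMap::new();
    let mut current = HashMap::new();
    current.insert("PWD".to_string(), "/tmp".to_string());

    check_env_changes(&mut previous, &current, &["PWD"], |name, before, after| {
        println!("{name} changed from {before:?} to {after:?}");
    });
    // Second pass: nothing changed, so the callback is not invoked again.
    check_env_changes(&mut previous, &current, &["PWD"], |_, _, _| unreachable!());
}
```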
@@ -215,10 +291,39 @@ pub fn evaluate_repl(
         let input = line_editor.read_line(prompt);
-        let use_shell_integration = config.shell_integration;

         match input {
             Ok(Signal::Success(s)) => {
+                // Right before we start running the code the user gave us,
+                // fire the "pre_execution" hook
+                if let Some(hook) = &config.hooks.pre_execution {
+                    if let Err(err) = run_hook(engine_state, stack, vec![], hook) {
+                        let working_set = StateWorkingSet::new(engine_state);
+                        report_error(&working_set, &err);
+                    }
+                }
+
+                if config.shell_integration {
+                    run_ansi_sequence(RESET_APPLICATION_MODE)?;
+                    run_ansi_sequence(PRE_PROMPT_MARKER)?;
+                    if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
+                        let path = cwd.as_string()?;
+                        // Try to abbreviate string for windows title
+                        let maybe_abbrev_path = if let Some(p) = nu_path::home_dir() {
+                            path.replace(&p.as_path().display().to_string(), "~")
+                        } else {
+                            path
+                        };
+
+                        // Set window title too
+                        // https://tldp.org/HOWTO/Xterm-Title-3.html
+                        // ESC]0;stringBEL -- Set icon name and window title to string
+                        // ESC]1;stringBEL -- Set icon name to string
+                        // ESC]2;stringBEL -- Set window title to string
+                        run_ansi_sequence(&format!("\x1b]2;{}\x07", maybe_abbrev_path))?;
+                    }
+                }
+
                 let start_time = Instant::now();
                 let tokens = lex(s.as_bytes(), 0, &[], &[], false);
                 // Check if this is a single call to a directory, if so auto-cd
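Setting the terminal title above abbreviates the home directory to `~` and then emits the standard `ESC]2;...BEL` sequence. A minimal version of that, reading the home directory from `$HOME` instead of `nu_path::home_dir()` (an assumption made for this sketch):

```rust
use std::env;
use std::io::{self, Write};

/// Replace the home-directory prefix with `~` for a shorter window title.
fn abbreviate_home(path: &str) -> String {
    match env::var("HOME") {
        Ok(home) if !home.is_empty() => path.replace(&home, "~"),
        _ => path.to_string(),
    }
}

/// ESC]2;stringBEL -- set the window title to `string`.
fn set_window_title(title: &str) -> io::Result<()> {
    let mut stdout = io::stdout().lock();
    stdout.write_all(format!("\x1b]2;{}\x07", title).as_bytes())?;
    stdout.flush()
}

fn main() -> io::Result<()> {
    let cwd = env::current_dir()?.display().to_string();
    set_window_title(&abbreviate_home(&cwd))
}
```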
@@ -244,7 +349,6 @@ pub fn evaluate_repl(
                             &ShellError::DirectoryNotFound(tokens.0[0].span, None),
                         );
                     }
-
                     let path = nu_path::canonicalize_with(path, &cwd)
                         .expect("internal error: cannot canonicalize known path");
                     (path.to_string_lossy().to_string(), tokens.0[0].span)
@@ -290,37 +394,22 @@ pub fn evaluate_repl(
                         &format!("entry #{}", entry_num),
                         PipelineData::new(Span::new(0, 0)),
                     );
-
-                    stack.add_env_var(
-                        "CMD_DURATION_MS".into(),
-                        Value::String {
-                            val: format!("{}", start_time.elapsed().as_millis()),
-                            span: Span { start: 0, end: 0 },
-                        },
-                    );
                 }

+                stack.add_env_var(
+                    "CMD_DURATION_MS".into(),
+                    Value::String {
+                        val: format!("{}", start_time.elapsed().as_millis()),
+                        span: Span { start: 0, end: 0 },
+                    },
+                );
+
                 // FIXME: permanent state changes like this hopefully in time can be removed
                 // and be replaced by just passing the cwd in where needed
                 if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
                     let path = cwd.as_string()?;
                     let _ = std::env::set_current_dir(path);
-                    engine_state.env_vars.insert("PWD".into(), cwd);
-                }
-
-                if use_shell_integration {
-                    // Just before running a command/program, send the escape code (see
-                    // https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
-                    let mut ansi_escapes = String::from(PROMPT_MARKER_BEFORE_CMD);
-                    ansi_escapes.push_str(RESET_APPLICATION_MODE);
-                    if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
-                        let path = cwd.as_string()?;
-                        ansi_escapes.push_str(&format!("\x1b]2;{}\x07", path));
-                    }
-                    // print!("{}", ansi_escapes);
-                    match io::stdout().write_all(ansi_escapes.as_bytes()) {
-                        Ok(it) => it,
-                        Err(err) => print!("error: {}", err),
-                    };
+                    engine_state.add_env_var("PWD".into(), cwd);
                 }
             }
             Ok(Signal::CtrlC) => {
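`CMD_DURATION_MS` is now set after every entry rather than only on the auto-cd path: the elapsed time since just before evaluation is formatted as whole milliseconds and stored as a string. The timing part in isolation:

```rust
use std::time::Instant;

fn main() {
    let start_time = Instant::now();

    // Stand-in for evaluating the user's input.
    let _sum: u64 = (0..1_000_000u64).sum();

    // Same formatting as the environment variable: whole milliseconds as a string.
    let cmd_duration_ms = format!("{}", start_time.elapsed().as_millis());
    println!("CMD_DURATION_MS = {cmd_duration_ms}");
}
```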
@@ -342,3 +431,87 @@ pub fn evaluate_repl(
     Ok(())
 }
+
+fn run_ansi_sequence(seq: &str) -> Result<(), ShellError> {
+    match io::stdout().write_all(seq.as_bytes()) {
+        Ok(it) => it,
+        Err(err) => {
+            return Err(ShellError::GenericError(
+                "Error writing ansi sequence".into(),
+                err.to_string(),
+                Some(Span { start: 0, end: 0 }),
+                None,
+                Vec::new(),
+            ));
+        }
+    };
+    io::stdout().flush().map_err(|e| {
+        ShellError::GenericError(
+            "Error flushing stdio".into(),
+            e.to_string(),
+            Some(Span { start: 0, end: 0 }),
+            None,
+            Vec::new(),
+        )
+    })
+}
+
+pub fn run_hook(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    arguments: Vec<Value>,
+    value: &Value,
+) -> Result<(), ShellError> {
+    match value {
+        Value::List { vals, .. } => {
+            for val in vals {
+                run_hook(engine_state, stack, arguments.clone(), val)?
+            }
+            Ok(())
+        }
+        Value::Block {
+            val: block_id,
+            span,
+            ..
+        } => run_hook_block(engine_state, stack, *block_id, arguments, *span),
+        x => match x.span() {
+            Ok(span) => Err(ShellError::MissingConfigValue(
+                "block for hook in config".into(),
+                span,
+            )),
+            _ => Err(ShellError::MissingConfigValue(
+                "block for hook in config".into(),
+                Span { start: 0, end: 0 },
+            )),
+        },
+    }
+}
+
+pub fn run_hook_block(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    block_id: BlockId,
+    arguments: Vec<Value>,
+    span: Span,
+) -> Result<(), ShellError> {
+    let block = engine_state.get_block(block_id);
+    let input = PipelineData::new(span);
+
+    let mut callee_stack = stack.gather_captures(&block.captures);
+
+    for (idx, PositionalArg { var_id, .. }) in
+        block.signature.required_positional.iter().enumerate()
+    {
+        if let Some(var_id) = var_id {
+            callee_stack.add_var(*var_id, arguments[idx].clone())
+        }
+    }
+
+    match eval_block(engine_state, &mut callee_stack, block, input, false, false) {
+        Ok(pipeline_data) => match pipeline_data.into_value(span) {
+            Value::Error { error } => Err(error),
+            _ => Ok(()),
+        },
+        Err(err) => Err(err),
+    }
+}
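`run_hook` accepts either a single block or a list of blocks and recurses over lists, so the configuration can supply one hook or many, and the first failure stops the run. The dispatch shape with closures standing in for evaluated blocks (a simplified model, not the engine API):

```rust
/// A hook value is either one action or a list of hook values, mirroring the
/// `Value::Block` / `Value::List` cases handled by `run_hook`.
enum Hook {
    Action(fn(&[String]) -> Result<(), String>),
    List(Vec<Hook>),
}

fn run_hook(hook: &Hook, arguments: &[String]) -> Result<(), String> {
    match hook {
        Hook::List(hooks) => {
            for h in hooks {
                run_hook(h, arguments)?;
            }
            Ok(())
        }
        Hook::Action(f) => f(arguments),
    }
}

fn main() {
    let hooks = Hook::List(vec![
        Hook::Action(|args| {
            println!("pre_prompt hook, {} argument(s)", args.len());
            Ok(())
        }),
        Hook::Action(|_| Err("misconfigured hook".to_string())),
    ]);

    // The first failing hook stops the run, like the `?` in the real implementation.
    assert!(run_hook(&hooks, &[]).is_err());
}
```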
@@ -179,7 +179,7 @@ fn gather_env_vars(vars: impl Iterator<Item = (String, String)>, engine_state: &
         // stack.add_env_var(name, value);
-        engine_state.env_vars.insert(name, value);
+        engine_state.add_env_var(name, value);
     }
@@ -211,8 +211,8 @@ pub fn eval_source(
         (output, working_set.render())
     };

-    let cwd = match nu_engine::env::current_dir_str(engine_state, stack) {
-        Ok(p) => PathBuf::from(p),
+    let cwd = match nu_engine::env::current_dir(engine_state, stack) {
+        Ok(p) => p,
         Err(e) => {
             let working_set = StateWorkingSet::new(engine_state);
             report_error(&working_set, &e);
@@ -220,10 +220,7 @@ pub fn eval_source(
     };

-    if let Err(err) = engine_state.merge_delta(delta, Some(stack), &cwd) {
-        let working_set = StateWorkingSet::new(engine_state);
-        report_error(&working_set, &err);
-    }
+    let _ = engine_state.merge_delta(delta, Some(stack), &cwd);

     match eval_block(engine_state, stack, &block, input, false, false) {
         Ok(mut pipeline_data) => {
@@ -237,7 +234,7 @@ pub fn eval_source(
                 set_last_exit_code(stack, 0);
             }

-            if let Err(err) = pipeline_data.print(engine_state, stack) {
+            if let Err(err) = pipeline_data.print(engine_state, stack, false) {
                 let working_set = StateWorkingSet::new(engine_state);

                 report_error(&working_set, &err);
@@ -319,12 +316,18 @@ mod test {
             &mut engine_state,
         );

-        let env = engine_state.env_vars;
+        let env = engine_state.render_env_vars();

-        assert!(matches!(env.get("FOO"), Some(Value::String { val, .. }) if val == "foo"));
-        assert!(matches!(env.get("SYMBOLS"), Some(Value::String { val, .. }) if val == symbols));
-        assert!(matches!(env.get(symbols), Some(Value::String { val, .. }) if val == "symbols"));
-        assert!(env.get("PWD").is_some());
+        assert!(
+            matches!(env.get(&"FOO".to_string()), Some(&Value::String { val, .. }) if val == "foo")
+        );
+        assert!(
+            matches!(env.get(&"SYMBOLS".to_string()), Some(&Value::String { val, .. }) if val == symbols)
+        );
+        assert!(
+            matches!(env.get(&symbols.to_string()), Some(&Value::String { val, .. }) if val == "symbols")
+        );
+        assert!(env.get(&"PWD".to_string()).is_some());
         assert_eq!(env.len(), 4);
     }
 }
crates/nu-cli/tests/custom_completions.rs (new file)
@@ -0,0 +1,29 @@
+pub mod support;
+
+use nu_cli::NuCompleter;
+use reedline::Completer;
+use support::{match_suggestions, new_engine};
+
+#[test]
+fn variables_completions() {
+    // Create a new engine
+    let (dir, _, mut engine, mut stack) = new_engine();
+
+    // Add record value as example
+    let record = r#"def animals [] { ["cat", "dog", "eel" ] }
+    def my-command [animal: string@animals] { print $animal }"#;
+    assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
+
+    // Instatiate a new completer
+    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+
+    // Test completions for $nu
+    let suggestions = completer.complete("my-command ".into(), 11);
+
+    assert_eq!(3, suggestions.len());
+
+    let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
+
+    // Match results
+    match_suggestions(expected, suggestions);
+}
crates/nu-cli/tests/dotnu_completions.rs (new file)
@@ -0,0 +1,28 @@
+pub mod support;
+
+use nu_cli::NuCompleter;
+use reedline::Completer;
+use support::new_engine;
+
+#[test]
+fn dotnu_completions() {
+    // Create a new engine
+    let (_, _, engine, stack) = new_engine();
+
+    // Instatiate a new completer
+    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+
+    // Test source completion
+    let completion_str = "source ".to_string();
+    let suggestions = completer.complete(&completion_str, completion_str.len());
+
+    assert_eq!(1, suggestions.len());
+    assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
+
+    // Test use completion
+    let completion_str = "use ".to_string();
+    let suggestions = completer.complete(&completion_str, completion_str.len());
+
+    assert_eq!(1, suggestions.len());
+    assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
+}
crates/nu-cli/tests/file_completions.rs (new file)
@@ -0,0 +1,42 @@
+pub mod support;
+
+use nu_cli::NuCompleter;
+use reedline::Completer;
+use support::{file, folder, match_suggestions, new_engine};
+
+#[test]
+fn file_completions() {
+    // Create a new engine
+    let (dir, dir_str, engine, stack) = new_engine();
+
+    // Instatiate a new completer
+    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+
+    // Test completions for the current folder
+    let target_dir = format!("cp {}", dir_str);
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![
+        file(dir.join("nushell")),
+        folder(dir.join("test_a")),
+        folder(dir.join("test_b")),
+        folder(dir.join("another")),
+        file(dir.join("custom_completion.nu")),
+        file(dir.join(".hidden_file")),
+        folder(dir.join(".hidden_folder")),
+    ];
+
+    // Match the results
+    match_suggestions(expected_paths, suggestions);
+
+    // Test completions for the completions/another folder
+    let target_dir = format!("cd {}", folder(dir.join("another")));
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];
+
+    // Match the results
+    match_suggestions(expected_paths, suggestions);
+}
crates/nu-cli/tests/flag_completions.rs (new file)
@@ -0,0 +1,36 @@
+pub mod support;
+
+use nu_cli::NuCompleter;
+use reedline::Completer;
+use support::{match_suggestions, new_engine};
+
+#[test]
+fn flag_completions() {
+    // Create a new engine
+    let (_, _, engine, stack) = new_engine();
+
+    // Instatiate a new completer
+    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+    // Test completions for the 'ls' flags
+    let suggestions = completer.complete("ls -".into(), 4);
+
+    assert_eq!(12, suggestions.len());
+
+    let expected: Vec<String> = vec![
+        "--all".into(),
+        "--du".into(),
+        "--full-paths".into(),
+        "--help".into(),
+        "--long".into(),
+        "--short-names".into(),
+        "-a".into(),
+        "-d".into(),
+        "-f".into(),
+        "-h".into(),
+        "-l".into(),
+        "-s".into(),
+    ];
+
+    // Match results
+    match_suggestions(expected, suggestions);
+}
crates/nu-cli/tests/folder_completions.rs (new file)
@@ -0,0 +1,29 @@
+pub mod support;
+
+use nu_cli::NuCompleter;
+use reedline::Completer;
+use support::{folder, match_suggestions, new_engine};
+
+#[test]
+fn folder_completions() {
+    // Create a new engine
+    let (dir, dir_str, engine, stack) = new_engine();
+
+    // Instatiate a new completer
+    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
+
+    // Test completions for the current folder
+    let target_dir = format!("cd {}", dir_str);
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![
+        folder(dir.join("test_a")),
+        folder(dir.join("test_b")),
+        folder(dir.join("another")),
+        folder(dir.join(".hidden_folder")),
+    ];
+
+    // Match the results
+    match_suggestions(expected_paths, suggestions);
+}
crates/nu-cli/tests/support/completions_helpers.rs (new file)
@@ -0,0 +1,117 @@
+use std::path::PathBuf;
+
+use nu_command::create_default_context;
+use nu_engine::eval_block;
+use nu_parser::parse;
+use nu_protocol::{
+    engine::{EngineState, Stack, StateDelta, StateWorkingSet},
+    PipelineData, ShellError, Span, Value,
+};
+use nu_test_support::fs;
+use reedline::Suggestion;
+const SEP: char = std::path::MAIN_SEPARATOR;
+
+// creates a new engine with the current path into the completions fixtures folder
+pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
+    // Target folder inside assets
+    let dir = fs::fixtures().join("completions");
+    let mut dir_str = dir
+        .clone()
+        .into_os_string()
+        .into_string()
+        .unwrap_or_default();
+    dir_str.push(SEP);
+
+    // Create a new engine with default context
+    let mut engine_state = create_default_context(&dir);
+
+    // New stack
+    let mut stack = Stack::new();
+
+    // New delta state
+    let delta = StateDelta::new(&engine_state);
+
+    // Add pwd as env var
+    stack.add_env_var(
+        "PWD".to_string(),
+        Value::String {
+            val: dir_str.clone(),
+            span: nu_protocol::Span {
+                start: 0,
+                end: dir_str.len(),
+            },
+        },
+    );
+    stack.add_env_var(
+        "TEST".to_string(),
+        Value::String {
+            val: "NUSHELL".to_string(),
+            span: nu_protocol::Span {
+                start: 0,
+                end: dir_str.len(),
+            },
+        },
+    );
+
+    // Merge delta
+    let merge_result = engine_state.merge_delta(delta, Some(&mut stack), &dir);
+    assert!(merge_result.is_ok());
+
+    (dir, dir_str, engine_state, stack)
+}
+
+// match a list of suggestions with the expected values
+pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
+    expected.iter().zip(suggestions).for_each(|it| {
+        assert_eq!(it.0, &it.1.value);
+    });
+}
+
+// append the separator to the converted path
+pub fn folder(path: PathBuf) -> String {
+    let mut converted_path = file(path);
+    converted_path.push(SEP);
+
+    converted_path
+}
+
+// convert a given path to string
+pub fn file(path: PathBuf) -> String {
+    path.into_os_string().into_string().unwrap_or_default()
+}
+
+// merge_input executes the given input into the engine
+// and merges the state
+pub fn merge_input(
+    input: &[u8],
+    engine_state: &mut EngineState,
+    stack: &mut Stack,
+    dir: PathBuf,
+) -> Result<(), ShellError> {
+    let (block, delta) = {
+        let mut working_set = StateWorkingSet::new(&engine_state);
+
+        let (block, err) = parse(&mut working_set, None, input, false, &[]);
+
+        assert!(err.is_none());
+
+        (block, working_set.render())
+    };
+    assert!(eval_block(
+        &engine_state,
+        stack,
+        &block,
+        PipelineData::Value(
+            Value::Nothing {
+                span: Span { start: 0, end: 0 },
+            },
+            None
+        ),
+        false,
+        false
+    )
+    .is_ok());
+
+    // Merge delta
+    engine_state.merge_delta(delta, Some(stack), &dir)
+}
crates/nu-cli/tests/support/mod.rs (new file)
@@ -0,0 +1,3 @@
+pub mod completions_helpers;
+
+pub use completions_helpers::{file, folder, match_suggestions, merge_input, new_engine};
@ -1,107 +0,0 @@
use std::path::PathBuf;

use nu_cli::NuCompleter;
use nu_command::create_default_context;
use nu_protocol::engine::{EngineState, Stack};
use nu_test_support::fs;
use reedline::{Completer, Suggestion};

const SEP: char = std::path::MAIN_SEPARATOR;

#[test]
fn file_completions() {
    // Create a new engine
    let (dir, dir_str, engine) = new_engine();

    let stack = Stack::new();

    // Instatiate a new completer
    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);

    // Test completions for the current folder
    let target_dir = format!("cp {}", dir_str);
    let suggestions = completer.complete(&target_dir, target_dir.len());

    // Create the expected values
    let expected_paths: Vec<String> = vec![
        file(dir.join("nushell")),
        folder(dir.join("test_a")),
        folder(dir.join("test_b")),
        folder(dir.join("another")),
        file(dir.join(".hidden_file")),
        folder(dir.join(".hidden_folder")),
    ];

    // Match the results
    match_suggestions(expected_paths, suggestions);

    // Test completions for the completions/another folder
    let target_dir = format!("cd {}", folder(dir.join("another")));
    let suggestions = completer.complete(&target_dir, target_dir.len());

    // Create the expected values
    let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];

    // Match the results
    match_suggestions(expected_paths, suggestions);
}

#[test]
fn folder_completions() {
    // Create a new engine
    let (dir, dir_str, engine) = new_engine();

    let stack = Stack::new();

    // Instatiate a new completer
    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);

    // Test completions for the current folder
    let target_dir = format!("cd {}", dir_str);
    let suggestions = completer.complete(&target_dir, target_dir.len());

    // Create the expected values
    let expected_paths: Vec<String> = vec![
        folder(dir.join("test_a")),
        folder(dir.join("test_b")),
        folder(dir.join("another")),
        folder(dir.join(".hidden_folder")),
    ];

    // Match the results
    match_suggestions(expected_paths, suggestions);
}

// creates a new engine with the current path into the completions fixtures folder
pub fn new_engine() -> (PathBuf, String, EngineState) {
    // Target folder inside assets
    let dir = fs::fixtures().join("completions");
    let mut dir_str = dir
        .clone()
        .into_os_string()
        .into_string()
        .unwrap_or_default();
    dir_str.push(SEP);

    // Create a default engine
    (dir.clone(), dir_str, create_default_context(dir))
}

// match a list of suggestions with the expected values
pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
    expected.iter().zip(suggestions).for_each(|it| {
        assert_eq!(it.0, &it.1.value);
    });
}

// append the separator to the converted path
pub fn folder(path: PathBuf) -> String {
    let mut converted_path = file(path);
    converted_path.push(SEP);

    converted_path
}

// convert a given path to string
pub fn file(path: PathBuf) -> String {
    path.into_os_string().into_string().unwrap_or_default()
}
crates/nu-cli/tests/variables_completions.rs (new file, 87 lines)
@ -0,0 +1,87 @@
pub mod support;

use nu_cli::NuCompleter;
use reedline::Completer;
use support::{match_suggestions, new_engine};

#[test]
fn variables_completions() {
    // Create a new engine
    let (dir, _, mut engine, mut stack) = new_engine();

    // Add record value as example
    let record = "let actor = { name: 'Tom Hardy', age: 44 }";
    assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());

    // Instatiate a new completer
    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);

    // Test completions for $nu
    let suggestions = completer.complete("$nu.".into(), 4);

    assert_eq!(8, suggestions.len());

    let expected: Vec<String> = vec![
        "config-path".into(),
        "env-path".into(),
        "history-path".into(),
        "home-path".into(),
        "os-info".into(),
        "pid".into(),
        "scope".into(),
        "temp-path".into(),
    ];

    // Match results
    match_suggestions(expected, suggestions);

    // Test completions for $nu.h (filter)
    let suggestions = completer.complete("$nu.h".into(), 5);

    assert_eq!(2, suggestions.len());

    let expected: Vec<String> = vec!["history-path".into(), "home-path".into()];

    // Match results
    match_suggestions(expected, suggestions);

    // Test completions for custom var
    let suggestions = completer.complete("$actor.".into(), 7);

    assert_eq!(2, suggestions.len());

    let expected: Vec<String> = vec!["age".into(), "name".into()];

    // Match results
    match_suggestions(expected, suggestions);

    // Test completions for custom var (filtering)
    let suggestions = completer.complete("$actor.n".into(), 7);

    assert_eq!(1, suggestions.len());

    let expected: Vec<String> = vec!["name".into()];

    // Match results
    match_suggestions(expected, suggestions);

    // Test completions for $env
    let suggestions = completer.complete("$env.".into(), 5);

    assert_eq!(2, suggestions.len());

    let expected: Vec<String> = vec!["PWD".into(), "TEST".into()];

    // Match results
    match_suggestions(expected, suggestions);

    // Test completions for $env
    let suggestions = completer.complete("$env.T".into(), 5);

    assert_eq!(1, suggestions.len());

    let expected: Vec<String> = vec!["TEST".into()];

    // Match results
    match_suggestions(expected, suggestions);
}
@ -4,11 +4,11 @@ description = "Color configuration code used by Nushell"
 edition = "2021"
 license = "MIT"
 name = "nu-color-config"
-version = "0.62.0"
+version = "0.63.1"

 [dependencies]
-nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.63.1" }
 nu-ansi-term = "0.45.1"
-nu-json = { path = "../nu-json", version = "0.62.0" }
+nu-json = { path = "../nu-json", version = "0.63.1" }
-nu-table = { path = "../nu-table", version = "0.62.0" }
+nu-table = { path = "../nu-table", version = "0.63.1" }
 serde = { version="1.0.123", features=["derive"] }
@ -4,28 +4,29 @@ description = "Nushell's built-in commands"
 edition = "2021"
 license = "MIT"
 name = "nu-command"
-version = "0.62.0"
+version = "0.63.1"
 build = "build.rs"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-nu-color-config = { path = "../nu-color-config", version = "0.62.0" }
+nu-color-config = { path = "../nu-color-config", version = "0.63.1" }
-nu-engine = { path = "../nu-engine", version = "0.62.0" }
+nu-engine = { path = "../nu-engine", version = "0.63.1" }
-nu-glob = { path = "../nu-glob", version = "0.62.0" }
+nu-glob = { path = "../nu-glob", version = "0.63.1" }
-nu-json = { path = "../nu-json", version = "0.62.0" }
+nu-json = { path = "../nu-json", version = "0.63.1" }
-nu-parser = { path = "../nu-parser", version = "0.62.0" }
+nu-parser = { path = "../nu-parser", version = "0.63.1" }
-nu-path = { path = "../nu-path", version = "0.62.0" }
+nu-path = { path = "../nu-path", version = "0.63.1" }
-nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.62.0" }
+nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.63.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.63.1" }
-nu-system = { path = "../nu-system", version = "0.62.0" }
+nu-system = { path = "../nu-system", version = "0.63.1" }
-nu-table = { path = "../nu-table", version = "0.62.0" }
+nu-table = { path = "../nu-table", version = "0.63.1" }
-nu-term-grid = { path = "../nu-term-grid", version = "0.62.0" }
+nu-term-grid = { path = "../nu-term-grid", version = "0.63.1" }
-nu-test-support = { path = "../nu-test-support", version = "0.62.0" }
+nu-test-support = { path = "../nu-test-support", version = "0.63.1" }
-nu-utils = { path = "../nu-utils", version = "0.62.0" }
+nu-utils = { path = "../nu-utils", version = "0.63.1" }
 nu-ansi-term = "0.45.1"

 # Potential dependencies for extras
+alphanumeric-sort = "1.4.4"
 base64 = "0.13.0"
 bytesize = "1.1.0"
 calamine = "0.18.0"

@ -56,6 +57,7 @@ mime = "0.3.16"
 notify = "4.0.17"
 num = { version = "0.4.0", optional = true }
 pathdiff = "0.2.1"
+powierza-coefficient = "1.0"
 quick-xml = "0.22"
 rand = "0.8"
 rayon = "1.5.1"

@ -68,7 +70,8 @@ serde_ini = "0.2.0"
 serde_urlencoded = "0.7.0"
 serde_yaml = "0.8.16"
 sha2 = "0.10.0"
-shadow-rs = "0.11.0"
+# Disable default features b/c the default features build Git (very slow to compile)
+shadow-rs = { version = "0.11.0", default-features = false }
 strip-ansi-escapes = "0.1.1"
 sysinfo = "0.23.5"
 terminal_size = "0.1.17"

@ -79,27 +82,29 @@ unicode-segmentation = "1.8.0"
 url = "2.2.1"
 uuid = { version = "0.8.2", features = ["v4"] }
 which = { version = "4.2.2", optional = true }
-reedline = { version = "0.5.0", features = ["bashisms"]}
+reedline = { git = "https://github.com/nushell/reedline", branch = "main", features = ["bashisms"]}
 wax = { version = "0.4.0", features = ["diagnostics"] }
 rusqlite = { version = "0.27.0", features = ["bundled"], optional = true }
 sqlparser = { version = "0.16.0", features = ["serde"], optional = true }

 [target.'cfg(unix)'.dependencies]
-umask = "1.0.0"
+umask = "2.0.0"
 users = "0.11.0"

 [target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies.trash]
-version = "2.0.2"
+version = "2.1.3"
 optional = true

 [dependencies.polars]
-version = "0.20.0"
+version = "0.21.1"
+# path = "../../../../polars/polars"
 optional = true
 features = [
-    "default", "parquet", "json", "serde", "object",
+    "default", "to_dummies", "parquet", "json", "serde", "serde-lazy",
-    "checked_arithmetic", "strings", "cum_agg", "is_in",
+    "object", "checked_arithmetic", "strings", "cum_agg", "is_in",
     "rolling_window", "strings", "rows", "random",
-    "dtype-datetime"
+    "dtype-datetime", "dtype-struct", "lazy", "cross_join",
+    "dynamic_groupby"
 ]

 [features]

@ -110,7 +115,7 @@ dataframe = ["polars", "num"]
 database = ["sqlparser", "rusqlite"]

 [build-dependencies]
-shadow-rs = "0.11.0"
+shadow-rs = { version = "0.11.0", default-features = false }

 [dev-dependencies]
 hamcrest2 = "0.3.0"
@ -1,3 +1,18 @@
+use std::process::Command;
+
 fn main() -> shadow_rs::SdResult<()> {
+    // Look up the current Git commit ourselves instead of relying on shadow_rs,
+    // because shadow_rs does it in a really slow-to-compile way (it builds libgit2)
+    let hash = get_git_hash().expect("failed to get latest git commit hash");
+    println!("cargo:rustc-env=NU_COMMIT_HASH={}", hash);
+
     shadow_rs::new()
 }
+
+fn get_git_hash() -> Result<String, std::io::Error> {
+    let out = Command::new("git").args(["rev-parse", "HEAD"]).output()?;
+    Ok(String::from_utf8(out.stdout)
+        .expect("could not convert stdout to string")
+        .trim()
+        .to_string())
+}
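The cargo:rustc-env line emitted above makes the freshly resolved commit hash available at compile time to the crate this build script belongs to. A minimal sketch of how crate code could read it (illustrative only, not part of the diff; the commit_hash helper name is hypothetical):

fn commit_hash() -> &'static str {
    // NU_COMMIT_HASH is injected by build.rs via `cargo:rustc-env`;
    // option_env! reads it at compile time and tolerates it being absent.
    option_env!("NU_COMMIT_HASH").unwrap_or("unknown")
}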
crates/nu-command/src/charting/hashable_value.rs (new file, 317 lines)
@ -0,0 +1,317 @@
use chrono::{DateTime, FixedOffset};
use nu_protocol::{ShellError, Span, Value};
use std::hash::{Hash, Hasher};

/// A subset of [Value](crate::Value), which is hashable.
/// And it means that we can put the value into something like [HashMap](std::collections::HashMap) or [HashSet](std::collections::HashSet)
/// for further usage like value statistics.
///
/// For now the main way to crate a [HashableValue] is using [from_value](HashableValue::from_value)
///
/// Please note that although each variant contains `span` field, but during hashing, this field will not be concerned.
/// Which means that the following will be true:
/// ```text
/// assert_eq!(HashableValue::Bool {val: true, span: Span{start: 0, end: 1}}, HashableValue::Bool {val: true, span: Span{start: 90, end: 1000}})
/// ```
#[derive(Eq, Debug)]
pub enum HashableValue {
    Bool {
        val: bool,
        span: Span,
    },
    Int {
        val: i64,
        span: Span,
    },
    Float {
        val: [u8; 8], // because f64 is not hashable, we save it as [u8;8] array to make it hashable.
        span: Span,
    },
    Filesize {
        val: i64,
        span: Span,
    },
    Duration {
        val: i64,
        span: Span,
    },
    Date {
        val: DateTime<FixedOffset>,
        span: Span,
    },
    String {
        val: String,
        span: Span,
    },
    Binary {
        val: Vec<u8>,
        span: Span,
    },
}

impl Default for HashableValue {
    fn default() -> Self {
        HashableValue::Bool {
            val: false,
            span: Span { start: 0, end: 0 },
        }
    }
}

impl HashableValue {
    /// Try to convert from `value` to self
    ///
    /// A `span` is required because when there is an error in value, it may not contain `span` field.
    ///
    /// If the given value is not hashable(mainly because of it is structured data), an error will returned.
    pub fn from_value(value: Value, span: Span) -> Result<Self, ShellError> {
        match value {
            Value::Bool { val, span } => Ok(HashableValue::Bool { val, span }),
            Value::Int { val, span } => Ok(HashableValue::Int { val, span }),
            Value::Filesize { val, span } => Ok(HashableValue::Filesize { val, span }),
            Value::Duration { val, span } => Ok(HashableValue::Duration { val, span }),
            Value::Date { val, span } => Ok(HashableValue::Date { val, span }),
            Value::Float { val, span } => Ok(HashableValue::Float {
                val: val.to_ne_bytes(),
                span,
            }),
            Value::String { val, span } => Ok(HashableValue::String { val, span }),
            Value::Binary { val, span } => Ok(HashableValue::Binary { val, span }),

            _ => {
                let input_span = value.span().unwrap_or(span);
                Err(ShellError::UnsupportedInput(
                    format!("input value {value:?} is not hashable"),
                    input_span,
                ))
            }
        }
    }

    /// Convert from self to nu's core data type `Value`.
    pub fn into_value(self) -> Value {
        match self {
            HashableValue::Bool { val, span } => Value::Bool { val, span },
            HashableValue::Int { val, span } => Value::Int { val, span },
            HashableValue::Filesize { val, span } => Value::Filesize { val, span },
            HashableValue::Duration { val, span } => Value::Duration { val, span },
            HashableValue::Date { val, span } => Value::Date { val, span },
            HashableValue::Float { val, span } => Value::Float {
                val: f64::from_ne_bytes(val),
                span,
            },
            HashableValue::String { val, span } => Value::String { val, span },
            HashableValue::Binary { val, span } => Value::Binary { val, span },
        }
    }
}

impl Hash for HashableValue {
    fn hash<H: Hasher>(&self, state: &mut H) {
        match self {
            HashableValue::Bool { val, .. } => val.hash(state),
            HashableValue::Int { val, .. } => val.hash(state),
            HashableValue::Filesize { val, .. } => val.hash(state),
            HashableValue::Duration { val, .. } => val.hash(state),
            HashableValue::Date { val, .. } => val.hash(state),
            HashableValue::Float { val, .. } => val.hash(state),
            HashableValue::String { val, .. } => val.hash(state),
            HashableValue::Binary { val, .. } => val.hash(state),
        }
    }
}

impl PartialEq for HashableValue {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (HashableValue::Bool { val: lhs, .. }, HashableValue::Bool { val: rhs, .. }) => {
                lhs == rhs
            }
            (HashableValue::Int { val: lhs, .. }, HashableValue::Int { val: rhs, .. }) => {
                lhs == rhs
            }
            (
                HashableValue::Filesize { val: lhs, .. },
                HashableValue::Filesize { val: rhs, .. },
            ) => lhs == rhs,
            (
                HashableValue::Duration { val: lhs, .. },
                HashableValue::Duration { val: rhs, .. },
            ) => lhs == rhs,
            (HashableValue::Date { val: lhs, .. }, HashableValue::Date { val: rhs, .. }) => {
                lhs == rhs
            }
            (HashableValue::Float { val: lhs, .. }, HashableValue::Float { val: rhs, .. }) => {
                lhs == rhs
            }
            (HashableValue::String { val: lhs, .. }, HashableValue::String { val: rhs, .. }) => {
                lhs == rhs
            }
            (HashableValue::Binary { val: lhs, .. }, HashableValue::Binary { val: rhs, .. }) => {
                lhs == rhs
            }
            _ => false,
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use nu_protocol::ast::{CellPath, PathMember};
    use std::collections::{HashMap, HashSet};

    #[test]
    fn from_value() {
        let span = Span::test_data();
        let values = vec![
            (
                Value::Bool { val: true, span },
                HashableValue::Bool { val: true, span },
            ),
            (
                Value::Int { val: 1, span },
                HashableValue::Int { val: 1, span },
            ),
            (
                Value::Filesize { val: 1, span },
                HashableValue::Filesize { val: 1, span },
            ),
            (
                Value::Duration { val: 1, span },
                HashableValue::Duration { val: 1, span },
            ),
            (
                Value::Date {
                    val: DateTime::<FixedOffset>::parse_from_rfc2822(
                        "Wed, 18 Feb 2015 23:16:09 GMT",
                    )
                    .unwrap(),
                    span,
                },
                HashableValue::Date {
                    val: DateTime::<FixedOffset>::parse_from_rfc2822(
                        "Wed, 18 Feb 2015 23:16:09 GMT",
                    )
                    .unwrap(),
                    span,
                },
            ),
            (
                Value::String {
                    val: "1".to_string(),
                    span,
                },
                HashableValue::String {
                    val: "1".to_string(),
                    span,
                },
            ),
            (
                Value::Binary { val: vec![1], span },
                HashableValue::Binary { val: vec![1], span },
            ),
        ];
        for (val, expect_hashable_val) in values.into_iter() {
            assert_eq!(
                HashableValue::from_value(val, Span { start: 0, end: 0 }).unwrap(),
                expect_hashable_val
            );
        }
    }

    #[test]
    fn from_unhashable_value() {
        let span = Span::test_data();
        let values = [
            Value::List {
                vals: vec![Value::Bool { val: true, span }],
                span,
            },
            Value::Block {
                val: 0,
                captures: HashMap::new(),
                span,
            },
            Value::Nothing { span },
            Value::Error {
                error: ShellError::DidYouMean("what?".to_string(), span),
            },
            Value::CellPath {
                val: CellPath {
                    members: vec![PathMember::Int { val: 0, span }],
                },
                span,
            },
        ];
        for v in values {
            assert!(HashableValue::from_value(v, Span { start: 0, end: 0 }).is_err())
        }
    }

    #[test]
    fn from_to_tobe_same() {
        let span = Span::test_data();
        let values = vec![
            Value::Bool { val: true, span },
            Value::Int { val: 1, span },
            Value::Filesize { val: 1, span },
            Value::Duration { val: 1, span },
            Value::String {
                val: "1".to_string(),
                span,
            },
            Value::Binary { val: vec![1], span },
        ];
        for val in values.into_iter() {
            let expected_val = val.clone();
            assert_eq!(
                HashableValue::from_value(val, Span { start: 0, end: 0 })
                    .unwrap()
                    .into_value(),
                expected_val
            );
        }
    }

    #[test]
    fn hashable_value_eq_without_concern_span() {
        assert_eq!(
            HashableValue::Bool {
                val: true,
                span: Span { start: 0, end: 1 }
            },
            HashableValue::Bool {
                val: true,
                span: Span {
                    start: 90,
                    end: 1000
                }
            }
        )
    }

    #[test]
    fn put_to_hashset() {
        let span = Span::test_data();
        let mut set = HashSet::new();
        set.insert(HashableValue::Bool { val: true, span });
        assert!(set.contains(&HashableValue::Bool { val: true, span }));

        // hashable value doesn't care about span.
        let diff_span = Span { start: 1, end: 2 };
        set.insert(HashableValue::Bool {
            val: true,
            span: diff_span,
        });
        assert!(set.contains(&HashableValue::Bool { val: true, span }));
        assert!(set.contains(&HashableValue::Bool {
            val: true,
            span: diff_span
        }));
        assert_eq!(set.len(), 1);

        set.insert(HashableValue::Int { val: 2, span });
        assert_eq!(set.len(), 2);
    }
}
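The Float variant above stores raw bytes because f64 implements neither Hash nor Eq in Rust. A small standalone sketch of the byte-representation trick it relies on (illustrative only, not part of the diff):

use std::collections::HashMap;

fn main() {
    // to_ne_bytes/from_ne_bytes round-trip losslessly, and [u8; 8] is hashable.
    let x: f64 = 0.25;
    let bytes = x.to_ne_bytes();
    assert_eq!(f64::from_ne_bytes(bytes), x);

    // So the bytes can serve as a map key where the float itself cannot.
    let mut counts: HashMap<[u8; 8], usize> = HashMap::new();
    *counts.entry(bytes).or_insert(0) += 1;
    assert_eq!(counts[&x.to_ne_bytes()], 1);
}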
crates/nu-command/src/charting/histogram.rs (new file, 256 lines)
@ -0,0 +1,256 @@
use super::hashable_value::HashableValue;
use nu_engine::CallExt;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
    Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape,
    Value,
};
use std::collections::HashMap;
use std::iter;

#[derive(Clone)]
pub struct Histogram;

enum PercentageCalcMethod {
    Normalize,
    Relative,
}

impl Command for Histogram {
    fn name(&self) -> &str {
        "histogram"
    }

    fn signature(&self) -> Signature {
        Signature::build("histogram")
            .optional("column-name", SyntaxShape::String, "column name to calc frequency, no need to provide if input is just a list")
            .optional("frequency-column-name", SyntaxShape::String, "histogram's frequency column, default to be frequency column output")
            .named("percentage-type", SyntaxShape::String, "percentage calculate method, can be 'normalize' or 'relative', in 'normalize', defaults to be 'normalize'", Some('t'))
    }

    fn usage(&self) -> &str {
        "Creates a new table with a histogram based on the column name passed in."
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Get a histogram for the types of files",
                example: "ls | histogram type",
                result: None,
            },
            Example {
                description:
                    "Get a histogram for the types of files, with frequency column named freq",
                example: "ls | histogram type freq",
                result: None,
            },
            Example {
                description: "Get a histogram for a list of numbers",
                example: "echo [1 2 3 1 1 1 2 2 1 1] | histogram",
                result: None,
            },
            Example {
                description: "Get a histogram for a list of numbers, and percentage is based on the maximum value",
                example: "echo [1 2 3 1 1 1 2 2 1 1] | histogram --percentage-type relative",
                result: None,
            }
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        // input check.
        let column_name: Option<Spanned<String>> = call.opt(engine_state, stack, 0)?;
        let frequency_name_arg = call.opt::<Spanned<String>>(engine_state, stack, 1)?;
        let frequency_column_name = match frequency_name_arg {
            Some(inner) => {
                let span = inner.span;
                if ["value", "count", "quantile", "percentage"].contains(&inner.item.as_str()) {
                    return Err(ShellError::UnsupportedInput(
                        "frequency-column-name can't be 'value', 'count' or 'percentage'"
                            .to_string(),
                        span,
                    ));
                }
                inner.item
            }
            None => "frequency".to_string(),
        };

        let calc_method: Option<Spanned<String>> =
            call.get_flag(engine_state, stack, "percentage-type")?;
        let calc_method = match calc_method {
            None => PercentageCalcMethod::Normalize,
            Some(inner) => match inner.item.as_str() {
                "normalize" => PercentageCalcMethod::Normalize,
                "relative" => PercentageCalcMethod::Relative,
                _ => {
                    return Err(ShellError::UnsupportedInput(
                        "calc method can only be 'normalize' or 'relative'".to_string(),
                        inner.span,
                    ))
                }
            },
        };

        let span = call.head;
        let data_as_value = input.into_value(span);
        // `input` is not a list, here we can return an error.
        match data_as_value.as_list() {
            Ok(list_value) => run_histogram(
                list_value.to_vec(),
                column_name,
                frequency_column_name,
                calc_method,
                span,
            ),
            Err(e) => Err(e),
        }
    }
}

fn run_histogram(
    values: Vec<Value>,
    column_name: Option<Spanned<String>>,
    freq_column: String,
    calc_method: PercentageCalcMethod,
    head_span: Span,
) -> Result<PipelineData, ShellError> {
    let mut inputs = vec![];
    // convert from inputs to hashable values.
    match column_name {
        None => {
            // some invalid input scenario needs to handle:
            // Expect input is a list of hashable value, if one value is not hashable, throw out error.
            for v in values {
                let current_span = v.span().unwrap_or(head_span);
                inputs.push(HashableValue::from_value(v, head_span).map_err(|_| {
                    ShellError::UnsupportedInput(
                        "--column-name is not provided, can only support a list of simple value."
                            .to_string(),
                        current_span,
                    )
                })?);
            }
        }
        Some(ref col) => {
            // some invalid input scenario needs to handle:
            // * item in `input` is not a record, just skip it.
            // * a record doesn't contain specific column, just skip it.
            // * all records don't contain specific column, throw out error, indicate at least one row should contains specific column.
            // * a record contain a value which can't be hashed, skip it.
            let col_name = &col.item;
            for v in values {
                match v {
                    // parse record, and fill valid value to actual input.
                    Value::Record { cols, vals, .. } => {
                        for (c, v) in iter::zip(cols, vals) {
                            if &c == col_name {
                                if let Ok(v) = HashableValue::from_value(v, head_span) {
                                    inputs.push(v);
                                }
                            }
                        }
                    }
                    _ => continue,
                }
            }

            if inputs.is_empty() {
                return Err(ShellError::UnsupportedInput(
                    format!("expect input is table, and inputs doesn't contain any value which has {col_name} column"),
                    head_span,
                ));
            }
        }
    }

    let value_column_name = column_name
        .map(|x| x.item)
        .unwrap_or_else(|| "value".to_string());
    Ok(histogram_impl(
        inputs,
        &value_column_name,
        calc_method,
        &freq_column,
        head_span,
    ))
}

fn histogram_impl(
    inputs: Vec<HashableValue>,
    value_column_name: &str,
    calc_method: PercentageCalcMethod,
    freq_column: &str,
    span: Span,
) -> PipelineData {
    // here we can make sure that inputs is not empty, and every elements
    // is a simple val and ok to make count.
    let mut counter = HashMap::new();
    let mut max_cnt = 0;
    let total_cnt = inputs.len();
    for i in inputs {
        let new_cnt = *counter.get(&i).unwrap_or(&0) + 1;
        counter.insert(i, new_cnt);
        if new_cnt > max_cnt {
            max_cnt = new_cnt;
        }
    }

    let mut result = vec![];
    let result_cols = vec![
        value_column_name.to_string(),
        "count".to_string(),
        "quantile".to_string(),
        "percentage".to_string(),
        freq_column.to_string(),
    ];
    const MAX_FREQ_COUNT: f64 = 100.0;
    for (val, count) in counter.into_iter() {
        let quantile = match calc_method {
            PercentageCalcMethod::Normalize => (count as f64 / total_cnt as f64),
            PercentageCalcMethod::Relative => (count as f64 / max_cnt as f64),
        };

        let percentage = format!("{:.2}%", quantile * 100_f64);
        let freq = "*".repeat((MAX_FREQ_COUNT * quantile).floor() as usize);

        result.push(Value::Record {
            cols: result_cols.clone(),
            vals: vec![
                val.into_value(),
                Value::Int { val: count, span },
                Value::Float {
                    val: quantile,
                    span,
                },
                Value::String {
                    val: percentage,
                    span,
                },
                Value::String { val: freq, span },
            ],
            span,
        });
    }
    Value::List { vals: result, span }.into_pipeline_data()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_examples() {
        use crate::test_examples;

        test_examples(Histogram)
    }
}
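As a worked example of the two percentage modes in histogram_impl above (illustrative only, not part of the diff): for echo [1 2 3 1 1 1 2 2 1 1] | histogram, the value 1 occurs 6 times out of 10 inputs, and 6 is also the largest per-value count, so 'normalize' reports 60% while 'relative' reports 100%:

fn main() {
    let (count, total, max) = (6_f64, 10_f64, 6_f64);

    let normalize = count / total; // default --percentage-type
    let relative = count / max;    // --percentage-type relative

    assert_eq!(format!("{:.2}%", normalize * 100.0), "60.00%");
    assert_eq!(format!("{:.2}%", relative * 100.0), "100.00%");

    // The frequency column draws one '*' per (floored) percentage point.
    assert_eq!("*".repeat((100.0 * normalize).floor() as usize).len(), 60);
}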
crates/nu-command/src/charting/mod.rs (new file, 4 lines)
@ -0,0 +1,4 @@
mod hashable_value;
mod histogram;

pub use histogram::Histogram;
@ -24,13 +24,13 @@ enum Zone {
     Local,
     East(u8),
     West(u8),
-    Error, // we want the nullshell to cast it instead of rust
+    Error, // we want Nushell to cast it instead of Rust
 }

 impl Zone {
     fn new(i: i64) -> Self {
         if i.abs() <= 12 {
-            // guanranteed here
+            // guaranteed here
             if i >= 0 {
                 Self::East(i as u8) // won't go out of range
             } else {

@ -59,29 +59,29 @@ impl Command for SubCommand {

     fn signature(&self) -> Signature {
         Signature::build("into datetime")
-            .switch(
-                "list",
-                "lists strftime cheatsheet",
-                Some('l'),
-            )
             .named(
                 "timezone",
                 SyntaxShape::String,
-                "Specify timezone if the input is timestamp, like 'UTC/u' or 'LOCAL/l'",
+                "Specify timezone if the input is a Unix timestamp. Valid options: 'UTC' ('u') or 'LOCAL' ('l')",
                 Some('z'),
             )
             .named(
                 "offset",
                 SyntaxShape::Int,
-                "Specify timezone by offset if the input is timestamp, like '+8', '-4', prior than timezone",
+                "Specify timezone by offset from UTC if the input is a Unix timestamp, like '+8', '-4'",
                 Some('o'),
             )
             .named(
                 "format",
                 SyntaxShape::String,
-                "Specify date and time formatting",
+                "Specify an expected format for parsing strings to datetimes. Use --list to see all possible options",
                 Some('f'),
             )
+            .switch(
+                "list",
+                "Show all possible variables for use with the --format flag",
+                Some('l'),
+            )
             .rest(
                 "rest",
                 SyntaxShape::CellPath,

@ -112,28 +112,40 @@ impl Command for SubCommand {
         vec![
             Example {
                 description: "Convert to datetime",
-                example: "'16.11.1984 8:00 am +0000' | into datetime",
+                example: "'27.02.2021 1:55 pm +0000' | into datetime",
-                result: None,
+                result: Some(Value::Date {
+                    val: Utc.timestamp(1614434100, 0).into(),
+                    span: Span::test_data(),
+                }),
             },
             Example {
                 description: "Convert to datetime",
-                example: "'2020-08-04T16:39:18+00:00' | into datetime",
+                example: "'2021-02-27T13:55:40+00:00' | into datetime",
-                result: None,
+                result: Some(Value::Date {
+                    val: Utc.timestamp(1614434140, 0).into(),
+                    span: Span::test_data(),
+                }),
             },
             Example {
                 description: "Convert to datetime using a custom format",
-                example: "'20200904_163918+0000' | into datetime -f '%Y%m%d_%H%M%S%z'",
+                example: "'20210227_135540+0000' | into datetime -f '%Y%m%d_%H%M%S%z'",
-                result: None,
+                result: Some(Value::Date {
+                    val: Utc.timestamp(1614434140, 0).into(),
+                    span: Span::test_data(),
+                }),
             },
             Example {
-                description: "Convert timestamp (no larger than 8e+12) to datetime using a specified timezone",
+                description: "Convert timestamp (no larger than 8e+12) to a UTC datetime",
-                example: "'1614434140' | into datetime -z 'UTC'",
+                example: "1614434140 | into datetime",
-                result: None,
+                result: Some(Value::Date {
+                    val: Utc.timestamp(1614434140, 0).into(),
+                    span: Span::test_data(),
+                }),
             },
             Example {
                 description:
                     "Convert timestamp (no larger than 8e+12) to datetime using a specified timezone offset (between -12 and 12)",
-                example: "'1614434140' | into datetime -o +9",
+                example: "1614434140 | into datetime -o +9",
                 result: None,
             },
         ]

@ -209,58 +221,78 @@ fn action(
     dateformat: &Option<DatetimeFormat>,
     head: Span,
 ) -> Value {
-    match input {
-        Value::String { val: s, span } => {
-            let ts = s.parse::<i64>();
-            // if timezone if specified, first check if the input is a timestamp.
-            if let Some(tz) = timezone {
-                const TIMESTAMP_BOUND: i64 = 8.2e+12 as i64;
-                // Since the timestamp method of chrono itself don't throw an error (it just panicked)
-                // We have to manually guard it.
-                if let Ok(t) = ts {
-                    if t.abs() > TIMESTAMP_BOUND {
-                        return Value::Error{error: ShellError::UnsupportedInput(
-                            "Given timestamp is out of range, it should between -8e+12 and 8e+12".to_string(),
-                            head,
-                        )};
-                    }
-                    const HOUR: i32 = 3600;
-                    let stampout = match tz.item {
-                        Zone::Utc => Value::Date {
-                            val: Utc.timestamp(t, 0).into(),
-                            span: head,
-                        },
-                        Zone::Local => Value::Date {
-                            val: Local.timestamp(t, 0).into(),
-                            span: head,
-                        },
-                        Zone::East(i) => {
-                            let eastoffset = FixedOffset::east((i as i32) * HOUR);
-                            Value::Date {
-                                val: eastoffset.timestamp(t, 0),
-                                span: head,
-                            }
-                        }
-                        Zone::West(i) => {
-                            let westoffset = FixedOffset::west((i as i32) * HOUR);
-                            Value::Date {
-                                val: westoffset.timestamp(t, 0),
-                                span: head,
-                            }
-                        }
-                        Zone::Error => Value::Error {
-                            error: ShellError::UnsupportedInput(
-                                "Cannot convert given timezone or offset to timestamp".to_string(),
-                                tz.span,
-                            ),
-                        },
-                    };
-                    return stampout;
-                }
-            };
-            // if it's not, continue and default to the system's local timezone.
-            let out = match dateformat {
-                Some(dt) => match DateTime::parse_from_str(s, &dt.0) {
+    // Check to see if input looks like a Unix timestamp (i.e. can it be parsed to an int?)
+    let timestamp = match input {
+        Value::Int { val, .. } => Ok(*val),
+        Value::String { val, .. } => val.parse::<i64>(),
+        other => {
+            return Value::Error {
+                error: ShellError::UnsupportedInput(
+                    format!("Expected string or int, got {} instead", other.get_type()),
+                    head,
+                ),
+            };
+        }
+    };
+
+    if let Ok(ts) = timestamp {
+        const TIMESTAMP_BOUND: i64 = 8.2e+12 as i64;
+        const HOUR: i32 = 3600;
+
+        if ts.abs() > TIMESTAMP_BOUND {
+            return Value::Error {
+                error: ShellError::UnsupportedInput(
+                    "Given timestamp is out of range, it should between -8e+12 and 8e+12"
+                        .to_string(),
+                    head,
+                ),
+            };
+        }
+
+        return match timezone {
+            // default to UTC
+            None => Value::Date {
+                val: Utc.timestamp(ts, 0).into(),
+                span: head,
+            },
+            Some(Spanned { item, span }) => match item {
+                Zone::Utc => Value::Date {
+                    val: Utc.timestamp(ts, 0).into(),
+                    span: head,
+                },
+                Zone::Local => Value::Date {
+                    val: Local.timestamp(ts, 0).into(),
+                    span: head,
+                },
+                Zone::East(i) => {
+                    let eastoffset = FixedOffset::east((*i as i32) * HOUR);
+                    Value::Date {
+                        val: eastoffset.timestamp(ts, 0),
+                        span: head,
+                    }
+                }
+                Zone::West(i) => {
+                    let westoffset = FixedOffset::west((*i as i32) * HOUR);
+                    Value::Date {
+                        val: westoffset.timestamp(ts, 0),
+                        span: head,
+                    }
+                }
+                Zone::Error => Value::Error {
+                    error: ShellError::UnsupportedInput(
+                        "Cannot convert given timezone or offset to timestamp".to_string(),
+                        *span,
+                    ),
+                },
+            },
+        };
+    }
+
+    // If input is not a timestamp, try parsing it as a string
+    match input {
+        Value::String { val, span } => {
+            match dateformat {
+                Some(dt) => match DateTime::parse_from_str(val, &dt.0) {
                     Ok(d) => Value::Date { val: d, span: head },
                     Err(reason) => {
                         return Value::Error {

@ -276,23 +308,21 @@ fn action(
                 // Tries to automatically parse the date
                 // (i.e. without a format string)
                 // and assumes the system's local timezone if none is specified
-                None => match parse_date_from_string(s, *span) {
+                None => match parse_date_from_string(val, *span) {
                     Ok(date) => Value::Date {
                         val: date,
                         span: *span,
                     },
                     Err(err) => err,
                 },
-            };
-
-            out
-        }
-        other => {
-            let got = format!("Expected string, got {} instead", other.get_type());
-            Value::Error {
-                error: ShellError::UnsupportedInput(got, head),
-            }
-        }
+            }
+        }
+        other => Value::Error {
+            error: ShellError::UnsupportedInput(
+                format!("Expected string, got {} instead", other.get_type()),
+                head,
+            ),
+        },
     }
 }

@ -351,6 +381,23 @@ mod tests {
         assert_eq!(actual, expected)
     }

+    #[test]
+    fn takes_timestamp_offset_as_int() {
+        let date_int = Value::test_int(1614434140);
+        let timezone_option = Some(Spanned {
+            item: Zone::East(8),
+            span: Span::test_data(),
+        });
+        let actual = action(&date_int, &timezone_option, &None, Span::test_data());
+        let expected = Value::Date {
+            val: DateTime::parse_from_str("2021-02-27 21:55:40 +08:00", "%Y-%m-%d %H:%M:%S %z")
+                .unwrap(),
+            span: Span::test_data(),
+        };
+
+        assert_eq!(actual, expected)
+    }
+
     #[test]
     fn takes_timestamp() {
         let date_str = Value::test_string("1614434140");

@ -367,6 +414,20 @@ mod tests {
         assert_eq!(actual, expected)
     }

+    #[test]
+    fn takes_timestamp_without_timezone() {
+        let date_str = Value::test_string("1614434140");
+        let timezone_option = None;
+        let actual = action(&date_str, &timezone_option, &None, Span::test_data());
+
+        let expected = Value::Date {
+            val: Utc.timestamp(1614434140, 0).into(),
+            span: Span::test_data(),
+        };
+
+        assert_eq!(actual, expected)
+    }
+
     #[test]
     fn takes_invalid_timestamp() {
         let date_str = Value::test_string("10440970000000");
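For reference, 1614434140 is the Unix timestamp used throughout the new examples and tests above; with no timezone flag the rewritten action now defaults to UTC. A standalone chrono check (illustrative only, not part of the diff):

use chrono::{TimeZone, Utc};

fn main() {
    let dt = Utc.timestamp(1614434140, 0);
    println!("{}", dt); // prints: 2021-02-27 13:55:40 UTC
}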
@ -133,6 +133,10 @@ pub fn action(input: &Value, span: Span) -> Value {
             },
             Err(error) => Value::Error { error },
         },
+        Value::Nothing { .. } => Value::Filesize {
+            val: 0,
+            span: value_span,
+        },
         _ => Value::Error {
             error: ShellError::UnsupportedInput(
                 "'into filesize' for unsupported type".into(),
@ -85,6 +85,11 @@ impl Command for SubCommand {
                     span: Span::test_data(),
                 }),
             },
+            Example {
+                description: "Convert date to integer (Unix timestamp)",
+                example: "2022-02-02 | into int",
+                result: Some(Value::test_int(1643760000)),
+            },
             Example {
                 description: "Convert to integer from binary",
                 example: "'1101' | into int -r 2",

@ -181,6 +186,10 @@ pub fn action(input: &Value, span: Span, radix: u32) -> Value {
                 Value::Int { val: 0, span }
             }
         }
+        Value::Date { val, .. } => Value::Int {
+            val: val.timestamp(),
+            span,
+        },
         _ => Value::Error {
             error: ShellError::UnsupportedInput("'into int' for unsupported type".into(), span),
         },
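The new Value::Date arm delegates to chrono's timestamp(), which is what makes the 2022-02-02 | into int example yield 1643760000. A standalone check (illustrative only, not part of the diff; Utc.ymd(...).and_hms(...) is the chrono 0.4 constructor):

use chrono::{TimeZone, Utc};

fn main() {
    // Midnight UTC on 2022-02-02 is 1643760000 seconds since the Unix epoch.
    let date = Utc.ymd(2022, 2, 2).and_hms(0, 0, 0);
    assert_eq!(date.timestamp(), 1643760000);
}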
|
@ -248,7 +248,7 @@ pub fn action(
|
|||||||
span,
|
span,
|
||||||
},
|
},
|
||||||
Value::Nothing { .. } => Value::String {
|
Value::Nothing { .. } => Value::String {
|
||||||
val: "nothing".to_string(),
|
val: "".to_string(),
|
||||||
span,
|
span,
|
||||||
},
|
},
|
||||||
Value::Record {
|
Value::Record {
|
||||||
|
@ -26,8 +26,8 @@ impl Command for Alias {
     }

     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }

     fn is_parser_keyword(&self) -> bool {

@ -27,8 +27,8 @@ impl Command for Def {
     }

     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }

     fn is_parser_keyword(&self) -> bool {

@ -27,8 +27,8 @@ impl Command for DefEnv {
     }

     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }

     fn is_parser_keyword(&self) -> bool {
@ -13,7 +13,7 @@ impl Command for Describe {
     }

     fn usage(&self) -> &str {
-        "Describe the value(s) piped in."
+        "Describe the type and structure of the value(s) piped in."
     }

     fn signature(&self) -> Signature {

@ -55,6 +55,10 @@ impl Command for Describe {
             result: Some(Value::test_string("string")),
         }]
     }
+
+    fn search_terms(&self) -> Vec<&str> {
+        vec!["type", "typeof", "info", "structure"]
+    }
 }

 #[cfg(test)]
@ -24,6 +24,10 @@ impl Command for ErrorMake {
         "Create an error."
     }

+    fn search_terms(&self) -> Vec<&str> {
+        vec!["err", "panic", "crash", "throw"]
+    }
+
     fn run(
         &self,
         engine_state: &EngineState,

@ -36,7 +40,7 @@ impl Command for ErrorMake {
         let arg: Option<Value> = call.opt(engine_state, stack, 0)?;

         if let Some(arg) = arg {
-            Ok(make_error(&arg)
+            Ok(make_error(&arg, span)
                 .map(|err| Value::Error { error: err })
                 .unwrap_or_else(|| Value::Error {
                     error: ShellError::GenericError(

@ -51,7 +55,7 @@ impl Command for ErrorMake {
         } else {
             input.map(
                 move |value| {
-                    make_error(&value)
+                    make_error(&value, span)
                         .map(|err| Value::Error { error: err })
                         .unwrap_or_else(|| Value::Error {
                             error: ShellError::GenericError(

@ -89,7 +93,7 @@ impl Command for ErrorMake {
     }
 }

-fn make_error(value: &Value) -> Option<ShellError> {
+fn make_error(value: &Value, throw_span: Span) -> Option<ShellError> {
     if let Value::Record { .. } = &value {
         let msg = value.get_data_by_key("msg");
         let label = value.get_data_by_key("label");

@ -117,13 +121,26 @@ fn make_error(value: &Value) -> Option<ShellError> {
                 None,
                 Vec::new(),
             )),
+            (
+                None,
+                None,
+                Some(Value::String {
+                    val: label_text, ..
+                }),
+            ) => Some(ShellError::GenericError(
+                message,
+                label_text,
+                Some(throw_span),
+                None,
+                Vec::new(),
+            )),
             _ => None,
         }
     }
     (Some(Value::String { val: message, .. }), None) => Some(ShellError::GenericError(
         message,
-        "".to_string(),
+        "originates from here".to_string(),
-        None,
+        Some(throw_span),
         None,
         Vec::new(),
     )),
@ -22,8 +22,8 @@ impl Command for ExportCommand {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -26,8 +26,8 @@ impl Command for ExportAlias {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -27,8 +27,8 @@ impl Command for ExportDef {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -27,8 +27,8 @@ impl Command for ExportDefEnv {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -30,8 +30,8 @@ impl Command for ExportEnv {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -22,8 +22,8 @@ impl Command for ExportExtern {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@@ -22,8 +22,8 @@ impl Command for Extern {
     }
 
     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
@@ -45,8 +45,8 @@ impl Command for For {
     }
 
     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
@@ -25,9 +25,8 @@ impl Command for Hide {
     fn extra_usage(&self) -> &str {
         r#"Symbols are hidden by priority: First aliases, then custom commands, then environment variables.
 
-This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages
-"#
+This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
@@ -63,23 +62,23 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
             return Err(ShellError::NonUtf8(import_pattern.head.span));
         };
 
-        if let Some(overlay_id) = engine_state.find_overlay(&import_pattern.head.name) {
+        if let Some(module_id) = engine_state.find_module(&import_pattern.head.name, &[]) {
             // The first word is a module
-            let overlay = engine_state.get_overlay(overlay_id);
+            let module = engine_state.get_module(module_id);
 
             let env_vars_to_hide = if import_pattern.members.is_empty() {
-                overlay.env_vars_with_head(&import_pattern.head.name)
+                module.env_vars_with_head(&import_pattern.head.name)
             } else {
                 match &import_pattern.members[0] {
-                    ImportPatternMember::Glob { .. } => overlay.env_vars(),
+                    ImportPatternMember::Glob { .. } => module.env_vars(),
                     ImportPatternMember::Name { name, span } => {
                         let mut output = vec![];
 
                         if let Some((name, id)) =
-                            overlay.env_var_with_head(name, &import_pattern.head.name)
+                            module.env_var_with_head(name, &import_pattern.head.name)
                         {
                             output.push((name, id));
-                        } else if !(overlay.has_alias(name) || overlay.has_decl(name)) {
+                        } else if !(module.has_alias(name) || module.has_decl(name)) {
                             return Err(ShellError::EnvVarNotFoundAtRuntime(
                                 String::from_utf8_lossy(name).into(),
                                 *span,
@@ -93,10 +92,10 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
 
                         for (name, span) in names {
                             if let Some((name, id)) =
-                                overlay.env_var_with_head(name, &import_pattern.head.name)
+                                module.env_var_with_head(name, &import_pattern.head.name)
                             {
                                 output.push((name, id));
-                            } else if !(overlay.has_alias(name) || overlay.has_decl(name)) {
+                            } else if !(module.has_alias(name) || module.has_decl(name)) {
                                 return Err(ShellError::EnvVarNotFoundAtRuntime(
                                     String::from_utf8_lossy(name).into(),
                                     *span,
@@ -34,8 +34,8 @@ impl Command for If {
     }
 
     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
@@ -27,8 +27,8 @@ impl Command for Let {
     }
 
     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
@@ -22,6 +22,7 @@ mod ignore;
 mod let_;
 mod metadata;
 mod module;
+pub(crate) mod overlay;
 mod source;
 mod tutor;
 mod use_;
@@ -51,6 +52,7 @@ pub use ignore::Ignore;
 pub use let_::Let;
 pub use metadata::Metadata;
 pub use module::Module;
+pub use overlay::*;
 pub use source::Source;
 pub use tutor::Tutor;
 pub use use_::Use;
@@ -26,8 +26,8 @@ impl Command for Module {
     }
 
     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
148
crates/nu-command/src/core_commands/overlay/add.rs
Normal file
@ -0,0 +1,148 @@
|
|||||||
|
use nu_engine::{eval_block, CallExt};
|
||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape};
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OverlayAdd;
|
||||||
|
|
||||||
|
impl Command for OverlayAdd {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay add"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Add definitions from a module as an overlay"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("overlay add")
|
||||||
|
.required(
|
||||||
|
"name",
|
||||||
|
SyntaxShape::String,
|
||||||
|
"Module name to create overlay for",
|
||||||
|
)
|
||||||
|
// TODO:
|
||||||
|
// .switch(
|
||||||
|
// "prefix",
|
||||||
|
// "Prepend module name to the imported symbols",
|
||||||
|
// Some('p'),
|
||||||
|
// )
|
||||||
|
.category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
r#"This command is a parser keyword. For details, check:
|
||||||
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let name_arg: Spanned<String> = call.req(engine_state, stack, 0)?;
|
||||||
|
|
||||||
|
let maybe_overlay_name = if engine_state
|
||||||
|
.find_overlay(name_arg.item.as_bytes())
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
Some(name_arg.item.clone())
|
||||||
|
} else if let Some(os_str) = Path::new(&name_arg.item).file_stem() {
|
||||||
|
os_str.to_str().map(|name| name.to_string())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(overlay_name) = maybe_overlay_name {
|
||||||
|
if let Some(overlay_id) = engine_state.find_overlay(overlay_name.as_bytes()) {
|
||||||
|
let old_module_id = engine_state.get_overlay(overlay_id).origin;
|
||||||
|
|
||||||
|
stack.add_overlay(overlay_name.clone());
|
||||||
|
|
||||||
|
if let Some(new_module_id) = engine_state.find_module(overlay_name.as_bytes(), &[])
|
||||||
|
{
|
||||||
|
if !stack.has_env_overlay(&overlay_name, engine_state)
|
||||||
|
|| (old_module_id != new_module_id)
|
||||||
|
{
|
||||||
|
// Add environment variables only if:
|
||||||
|
// a) adding a new overlay
|
||||||
|
// b) refreshing an active overlay (the origin module changed)
|
||||||
|
let module = engine_state.get_module(new_module_id);
|
||||||
|
|
||||||
|
for (name, block_id) in module.env_vars() {
|
||||||
|
let name = if let Ok(s) = String::from_utf8(name.clone()) {
|
||||||
|
s
|
||||||
|
} else {
|
||||||
|
return Err(ShellError::NonUtf8(call.head));
|
||||||
|
};
|
||||||
|
|
||||||
|
let block = engine_state.get_block(block_id);
|
||||||
|
|
||||||
|
let val = eval_block(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
block,
|
||||||
|
PipelineData::new(call.head),
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
)?
|
||||||
|
.into_value(call.head);
|
||||||
|
|
||||||
|
stack.add_env_var(name, val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||||
|
name_arg.item,
|
||||||
|
name_arg.span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||||
|
name_arg.item,
|
||||||
|
name_arg.span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(PipelineData::new(call.head))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![
|
||||||
|
Example {
|
||||||
|
description: "Create an overlay from a module",
|
||||||
|
example: r#"module spam { export def foo [] { "foo" } }
|
||||||
|
overlay add spam"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description: "Create an overlay from a file",
|
||||||
|
example: r#"echo 'export env FOO { "foo" }' | save spam.nu
|
||||||
|
overlay add spam.nu"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_examples() {
|
||||||
|
use crate::test_examples;
|
||||||
|
|
||||||
|
test_examples(OverlayAdd {})
|
||||||
|
}
|
||||||
|
}
|
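Taken together, the examples embedded in add.rs above sketch the intended workflow for the new command. A minimal usage sketch based only on those examples (the `spam` name is illustrative, and the last line assumes the overlay makes the module's definitions callable, which is the stated purpose of the command):

    # bring a module's definitions and environment into scope as an overlay
    module spam { export def foo [] { "foo" } }
    overlay add spam
    foo                                # expected to resolve to the module's command

    # an overlay can also be created straight from a file
    echo 'export env FOO { "foo" }' | save spam.nu
    overlay add spam.nu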
crates/nu-command/src/core_commands/overlay/command.rs (new file, 58 lines)

use nu_engine::get_full_help;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, IntoPipelineData, PipelineData, Signature, Value,
};

#[derive(Clone)]
pub struct Overlay;

impl Command for Overlay {
    fn name(&self) -> &str {
        "overlay"
    }

    fn signature(&self) -> Signature {
        Signature::build("overlay").category(Category::Core)
    }

    fn usage(&self) -> &str {
        "Commands for manipulating overlays."
    }

    fn extra_usage(&self) -> &str {
        r#"This command is a parser keyword. For details, check:
https://www.nushell.sh/book/thinking_in_nushell.html"#
    }

    fn is_parser_keyword(&self) -> bool {
        true
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        Ok(Value::String {
            val: get_full_help(&Overlay.signature(), &[], engine_state, stack),
            span: call.head,
        }
        .into_pipeline_data())
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_examples() {
        use crate::test_examples;

        test_examples(Overlay {})
    }
}
85
crates/nu-command/src/core_commands/overlay/list.rs
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{
|
||||||
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Value,
|
||||||
|
};
|
||||||
|
|
||||||
|
use log::trace;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OverlayList;
|
||||||
|
|
||||||
|
impl Command for OverlayList {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay list"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"List all active overlays"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("overlay list").category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
"The overlays are listed in the order they were activated."
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let active_overlays_parser: Vec<Value> = engine_state
|
||||||
|
.active_overlay_names(&[])
|
||||||
|
.iter()
|
||||||
|
.map(|s| Value::string(String::from_utf8_lossy(s), call.head))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let active_overlays_engine: Vec<Value> = stack
|
||||||
|
.active_overlays
|
||||||
|
.iter()
|
||||||
|
.map(|s| Value::string(s, call.head))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Check if the overlays in the engine match the overlays in the parser
|
||||||
|
if (active_overlays_parser.len() != active_overlays_engine.len())
|
||||||
|
|| active_overlays_parser
|
||||||
|
.iter()
|
||||||
|
.zip(active_overlays_engine.iter())
|
||||||
|
.any(|(op, oe)| op != oe)
|
||||||
|
{
|
||||||
|
trace!("parser overlays: {:?}", active_overlays_parser);
|
||||||
|
trace!("engine overlays: {:?}", active_overlays_engine);
|
||||||
|
|
||||||
|
return Err(ShellError::NushellFailedSpannedHelp(
|
||||||
|
"Overlay mismatch".into(),
|
||||||
|
"Active overlays do not match between the engine and the parser.".into(),
|
||||||
|
call.head,
|
||||||
|
"Run Nushell with --log-level=trace to see what went wrong.".into(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Value::List {
|
||||||
|
vals: active_overlays_engine,
|
||||||
|
span: call.head,
|
||||||
|
}
|
||||||
|
.into_pipeline_data())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![Example {
|
||||||
|
description: "Get the last activated overlay",
|
||||||
|
example: r#"module spam { export def foo [] { "foo" } }
|
||||||
|
overlay add spam
|
||||||
|
overlay list | last"#,
|
||||||
|
result: Some(Value::String {
|
||||||
|
val: "spam".to_string(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
}),
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
}
|
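The single example in list.rs above doubles as a quick sanity check that the parser and the engine agree on the active overlays; the command returns overlay names in activation order. A sketch taken directly from that example:

    module spam { export def foo [] { "foo" } }
    overlay add spam
    overlay list | last    # expected: "spam", per the example's Value::String result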
crates/nu-command/src/core_commands/overlay/mod.rs (new file, 11 lines)

mod add;
mod command;
mod list;
mod new;
mod remove;

pub use add::OverlayAdd;
pub use command::Overlay;
pub use list::OverlayList;
pub use new::OverlayNew;
pub use remove::OverlayRemove;
74
crates/nu-command/src/core_commands/overlay/new.rs
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OverlayNew;
|
||||||
|
|
||||||
|
impl Command for OverlayNew {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay new"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Create an empty overlay"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("overlay new")
|
||||||
|
.required("name", SyntaxShape::String, "Name of the overlay")
|
||||||
|
// TODO:
|
||||||
|
// .switch(
|
||||||
|
// "prefix",
|
||||||
|
// "Prepend module name to the imported symbols",
|
||||||
|
// Some('p'),
|
||||||
|
// )
|
||||||
|
.category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
r#"The command will first create an empty module, then add it as an overlay.
|
||||||
|
|
||||||
|
This command is a parser keyword. For details, check:
|
||||||
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let name_arg: Spanned<String> = call.req(engine_state, stack, 0)?;
|
||||||
|
|
||||||
|
stack.add_overlay(name_arg.item);
|
||||||
|
|
||||||
|
Ok(PipelineData::new(call.head))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![Example {
|
||||||
|
description: "Create an empty overlay",
|
||||||
|
example: r#"overlay new spam"#,
|
||||||
|
result: None,
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_examples() {
|
||||||
|
use crate::test_examples;
|
||||||
|
|
||||||
|
test_examples(OverlayNew {})
|
||||||
|
}
|
||||||
|
}
|
117
crates/nu-command/src/core_commands/overlay/remove.rs
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{
|
||||||
|
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Value,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OverlayRemove;
|
||||||
|
|
||||||
|
impl Command for OverlayRemove {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay remove"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Remove an active overlay"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("overlay remove")
|
||||||
|
.optional("name", SyntaxShape::String, "Overlay to remove")
|
||||||
|
.switch(
|
||||||
|
"keep-custom",
|
||||||
|
"Keep newly added symbols within the next activated overlay",
|
||||||
|
Some('k'),
|
||||||
|
)
|
||||||
|
.category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
r#"This command is a parser keyword. For details, check:
|
||||||
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
|
||||||
|
let overlay_name: Spanned<String> = if let Some(name) = call.opt(engine_state, stack, 0)? {
|
||||||
|
name
|
||||||
|
} else {
|
||||||
|
Spanned {
|
||||||
|
item: stack.last_overlay_name()?,
|
||||||
|
span: call.head,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if !stack.is_overlay_active(&overlay_name.item) {
|
||||||
|
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||||
|
overlay_name.item,
|
||||||
|
overlay_name.span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
if call.has_flag("keep-custom") {
|
||||||
|
if let Some(overlay_id) = engine_state.find_overlay(overlay_name.item.as_bytes()) {
|
||||||
|
let overlay_frame = engine_state.get_overlay(overlay_id);
|
||||||
|
let origin_module = engine_state.get_module(overlay_frame.origin);
|
||||||
|
|
||||||
|
let env_vars_to_keep: Vec<(String, Value)> = stack
|
||||||
|
.get_overlay_env_vars(engine_state, &overlay_name.item)
|
||||||
|
.into_iter()
|
||||||
|
.filter(|(name, _)| !origin_module.has_env_var(name.as_bytes()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
stack.remove_overlay(&overlay_name.item);
|
||||||
|
|
||||||
|
for (name, val) in env_vars_to_keep {
|
||||||
|
stack.add_env_var(name, val);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||||
|
overlay_name.item,
|
||||||
|
overlay_name.span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
stack.remove_overlay(&overlay_name.item);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(PipelineData::new(call.head))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![
|
||||||
|
Example {
|
||||||
|
description: "Remove an overlay created from a module",
|
||||||
|
example: r#"module spam { export def foo [] { "foo" } }
|
||||||
|
overlay add spam
|
||||||
|
overlay remove spam"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description: "Remove an overlay created from a file",
|
||||||
|
example: r#"echo 'export alias f = "foo"' | save spam.nu
|
||||||
|
overlay add spam.nu
|
||||||
|
overlay remove spam"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description: "Remove the last activated overlay",
|
||||||
|
example: r#"module spam { export env FOO { "foo" } }
|
||||||
|
overlay add spam
|
||||||
|
overlay remove"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
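From the remove.rs code above: without flags, `overlay remove` deactivates the overlay named by its optional argument, or the most recently activated one; with `--keep-custom` it re-adds any environment variables that were set on top of the overlay but are not defined by its origin module. A short sketch, taken from the file's own examples:

    module spam { export env FOO { "foo" } }
    overlay add spam
    overlay remove          # drops the last activated overlay, including FOO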
@@ -42,8 +42,8 @@ impl Command for Register {
     }
 
     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
@@ -27,8 +27,8 @@ impl Command for Source {
     }
 
     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
@@ -74,6 +74,7 @@ fn tutor(
 
     let search: Option<String> = call.opt(engine_state, stack, 0).unwrap_or(None);
     let find: Option<String> = call.get_flag(engine_state, stack, "find")?;
+    let notes = "You can learn about a topic using `tutor` followed by the name of the topic.\nFor example: `tutor table` to open the table topic.\n\n";
 
     let search_space = [
         (vec!["begin"], begin_tutor()),
@@ -100,7 +101,6 @@ fn tutor(
             vec!["var", "vars", "variable", "variables"],
             variable_tutor(),
         ),
-        (vec!["engine-q", "e-q"], engineq_tutor()),
         (vec!["block", "blocks"], block_tutor()),
         (vec!["shorthand", "shorthands"], shorthand_tutor()),
     ];
@@ -113,13 +113,22 @@ fn tutor(
             }
         }
 
-        let message = format!("You can find '{}' in the following topics:\n{}\n\nYou can learn about a topic using `tutor` followed by the name of the topic.\nFor example: `tutor table` to open the table topic.\n\n",
-            find,
+        let message = format!(
+            "You can find '{find}' in the following topics:\n\n{}\n\n{notes}",
            results.into_iter().map(|x| format!("- {}", x)).join("\n")
         );
 
         return Ok(display(&message, engine_state, stack, span));
     } else if let Some(search) = search {
+        if search == "list" {
+            let results = search_space.map(|s| s.0[0].to_string());
+            let message = format!(
+                "This tutorial contains the following topics:\n\n{}\n\n{notes}",
+                results.map(|x| format!("- {}", x)).join("\n")
+            );
+            return Ok(display(&message, engine_state, stack, span));
+        }
+
         for search_group in search_space {
             if search_group.0.contains(&search.as_str()) {
                 return Ok(display(search_group.1, engine_state, stack, span));
@@ -136,7 +145,8 @@ Welcome to the Nushell tutorial!
 With the `tutor` command, you'll be able to learn a lot about how Nushell
 works along with many fun tips and tricks to speed up everyday tasks.
 
-To get started, you can use `tutor begin`.
+To get started, you can use `tutor begin`, and to see all the available
+tutorials just run `tutor list`.
 
 "#
 }
@@ -390,29 +400,6 @@ same value using:
 "#
 }
 
-fn engineq_tutor() -> &'static str {
-    r#"
-Engine-q is the upcoming engine for Nushell. Build for speed and correctness,
-it also comes with a set of changes from Nushell versions prior to 0.60. To
-get ready for engine-q look for some of these changes that might impact your
-current scripts:
-
-* Engine-q now uses a few new data structures, including a record syntax
-  that allows you to model key-value pairs similar to JSON objects.
-* Environment variables can now contain more than just strings. Structured
-  values are converted to strings for external commands using converters.
-* `if` will now use an `else` keyword before the else block.
-* We're moving from "config.toml" to "config.nu". This means startup will
-  now be a script file.
-* `config` and its subcommands are being replaced by a record that you can
-  update in the shell which contains all the settings under the variable
-  `$config`.
-* bigint/bigdecimal values are now machine i64 and f64 values
-* And more, you can read more about upcoming changes in the up-to-date list
-  at: https://github.com/nushell/engine-q/issues/522
-"#
-}
-
 fn display(help: &str, engine_state: &EngineState, stack: &mut Stack, span: Span) -> PipelineData {
     let help = help.split('`');
 
@@ -424,7 +411,7 @@ fn display(help: &str, engine_state: &EngineState, stack: &mut Stack, span: Span
                 code_mode = false;
 
                 //TODO: support no-color mode
-                if let Some(highlighter) = engine_state.find_decl(b"nu-highlight") {
+                if let Some(highlighter) = engine_state.find_decl(b"nu-highlight", &[]) {
                     let decl = engine_state.get_decl(highlighter);
 
                     if let Ok(output) = decl.run(
@@ -24,8 +24,8 @@ impl Command for Use {
     }
 
     fn extra_usage(&self) -> &str {
-        r#"This command is a parser keyword. For details, check
-https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
+        r#"This command is a parser keyword. For details, check:
+https://www.nushell.sh/book/thinking_in_nushell.html"#
     }
 
     fn is_parser_keyword(&self) -> bool {
@@ -55,20 +55,20 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
             ));
         };
 
-        if let Some(overlay_id) = import_pattern.head.id {
-            let overlay = engine_state.get_overlay(overlay_id);
+        if let Some(module_id) = import_pattern.head.id {
+            let module = engine_state.get_module(module_id);
 
             let env_vars_to_use = if import_pattern.members.is_empty() {
-                overlay.env_vars_with_head(&import_pattern.head.name)
+                module.env_vars_with_head(&import_pattern.head.name)
             } else {
                 match &import_pattern.members[0] {
-                    ImportPatternMember::Glob { .. } => overlay.env_vars(),
+                    ImportPatternMember::Glob { .. } => module.env_vars(),
                     ImportPatternMember::Name { name, span } => {
                         let mut output = vec![];
 
-                        if let Some(id) = overlay.get_env_var_id(name) {
+                        if let Some(id) = module.get_env_var_id(name) {
                             output.push((name.clone(), id));
-                        } else if !overlay.has_decl(name) && !overlay.has_alias(name) {
+                        } else if !module.has_decl(name) && !module.has_alias(name) {
                             return Err(ShellError::EnvVarNotFoundAtRuntime(
                                 String::from_utf8_lossy(name).into(),
                                 *span,
@@ -81,9 +81,9 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
                         let mut output = vec![];
 
                         for (name, span) in names {
-                            if let Some(id) = overlay.get_env_var_id(name) {
+                            if let Some(id) = module.get_env_var_id(name) {
                                 output.push((name.clone(), id));
-                            } else if !overlay.has_decl(name) && !overlay.has_alias(name) {
+                            } else if !module.has_decl(name) && !module.has_alias(name) {
                                 return Err(ShellError::EnvVarNotFoundAtRuntime(
                                     String::from_utf8_lossy(name).into(),
                                     *span,
@@ -105,8 +105,6 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
 
         let block = engine_state.get_block(block_id);
 
-        // TODO: Add string conversions (e.g. int to string)
-        // TODO: Later expand env to take all Values
         let val = eval_block(
             engine_state,
             stack,
@@ -63,21 +63,7 @@ pub fn version(
         span: call.head,
     });
 
-    cols.push("tag".to_string());
-    vals.push(Value::String {
-        val: shadow_rs::tag(),
-        span: call.head,
-    });
-
-    let short_commit: Option<&str> = Some(shadow::SHORT_COMMIT).filter(|x| !x.is_empty());
-    if let Some(short_commit) = short_commit {
-        cols.push("short_commit".to_string());
-        vals.push(Value::String {
-            val: short_commit.to_string(),
-            span: call.head,
-        });
-    }
-    let commit_hash: Option<&str> = Some(shadow::COMMIT_HASH).filter(|x| !x.is_empty());
+    let commit_hash: Option<&str> = option_env!("NU_COMMIT_HASH");
     if let Some(commit_hash) = commit_hash {
         cols.push("commit_hash".to_string());
         vals.push(Value::String {
@@ -85,14 +71,6 @@ pub fn version(
             span: call.head,
         });
     }
-    let commit_date: Option<&str> = Some(shadow::COMMIT_DATE).filter(|x| !x.is_empty());
-    if let Some(commit_date) = commit_date {
-        cols.push("commit_date".to_string());
-        vals.push(Value::String {
-            val: commit_date.to_string(),
-            span: call.head,
-        });
-    }
 
     let build_os: Option<&str> = Some(shadow::BUILD_OS).filter(|x| !x.is_empty());
     if let Some(build_os) = build_os {
@@ -105,7 +83,7 @@ pub fn version(
 
     let build_target: Option<&str> = Some(shadow::BUILD_TARGET).filter(|x| !x.is_empty());
     if let Some(build_target) = build_target {
-        cols.push("build_os".to_string());
+        cols.push("build_target".to_string());
         vals.push(Value::String {
             val: build_target.to_string(),
             span: call.head,
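With the change above, `version` no longer reports `tag`, `short_commit`, or `commit_date`; `commit_hash` is now taken from the `NU_COMMIT_HASH` variable captured at build time via `option_env!`, and the build target finally lands in a correctly named `build_target` column. A quick check, assuming a build where `NU_COMMIT_HASH` was set:

    version | get commit_hash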
@@ -8,7 +8,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape,
 };
-use sqlparser::ast::{Ident, SelectItem, SetExpr, TableAlias, TableFactor};
+use sqlparser::ast::{Ident, SelectItem, SetExpr, Statement, TableAlias, TableFactor};
 
 #[derive(Clone)]
 pub struct AliasExpr;
@@ -29,26 +29,15 @@ impl Command for AliasExpr {
     }
 
     fn examples(&self) -> Vec<Example> {
-        vec![
-            Example {
-                description: "Creates an alias for a column selection",
-                example: "db col name_a | db as new_a",
-                result: None,
-            },
-            Example {
-                description: "Creates an alias for a table",
-                example: r#"db open name
-    | db select a
-    | db from table_a
-    | db as table_a_new
-    | db describe"#,
-                result: None,
-            },
-        ]
+        vec![Example {
+            description: "Creates an alias for a column selection",
+            example: "db col name_a | db as new_a",
+            result: None,
+        }]
     }
 
     fn search_terms(&self) -> Vec<&str> {
-        vec!["database", "column", "expression"]
+        vec!["database", "alias", "column"]
     }
 
     fn run(
@ -110,44 +99,56 @@ fn alias_db(
|
|||||||
new_alias: String,
|
new_alias: String,
|
||||||
call: &Call,
|
call: &Call,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
match db.query {
|
match db.statement.as_mut() {
|
||||||
None => Err(ShellError::GenericError(
|
None => Err(ShellError::GenericError(
|
||||||
"Error creating alias".into(),
|
"Error creating alias".into(),
|
||||||
"there is no query defined yet".into(),
|
"there is no statement defined yet".into(),
|
||||||
Some(call.head),
|
Some(call.head),
|
||||||
None,
|
None,
|
||||||
Vec::new(),
|
Vec::new(),
|
||||||
)),
|
)),
|
||||||
Some(ref mut query) => match &mut query.body {
|
Some(statement) => match statement {
|
||||||
SetExpr::Select(ref mut select) => {
|
Statement::Query(query) => match &mut query.body {
|
||||||
select.as_mut().from.iter_mut().for_each(|table| {
|
SetExpr::Select(select) => {
|
||||||
let new_alias = Some(TableAlias {
|
select.as_mut().from.iter_mut().for_each(|table| {
|
||||||
name: Ident {
|
let new_alias = Some(TableAlias {
|
||||||
value: new_alias.clone(),
|
name: Ident {
|
||||||
quote_style: None,
|
value: new_alias.clone(),
|
||||||
},
|
quote_style: None,
|
||||||
columns: Vec::new(),
|
},
|
||||||
|
columns: Vec::new(),
|
||||||
|
});
|
||||||
|
|
||||||
|
if let TableFactor::Table { ref mut alias, .. } = table.relation {
|
||||||
|
*alias = new_alias;
|
||||||
|
} else if let TableFactor::Derived { ref mut alias, .. } = table.relation {
|
||||||
|
*alias = new_alias;
|
||||||
|
} else if let TableFactor::TableFunction { ref mut alias, .. } =
|
||||||
|
table.relation
|
||||||
|
{
|
||||||
|
*alias = new_alias;
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
if let TableFactor::Table { ref mut alias, .. } = table.relation {
|
Ok(db.into_value(call.head).into_pipeline_data())
|
||||||
*alias = new_alias;
|
}
|
||||||
} else if let TableFactor::Derived { ref mut alias, .. } = table.relation {
|
_ => Err(ShellError::GenericError(
|
||||||
*alias = new_alias;
|
"Error creating alias".into(),
|
||||||
} else if let TableFactor::TableFunction { ref mut alias, .. } = table.relation
|
"Query has no select from defined".into(),
|
||||||
{
|
Some(call.head),
|
||||||
*alias = new_alias;
|
None,
|
||||||
}
|
Vec::new(),
|
||||||
});
|
)),
|
||||||
|
},
|
||||||
Ok(db.into_value(call.head).into_pipeline_data())
|
s => {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
))
|
||||||
}
|
}
|
||||||
_ => Err(ShellError::GenericError(
|
|
||||||
"Error creating alias".into(),
|
|
||||||
"Query has no select from defined".into(),
|
|
||||||
Some(call.head),
|
|
||||||
None,
|
|
||||||
Vec::new(),
|
|
||||||
)),
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
@@ -8,7 +8,7 @@ use nu_protocol::{
     Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, SyntaxShape,
     Value,
 };
-use sqlparser::ast::{BinaryOperator, Expr, Query, Select, SetExpr};
+use sqlparser::ast::{BinaryOperator, Expr, Query, Select, SetExpr, Statement};
 
 #[derive(Clone)]
 pub struct AndDb;
@ -78,12 +78,23 @@ impl Command for AndDb {
|
|||||||
|
|
||||||
Ok(expression.into_value(call.head).into_pipeline_data())
|
Ok(expression.into_value(call.head).into_pipeline_data())
|
||||||
} else if let Ok(mut db) = SQLiteDatabase::try_from_value(value.clone()) {
|
} else if let Ok(mut db) = SQLiteDatabase::try_from_value(value.clone()) {
|
||||||
db.query = match db.query {
|
match db.statement.as_mut() {
|
||||||
Some(query) => Some(modify_query(query, expr, call.head)?),
|
Some(statement) => match statement {
|
||||||
|
Statement::Query(query) => modify_query(query, expr, call.head)?,
|
||||||
|
s => {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
},
|
||||||
None => {
|
None => {
|
||||||
return Err(ShellError::GenericError(
|
return Err(ShellError::GenericError(
|
||||||
"Connection without query".into(),
|
"Connection without statement".into(),
|
||||||
"Missing query in the connection".into(),
|
"The connection needs a statement defined".into(),
|
||||||
Some(call.head),
|
Some(call.head),
|
||||||
None,
|
None,
|
||||||
Vec::new(),
|
Vec::new(),
|
||||||
@ -103,26 +114,24 @@ impl Command for AndDb {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn modify_query(mut query: Query, expression: Expr, span: Span) -> Result<Query, ShellError> {
|
fn modify_query(query: &mut Box<Query>, expression: Expr, span: Span) -> Result<(), ShellError> {
|
||||||
query.body = match query.body {
|
match query.body {
|
||||||
SetExpr::Select(select) => Ok(SetExpr::Select(modify_select(select, expression, span)?)),
|
SetExpr::Select(ref mut select) => modify_select(select, expression, span)?,
|
||||||
_ => Err(ShellError::GenericError(
|
_ => {
|
||||||
"Query without a select".into(),
|
return Err(ShellError::GenericError(
|
||||||
"Missing a WHERE clause before an AND clause".into(),
|
"Query without a select".into(),
|
||||||
Some(span),
|
"Missing a WHERE clause before an AND clause".into(),
|
||||||
None,
|
Some(span),
|
||||||
Vec::new(),
|
None,
|
||||||
)),
|
Vec::new(),
|
||||||
}?;
|
))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
Ok(query)
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn modify_select(
|
fn modify_select(select: &mut Box<Select>, expression: Expr, span: Span) -> Result<(), ShellError> {
|
||||||
mut select: Box<Select>,
|
|
||||||
expression: Expr,
|
|
||||||
span: Span,
|
|
||||||
) -> Result<Box<Select>, ShellError> {
|
|
||||||
let new_expression = match &select.selection {
|
let new_expression = match &select.selection {
|
||||||
Some(expr) => Ok(Expr::BinaryOp {
|
Some(expr) => Ok(Expr::BinaryOp {
|
||||||
left: Box::new(expr.clone()),
|
left: Box::new(expr.clone()),
|
||||||
@ -139,5 +148,5 @@ fn modify_select(
|
|||||||
}?;
|
}?;
|
||||||
|
|
||||||
select.as_mut().selection = Some(new_expression);
|
select.as_mut().selection = Some(new_expression);
|
||||||
Ok(select)
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@@ -27,7 +27,7 @@ impl Command for ColExpr {
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Creates a named column expression",
-            example: "col name_1",
+            example: "db col name_1",
             result: None,
         }]
     }
66
crates/nu-command/src/database/commands/conversions.rs
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
use crate::{database::values::definitions::ConnectionDb, SQLiteDatabase};
|
||||||
|
use nu_protocol::{ShellError, Value};
|
||||||
|
use sqlparser::ast::{ObjectName, Statement, TableAlias, TableFactor};
|
||||||
|
|
||||||
|
pub fn value_into_table_factor(
|
||||||
|
table: Value,
|
||||||
|
connection: &ConnectionDb,
|
||||||
|
alias: Option<TableAlias>,
|
||||||
|
) -> Result<TableFactor, ShellError> {
|
||||||
|
match table {
|
||||||
|
Value::String { val, .. } => {
|
||||||
|
let ident = sqlparser::ast::Ident {
|
||||||
|
value: val,
|
||||||
|
quote_style: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(TableFactor::Table {
|
||||||
|
name: ObjectName(vec![ident]),
|
||||||
|
alias,
|
||||||
|
args: Vec::new(),
|
||||||
|
with_hints: Vec::new(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Value::CustomValue { span, .. } => {
|
||||||
|
let db = SQLiteDatabase::try_from_value(table)?;
|
||||||
|
|
||||||
|
if &db.connection != connection {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Incompatible connections".into(),
|
||||||
|
"trying to join on table with different connection".into(),
|
||||||
|
Some(span),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
match db.statement {
|
||||||
|
Some(statement) => match statement {
|
||||||
|
Statement::Query(query) => Ok(TableFactor::Derived {
|
||||||
|
lateral: false,
|
||||||
|
subquery: query,
|
||||||
|
alias,
|
||||||
|
}),
|
||||||
|
s => Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(span),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
)),
|
||||||
|
},
|
||||||
|
None => Err(ShellError::GenericError(
|
||||||
|
"Error creating derived table".into(),
|
||||||
|
"there is no statement defined yet".into(),
|
||||||
|
Some(span),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => Err(ShellError::UnsupportedInput(
|
||||||
|
"String or connection".into(),
|
||||||
|
table.span()?,
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
@ -1,11 +1,13 @@
|
|||||||
use super::super::SQLiteDatabase;
|
use crate::database::values::definitions::ConnectionDb;
|
||||||
|
|
||||||
|
use super::{super::SQLiteDatabase, conversions::value_into_table_factor};
|
||||||
use nu_engine::CallExt;
|
use nu_engine::CallExt;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
ast::Call,
|
ast::Call,
|
||||||
engine::{Command, EngineState, Stack},
|
engine::{Command, EngineState, Stack},
|
||||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape,
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
|
||||||
};
|
};
|
||||||
use sqlparser::ast::{Ident, ObjectName, Query, Select, SetExpr, TableFactor, TableWithJoins};
|
use sqlparser::ast::{Ident, Query, Select, SetExpr, Statement, TableAlias, TableWithJoins};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct FromDb;
|
pub struct FromDb;
|
||||||
@ -23,8 +25,14 @@ impl Command for FromDb {
|
|||||||
Signature::build(self.name())
|
Signature::build(self.name())
|
||||||
.required(
|
.required(
|
||||||
"select",
|
"select",
|
||||||
|
SyntaxShape::Any,
|
||||||
|
"table of derived table to select from",
|
||||||
|
)
|
||||||
|
.named(
|
||||||
|
"as",
|
||||||
SyntaxShape::String,
|
SyntaxShape::String,
|
||||||
"Name of table to select from",
|
"Alias for the selected table",
|
||||||
|
Some('a'),
|
||||||
)
|
)
|
||||||
.category(Category::Custom("database".into()))
|
.category(Category::Custom("database".into()))
|
||||||
}
|
}
|
||||||
@ -48,51 +56,94 @@ impl Command for FromDb {
|
|||||||
call: &Call,
|
call: &Call,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let table: String = call.req(engine_state, stack, 0)?;
|
|
||||||
|
|
||||||
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
|
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
|
||||||
db.query = match db.query {
|
db.statement = match db.statement {
|
||||||
None => Some(create_query(table)),
|
None => Some(create_statement(&db.connection, engine_state, stack, call)?),
|
||||||
Some(query) => Some(modify_query(query, table)),
|
Some(statement) => Some(modify_statement(
|
||||||
|
&db.connection,
|
||||||
|
statement,
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
call,
|
||||||
|
)?),
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(db.into_value(call.head).into_pipeline_data())
|
Ok(db.into_value(call.head).into_pipeline_data())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_query(table: String) -> Query {
|
fn create_statement(
|
||||||
Query {
|
connection: &ConnectionDb,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
) -> Result<Statement, ShellError> {
|
||||||
|
let query = Query {
|
||||||
with: None,
|
with: None,
|
||||||
body: SetExpr::Select(Box::new(create_select(table))),
|
body: SetExpr::Select(Box::new(create_select(
|
||||||
|
connection,
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
call,
|
||||||
|
)?)),
|
||||||
order_by: Vec::new(),
|
order_by: Vec::new(),
|
||||||
limit: None,
|
limit: None,
|
||||||
offset: None,
|
offset: None,
|
||||||
fetch: None,
|
fetch: None,
|
||||||
lock: None,
|
lock: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Statement::Query(Box::new(query)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn modify_statement(
|
||||||
|
connection: &ConnectionDb,
|
||||||
|
mut statement: Statement,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
) -> Result<Statement, ShellError> {
|
||||||
|
match statement {
|
||||||
|
Statement::Query(ref mut query) => {
|
||||||
|
match query.body {
|
||||||
|
SetExpr::Select(ref mut select) => {
|
||||||
|
let table = create_table(connection, engine_state, stack, call)?;
|
||||||
|
select.from.push(table);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
query.as_mut().body = SetExpr::Select(Box::new(create_select(
|
||||||
|
connection,
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
call,
|
||||||
|
)?));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(statement)
|
||||||
|
}
|
||||||
|
s => Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn modify_query(mut query: Query, table: String) -> Query {
|
fn create_select(
|
||||||
query.body = match query.body {
|
connection: &ConnectionDb,
|
||||||
SetExpr::Select(select) => SetExpr::Select(modify_select(select, table)),
|
engine_state: &EngineState,
|
||||||
_ => SetExpr::Select(Box::new(create_select(table))),
|
stack: &mut Stack,
|
||||||
};
|
call: &Call,
|
||||||
|
) -> Result<Select, ShellError> {
|
||||||
query
|
Ok(Select {
|
||||||
}
|
|
||||||
|
|
||||||
fn modify_select(mut select: Box<Select>, table: String) -> Box<Select> {
|
|
||||||
select.as_mut().from = create_from(table);
|
|
||||||
select
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create_select(table: String) -> Select {
|
|
||||||
Select {
|
|
||||||
distinct: false,
|
distinct: false,
|
||||||
top: None,
|
top: None,
|
||||||
projection: Vec::new(),
|
projection: Vec::new(),
|
||||||
into: None,
|
into: None,
|
||||||
from: create_from(table),
|
from: vec![create_table(connection, engine_state, stack, call)?],
|
||||||
lateral_views: Vec::new(),
|
lateral_views: Vec::new(),
|
||||||
selection: None,
|
selection: None,
|
||||||
group_by: Vec::new(),
|
group_by: Vec::new(),
|
||||||
@ -100,29 +151,32 @@ fn create_select(table: String) -> Select {
|
|||||||
distribute_by: Vec::new(),
|
distribute_by: Vec::new(),
|
||||||
sort_by: Vec::new(),
|
sort_by: Vec::new(),
|
||||||
having: None,
|
having: None,
|
||||||
}
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// This function needs more work
|
fn create_table(
|
||||||
// It needs to define multi tables and joins
|
connection: &ConnectionDb,
|
||||||
// I assume we will need to define expressions for the columns instead of strings
|
engine_state: &EngineState,
|
||||||
fn create_from(table: String) -> Vec<TableWithJoins> {
|
stack: &mut Stack,
|
||||||
let ident = Ident {
|
call: &Call,
|
||||||
value: table,
|
) -> Result<TableWithJoins, ShellError> {
|
||||||
quote_style: None,
|
let alias = call
|
||||||
};
|
.get_flag::<String>(engine_state, stack, "as")?
|
||||||
|
.map(|alias| TableAlias {
|
||||||
|
name: Ident {
|
||||||
|
value: alias,
|
||||||
|
quote_style: None,
|
||||||
|
},
|
||||||
|
columns: Vec::new(),
|
||||||
|
});
|
||||||
|
|
||||||
let table_factor = TableFactor::Table {
|
let select_table: Value = call.req(engine_state, stack, 0)?;
|
||||||
name: ObjectName(vec![ident]),
|
let table_factor = value_into_table_factor(select_table, connection, alias)?;
|
||||||
alias: None,
|
|
||||||
args: Vec::new(),
|
|
||||||
with_hints: Vec::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let table = TableWithJoins {
|
let table = TableWithJoins {
|
||||||
relation: table_factor,
|
relation: table_factor,
|
||||||
joins: Vec::new(),
|
joins: Vec::new(),
|
||||||
};
|
};
|
||||||
|
|
||||||
vec![table]
|
Ok(table)
|
||||||
}
|
}
|
||||||
|
85
crates/nu-command/src/database/commands/function.rs
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
use crate::database::values::dsl::ExprDb;
|
||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::{
|
||||||
|
ast::Call,
|
||||||
|
engine::{Command, EngineState, Stack},
|
||||||
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
|
||||||
|
};
|
||||||
|
use sqlparser::ast::{Expr, Function, FunctionArg, FunctionArgExpr, Ident, ObjectName};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct FunctionExpr;
|
||||||
|
|
||||||
|
impl Command for FunctionExpr {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"db fn"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> Signature {
|
||||||
|
Signature::build(self.name())
|
||||||
|
.required("name", SyntaxShape::String, "function name")
|
||||||
|
.switch("distinct", "distict values", Some('d'))
|
||||||
|
.rest("arguments", SyntaxShape::Any, "function arguments")
|
||||||
|
.category(Category::Custom("database".into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Creates function expression for a select operation"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![Example {
|
||||||
|
description: "Creates a function expression",
|
||||||
|
example: "db fn count name_1",
|
||||||
|
result: None,
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["database", "function", "expression"]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let name: String = call.req(engine_state, stack, 0)?;
|
||||||
|
let vals: Vec<Value> = call.rest(engine_state, stack, 1)?;
|
||||||
|
let value = Value::List {
|
||||||
|
vals,
|
||||||
|
span: call.head,
|
||||||
|
};
|
||||||
|
let expressions = ExprDb::extract_exprs(value)?;
|
||||||
|
|
||||||
|
let name: Vec<Ident> = name
|
||||||
|
.split('.')
|
||||||
|
.map(|part| Ident {
|
||||||
|
value: part.to_string(),
|
||||||
|
quote_style: None,
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
let name = ObjectName(name);
|
||||||
|
|
||||||
|
let args: Vec<FunctionArg> = expressions
|
||||||
|
.into_iter()
|
||||||
|
.map(|expr| {
|
||||||
|
let arg = FunctionArgExpr::Expr(expr);
|
||||||
|
|
||||||
|
FunctionArg::Unnamed(arg)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let expression: ExprDb = Expr::Function(Function {
|
||||||
|
name,
|
||||||
|
args,
|
||||||
|
over: None,
|
||||||
|
distinct: call.has_flag("distinct"),
|
||||||
|
})
|
||||||
|
.into();
|
||||||
|
|
||||||
|
Ok(expression.into_value(call.head).into_pipeline_data())
|
||||||
|
}
|
||||||
|
}
|
102
crates/nu-command/src/database/commands/group_by.rs
Normal file
@ -0,0 +1,102 @@
use crate::database::values::dsl::ExprDb;

use super::super::SQLiteDatabase;
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
use sqlparser::ast::{SetExpr, Statement};

#[derive(Clone)]
pub struct GroupByDb;

impl Command for GroupByDb {
    fn name(&self) -> &str {
        "db group-by"
    }

    fn usage(&self) -> &str {
        "Group by query"
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .rest(
                "select",
                SyntaxShape::Any,
                "Select expression(s) on the table",
            )
            .category(Category::Custom("database".into()))
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["database", "select"]
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "groups the query by a column",
            example: r#"db open db.mysql
    | db from table_a
    | db select a
    | db group-by a
    | db describe"#,
            result: None,
        }]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let vals: Vec<Value> = call.rest(engine_state, stack, 0)?;
        let value = Value::List {
            vals,
            span: call.head,
        };
        let expressions = ExprDb::extract_exprs(value)?;

        let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
        match db.statement.as_mut() {
            Some(statement) => match statement {
                Statement::Query(ref mut query) => match &mut query.body {
                    SetExpr::Select(ref mut select) => select.group_by = expressions,
                    s => {
                        return Err(ShellError::GenericError(
                            "Connection doesnt define a select".into(),
                            format!("Expected a connection with select query. Got {}", s),
                            Some(call.head),
                            None,
                            Vec::new(),
                        ))
                    }
                },
                s => {
                    return Err(ShellError::GenericError(
                        "Connection doesnt define a query".into(),
                        format!("Expected a connection with query. Got {}", s),
                        Some(call.head),
                        None,
                        Vec::new(),
                    ))
                }
            },
            None => {
                return Err(ShellError::GenericError(
                    "Connection without statement".into(),
                    "The connection needs a statement defined".into(),
                    Some(call.head),
                    None,
                    Vec::new(),
                ))
            }
        };

        Ok(db.into_value(call.head).into_pipeline_data())
    }
}
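
Grouping usually pairs with an aggregate. A hypothetical variant of the example above that combines db group-by with the db fn command added earlier in this changeset; the names are placeholders, and passing a --distinct aggregate through db select is an assumption based only on the signatures shown here:

db open db.mysql
| db from table_a
| db select (db fn --distinct count col_b) a
| db group-by a
| db describe
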
178
crates/nu-command/src/database/commands/join.rs
Normal file
@ -0,0 +1,178 @@
use super::{super::SQLiteDatabase, conversions::value_into_table_factor};
use crate::database::values::{definitions::ConnectionDb, dsl::ExprDb};
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
use sqlparser::ast::{
    Ident, Join, JoinConstraint, JoinOperator, Select, SetExpr, Statement, TableAlias,
};

#[derive(Clone)]
pub struct JoinDb;

impl Command for JoinDb {
    fn name(&self) -> &str {
        "db join"
    }

    fn usage(&self) -> &str {
        "Joins with another table or derived table. Default join type is inner"
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .required(
                "table",
                SyntaxShape::Any,
                "table or derived table to join on",
            )
            .required("on", SyntaxShape::Any, "expression to join tables")
            .named(
                "as",
                SyntaxShape::String,
                "Alias for the selected join",
                Some('a'),
            )
            .switch("left", "left outer join", Some('l'))
            .switch("right", "right outer join", Some('r'))
            .switch("outer", "full outer join", Some('o'))
            .switch("cross", "cross join", Some('c'))
            .category(Category::Custom("database".into()))
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["database", "join"]
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "",
            example: "",
            result: None,
        }]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;

        db.statement = match db.statement {
            Some(statement) => Some(modify_statement(
                &db.connection,
                statement,
                engine_state,
                stack,
                call,
            )?),
            None => {
                return Err(ShellError::GenericError(
                    "Error creating join".into(),
                    "there is no statement defined yet".into(),
                    Some(call.head),
                    None,
                    Vec::new(),
                ))
            }
        };

        Ok(db.into_value(call.head).into_pipeline_data())
    }
}

fn modify_statement(
    connection: &ConnectionDb,
    mut statement: Statement,
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
) -> Result<Statement, ShellError> {
    match statement {
        Statement::Query(ref mut query) => {
            match &mut query.body {
                SetExpr::Select(ref mut select) => {
                    modify_from(connection, select, engine_state, stack, call)?
                }
                s => {
                    return Err(ShellError::GenericError(
                        "Connection doesnt define a select".into(),
                        format!("Expected a connection with select. Got {}", s),
                        Some(call.head),
                        None,
                        Vec::new(),
                    ))
                }
            };

            Ok(statement)
        }
        s => Err(ShellError::GenericError(
            "Connection doesnt define a query".into(),
            format!("Expected a connection with query. Got {}", s),
            Some(call.head),
            None,
            Vec::new(),
        )),
    }
}

fn modify_from(
    connection: &ConnectionDb,
    select: &mut Select,
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
) -> Result<(), ShellError> {
    match select.from.last_mut() {
        Some(table) => {
            let alias = call
                .get_flag::<String>(engine_state, stack, "as")?
                .map(|alias| TableAlias {
                    name: Ident {
                        value: alias,
                        quote_style: None,
                    },
                    columns: Vec::new(),
                });

            let join_table: Value = call.req(engine_state, stack, 0)?;
            let table_factor = value_into_table_factor(join_table, connection, alias)?;

            let on_expr: Value = call.req(engine_state, stack, 1)?;
            let on_expr = ExprDb::try_from_value(&on_expr)?;

            let join_on = if call.has_flag("left") {
                JoinOperator::LeftOuter(JoinConstraint::On(on_expr.into_native()))
            } else if call.has_flag("right") {
                JoinOperator::RightOuter(JoinConstraint::On(on_expr.into_native()))
            } else if call.has_flag("outer") {
                JoinOperator::FullOuter(JoinConstraint::On(on_expr.into_native()))
            } else {
                JoinOperator::Inner(JoinConstraint::On(on_expr.into_native()))
            };

            let join = Join {
                relation: table_factor,
                join_operator: join_on,
            };

            table.joins.push(join);

            Ok(())
        }
        None => Err(ShellError::GenericError(
            "Connection without table defined".into(),
            "Expected a table defined".into(),
            Some(call.head),
            None,
            Vec::new(),
        )),
    }
}
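
The examples block above is left empty in this commit. A hypothetical invocation, inferred purely from the signature defined here, might look like the following; the table names and alias are placeholders, and the join condition is left as a placeholder because this diff does not show how such an expression is built:

db open db.mysql
| db from table_a
| db join table_b <join-condition> --as t2
| db describe
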
@ -6,6 +6,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
 };
+use sqlparser::ast::Statement;
 
 #[derive(Clone)]
 pub struct LimitDb;
@ -56,11 +57,19 @@ impl Command for LimitDb {
         let expr = ExprDb::try_from_value(&limit)?.into_native();
 
         let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
-        db.query = match db.query {
-            Some(mut query) => {
-                query.limit = Some(expr);
-                Some(query)
-            }
+        match db.statement {
+            Some(ref mut statement) => match statement {
+                Statement::Query(query) => query.as_mut().limit = Some(expr),
+                s => {
+                    return Err(ShellError::GenericError(
+                        "Connection doesnt define a statement".into(),
+                        format!("Expected a connection with query. Got {}", s),
+                        Some(call.head),
+                        None,
+                        Vec::new(),
+                    ))
+                }
+            },
             None => {
                 return Err(ShellError::GenericError(
                     "Connection without query".into(),
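
For context, a hypothetical db limit pipeline against the reworked statement handling above; the file and table names are placeholders, and the argument form is an assumption since the command's signature is not part of this hunk:

db open db.mysql
| db from table_a
| db select a
| db limit 10
| db describe
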
@ -1,12 +1,21 @@
+// Conversions between value and sqlparser objects
+pub mod conversions;
+
+mod alias;
 mod and;
+mod col;
 mod collect;
 mod command;
 mod describe;
 mod from;
+mod function;
+mod group_by;
+mod join;
 mod limit;
 mod open;
 mod or;
 mod order_by;
+mod over;
 mod query;
 mod schema;
 mod select;
@ -18,21 +27,27 @@ use testing::TestingDb;
 
 use nu_protocol::engine::StateWorkingSet;
 
+use alias::AliasExpr;
 use and::AndDb;
+use col::ColExpr;
 use collect::CollectDb;
 use command::Database;
 use describe::DescribeDb;
 use from::FromDb;
+use function::FunctionExpr;
+use group_by::GroupByDb;
+use join::JoinDb;
 use limit::LimitDb;
 use open::OpenDb;
 use or::OrDb;
 use order_by::OrderByDb;
+use over::OverExpr;
 use query::QueryDb;
 use schema::SchemaDb;
 use select::ProjectionDb;
 use where_::WhereDb;
 
-pub fn add_commands_decls(working_set: &mut StateWorkingSet) {
+pub fn add_database_decls(working_set: &mut StateWorkingSet) {
     macro_rules! bind_command {
         ( $command:expr ) => {
             working_set.add_decl(Box::new($command));
@ -44,17 +59,23 @@ pub fn add_commands_decls(working_set: &mut StateWorkingSet) {
 
     // Series commands
     bind_command!(
+        AliasExpr,
         AndDb,
+        ColExpr,
         CollectDb,
         Database,
         DescribeDb,
         FromDb,
-        QueryDb,
+        FunctionExpr,
+        GroupByDb,
+        JoinDb,
         LimitDb,
-        ProjectionDb,
         OpenDb,
         OrderByDb,
         OrDb,
+        OverExpr,
+        QueryDb,
+        ProjectionDb,
         SchemaDb,
         TestingDb,
         WhereDb
@ -8,7 +8,7 @@ use nu_protocol::{
     Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, SyntaxShape,
     Value,
 };
-use sqlparser::ast::{BinaryOperator, Expr, Query, Select, SetExpr};
+use sqlparser::ast::{BinaryOperator, Expr, Query, Select, SetExpr, Statement};
 
 #[derive(Clone)]
 pub struct OrDb;
@ -78,12 +78,23 @@ impl Command for OrDb {
 
             Ok(expression.into_value(call.head).into_pipeline_data())
         } else if let Ok(mut db) = SQLiteDatabase::try_from_value(value.clone()) {
-            db.query = match db.query {
-                Some(query) => Some(modify_query(query, expr, call.head)?),
+            match db.statement {
+                Some(ref mut statement) => match statement {
+                    Statement::Query(query) => modify_query(query, expr, call.head)?,
+                    s => {
+                        return Err(ShellError::GenericError(
+                            "Connection doesnt define a query".into(),
+                            format!("Expected a connection with query. Got {}", s),
+                            Some(call.head),
+                            None,
+                            Vec::new(),
+                        ))
+                    }
+                },
                 None => {
                     return Err(ShellError::GenericError(
-                        "Connection without query".into(),
-                        "Missing query in the connection".into(),
+                        "Connection without statement".into(),
+                        "The connection needs a statement defined".into(),
                         Some(call.head),
                         None,
                         Vec::new(),
@ -103,26 +114,24 @@ impl Command for OrDb {
     }
 }
 
-fn modify_query(mut query: Query, expression: Expr, span: Span) -> Result<Query, ShellError> {
-    query.body = match query.body {
-        SetExpr::Select(select) => Ok(SetExpr::Select(modify_select(select, expression, span)?)),
-        _ => Err(ShellError::GenericError(
-            "Query without a select".into(),
-            "Missing a WHERE clause before an OR clause".into(),
-            Some(span),
-            None,
-            Vec::new(),
-        )),
-    }?;
+fn modify_query(query: &mut Box<Query>, expression: Expr, span: Span) -> Result<(), ShellError> {
+    match query.body {
+        SetExpr::Select(ref mut select) => modify_select(select, expression, span)?,
+        _ => {
+            return Err(ShellError::GenericError(
+                "Query without a select".into(),
+                "Missing a WHERE clause before an OR clause".into(),
+                Some(span),
+                None,
+                Vec::new(),
+            ))
+        }
+    };
 
-    Ok(query)
+    Ok(())
 }
 
-fn modify_select(
-    mut select: Box<Select>,
-    expression: Expr,
-    span: Span,
-) -> Result<Box<Select>, ShellError> {
+fn modify_select(select: &mut Box<Select>, expression: Expr, span: Span) -> Result<(), ShellError> {
     let new_expression = match &select.selection {
         Some(expr) => Ok(Expr::BinaryOp {
             left: Box::new(expr.clone()),
@ -139,5 +148,5 @@ fn modify_select(
     }?;
 
     select.as_mut().selection = Some(new_expression);
-    Ok(select)
+    Ok(())
 }
@ -7,7 +7,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
 };
-use sqlparser::ast::OrderByExpr;
+use sqlparser::ast::{Expr, OrderByExpr, Statement};
 
 #[derive(Clone)]
 pub struct OrderByDb;
@ -58,40 +58,100 @@ impl Command for OrderByDb {
     ) -> Result<PipelineData, ShellError> {
         let asc = call.has_flag("ascending");
         let nulls_first = call.has_flag("nulls_first");
-        let vals: Vec<Value> = call.rest(engine_state, stack, 0)?;
-        let value = Value::List {
-            vals,
+        let expressions: Vec<Value> = call.rest(engine_state, stack, 0)?;
+        let expressions = Value::List {
+            vals: expressions,
             span: call.head,
         };
-        let expressions = ExprDb::extract_exprs(value)?;
+        let expressions = ExprDb::extract_exprs(expressions)?;
+
+        let expressions: Vec<OrderByExpr> = expressions
+            .into_iter()
+            .map(|expr| OrderByExpr {
+                expr,
+                asc: if asc { Some(asc) } else { None },
+                nulls_first: if nulls_first { Some(nulls_first) } else { None },
+            })
+            .collect();
 
-        let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
-        db.query = match db.query {
-            Some(mut query) => {
-                let mut order_expr: Vec<OrderByExpr> = expressions
-                    .into_iter()
-                    .map(|expr| OrderByExpr {
-                        expr,
-                        asc: if asc { Some(asc) } else { None },
-                        nulls_first: if nulls_first { Some(nulls_first) } else { None },
-                    })
-                    .collect();
-
-                query.order_by.append(&mut order_expr);
-                Some(query)
-            }
+        let value = input.into_value(call.head);
+
+        if let Ok(expr) = ExprDb::try_from_value(&value) {
+            update_expressions(expr, expressions, call)
+        } else if let Ok(db) = SQLiteDatabase::try_from_value(value.clone()) {
+            update_connection(db, expressions, call)
+        } else {
+            Err(ShellError::CantConvert(
+                "expression or query".into(),
+                value.get_type().to_string(),
+                value.span()?,
+                None,
+            ))
+        }
+    }
+}
+
+fn update_expressions(
+    mut expr: ExprDb,
+    mut expressions: Vec<OrderByExpr>,
+    call: &Call,
+) -> Result<PipelineData, ShellError> {
+    match expr.as_mut() {
+        Expr::Function(function) => match &mut function.over {
+            Some(over) => over.order_by.append(&mut expressions),
             None => {
                 return Err(ShellError::GenericError(
-                    "Connection without query".into(),
-                    "The connection needs a query defined".into(),
+                    "Expression doesnt define a partition to order".into(),
+                    "Expected an expression with partition".into(),
                     Some(call.head),
                     None,
                     Vec::new(),
                 ))
            }
-        };
+        },
+        s => {
+            return Err(ShellError::GenericError(
+                "Expression doesnt define a function".into(),
+                format!("Expected an expression with a function. Got {}", s),
+                Some(call.head),
+                None,
+                Vec::new(),
+            ))
+        }
+    };
 
-        Ok(db.into_value(call.head).into_pipeline_data())
+    Ok(expr.into_value(call.head).into_pipeline_data())
 }
+
+fn update_connection(
+    mut db: SQLiteDatabase,
+    mut expressions: Vec<OrderByExpr>,
+    call: &Call,
+) -> Result<PipelineData, ShellError> {
+    match db.statement.as_mut() {
+        Some(statement) => match statement {
+            Statement::Query(query) => {
+                query.order_by.append(&mut expressions);
+            }
+            s => {
+                return Err(ShellError::GenericError(
+                    "Connection doesnt define a query".into(),
+                    format!("Expected a connection with query. Got {}", s),
+                    Some(call.head),
+                    None,
+                    Vec::new(),
+                ))
+            }
+        },
+        None => {
+            return Err(ShellError::GenericError(
+                "Connection without statement".into(),
+                "The connection needs a statement defined".into(),
+                Some(call.head),
+                None,
+                Vec::new(),
+            ))
+        }
+    };
+
+    Ok(db.into_value(call.head).into_pipeline_data())
 }
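
With this rework, db order-by accepts either a connection or a function expression. A hypothetical connection-side pipeline, with placeholder names; the --ascending switch is inferred from the has_flag calls above:

db open db.mysql
| db from table_a
| db select a
| db order-by a --ascending
| db describe
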
80
crates/nu-command/src/database/commands/over.rs
Normal file
@ -0,0 +1,80 @@
use crate::database::values::dsl::ExprDb;
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
use sqlparser::ast::{Expr, WindowSpec};

#[derive(Clone)]
pub struct OverExpr;

impl Command for OverExpr {
    fn name(&self) -> &str {
        "db over"
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .rest(
                "partition-by",
                SyntaxShape::Any,
                "columns to partition the window function",
            )
            .category(Category::Custom("database".into()))
    }

    fn usage(&self) -> &str {
        "Adds a partition to an expression function"
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Adds a partition to a function expression",
            example: "db fn avg col_a | db over col_b",
            result: None,
        }]
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["database", "column", "expression"]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let vals: Vec<Value> = call.rest(engine_state, stack, 0)?;
        let value = Value::List {
            vals,
            span: call.head,
        };
        let partitions = ExprDb::extract_exprs(value)?;

        let mut expression = ExprDb::try_from_pipeline(input, call.head)?;
        match expression.as_mut() {
            Expr::Function(function) => {
                function.over = Some(WindowSpec {
                    partition_by: partitions,
                    order_by: Vec::new(),
                    window_frame: None,
                });
            }
            s => {
                return Err(ShellError::GenericError(
                    "Expression doesnt define a function".into(),
                    format!("Expected an expression with a function. Got {}", s),
                    Some(call.head),
                    None,
                    Vec::new(),
                ))
            }
        };

        Ok(expression.into_value(call.head).into_pipeline_data())
    }
}
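
Combining the new expression commands, the example above extends naturally to an ordered window. The trailing db order-by step is an assumption based on the update_expressions branch added to order_by.rs, and the column names are placeholders:

db fn avg col_a | db over col_b | db order-by col_c
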