Compare commits
201 Commits
Author | SHA1 | Date | |
---|---|---|---|
d1c719a8cc | |||
4d854f36af | |||
8d5848c955 | |||
fe88d58b1e | |||
42dbfd1fa0 | |||
534e1fc3ce | |||
ff946a2f21 | |||
3c0cbec993 | |||
48e29e9ed6 | |||
ff53352afe | |||
4fd4136d50 | |||
dc1248a454 | |||
de554f8e5f | |||
44979f3051 | |||
7ae7394c85 | |||
9dbf7556b8 | |||
caafd26deb | |||
43a218240c | |||
11d7d8ea1e | |||
2dea9e6f1f | |||
c5cb369d8d | |||
b6959197bf | |||
d5b99ae316 | |||
9d10007085 | |||
2e0b964d5b | |||
5bae7e56ef | |||
b42ef45c7c | |||
3423cd54a1 | |||
837f0463eb | |||
56f6f683fc | |||
c57f41e5f2 | |||
8c74b1e437 | |||
8318d59ef1 | |||
64efa30f3e | |||
820a6bfb08 | |||
b8d253cbd7 | |||
3c421c5726 | |||
75b2d26187 | |||
17a5aa3052 | |||
e4a22799d5 | |||
fda456e469 | |||
e5d38dcff6 | |||
a82fa75c31 | |||
0c16464320 | |||
888758b813 | |||
cb909f810e | |||
a75318d7e8 | |||
7a9bf06005 | |||
a06299c77a | |||
e4bcd1934d | |||
4673adecc5 | |||
1b8051ece5 | |||
d44059c36b | |||
b79abdb2a5 | |||
ee8a0c9477 | |||
41853b9f18 | |||
997d56a288 | |||
0769e9b750 | |||
f5519e2a09 | |||
8259d463aa | |||
e2c015f725 | |||
eb12fffbc6 | |||
c42096c34e | |||
46eb34b35d | |||
23a73cd31f | |||
6c07bc10e2 | |||
6365ba0286 | |||
545b1dcd94 | |||
fb89f2f48c | |||
f6ee21f76b | |||
d69a4db2e7 | |||
d4bfbb5eaf | |||
507f24d029 | |||
230c36f2fb | |||
219c719e98 | |||
50146bdef3 | |||
2042f7f769 | |||
0594f9e7aa | |||
3b8deb9ec7 | |||
727ff5f2d4 | |||
3d62528d8c | |||
a42d419b66 | |||
9602e82029 | |||
8e98df8b28 | |||
2daf8ec72d | |||
afcacda35f | |||
06cf3fa5ad | |||
9a482ce284 | |||
8018ae3286 | |||
ef322a24c5 | |||
a8db4f0b0e | |||
98a4280c41 | |||
0e1bfae13d | |||
6ff717c0ba | |||
d534a89867 | |||
5bc9246f0f | |||
1e89cc3578 | |||
06f5199570 | |||
9e5e9819d6 | |||
1f8ccd8e5e | |||
e9d8b19d4d | |||
7c63ce15d8 | |||
a3a9571dac | |||
2cc5952c37 | |||
aa88449f29 | |||
06199d731b | |||
0ba86d7eb8 | |||
6efd1bcb3f | |||
0d06b6259f | |||
8fdc272bcc | |||
0ea7a38c21 | |||
1999e0dcf3 | |||
ac30b3d108 | |||
2b1e05aad0 | |||
6c56829976 | |||
2c58beec13 | |||
9c779b071b | |||
1e94793df5 | |||
7d9a77f179 | |||
bb079608dd | |||
5fa42eeb8c | |||
3e09158afc | |||
7a78171b34 | |||
633ebc7e43 | |||
f0cb2f38df | |||
f26d3bf8d7 | |||
498672f5e5 | |||
038391519b | |||
8004e8e2a0 | |||
e192684612 | |||
5d40fc2726 | |||
a22d70718f | |||
24a49f1b0a | |||
04473a5593 | |||
d1e7884d19 | |||
2b96c93b8d | |||
fc41a0f96b | |||
8bd68416e3 | |||
2062e33c37 | |||
c6383874e9 | |||
d90b25c633 | |||
44bcfb3403 | |||
c047fd4778 | |||
16bd7b6d0d | |||
3cef94ba39 | |||
f818193b53 | |||
1aec4a343a | |||
852de79212 | |||
06f40405fe | |||
65bac77e8a | |||
32d1939a95 | |||
53e35670ea | |||
a92567489f | |||
2145feff5d | |||
0b95465ea1 | |||
ec804f4568 | |||
4717ac70fd | |||
9969fbfbb1 | |||
5f39267a80 | |||
94a9380e8b | |||
1d64863585 | |||
8218f72eea | |||
c0b99b7131 | |||
75c033e4d1 | |||
d88d057bf6 | |||
b00098ccc6 | |||
7e5e9c28dd | |||
8ffffe9bcc | |||
8030f7e9f0 | |||
e4959d2f9f | |||
f311da9623 | |||
14d80d54fe | |||
23b467061b | |||
8d8f25b210 | |||
7ee22603ac | |||
4052a99ff5 | |||
ccfa35289b | |||
54fc164e1c | |||
3a35bf7d4e | |||
a61d09222f | |||
07ac3c3aab | |||
061e9294b3 | |||
374757f286 | |||
ca75cd7c0a | |||
d08c072f19 | |||
9b99b2f6ac | |||
1cb449b2d1 | |||
6cc66c8afd | |||
08e495ea67 | |||
b0647f780d | |||
2dfd975940 | |||
fbdb125141 | |||
c2ea993f7e | |||
e14e60dd2c | |||
768ff47d28 | |||
78a1879e36 | |||
0b9c0fea9d | |||
02a3430ef0 | |||
6623ed9061 | |||
48cf103439 | |||
1bcb87c48d |
@ -1,6 +1,7 @@
|
|||||||
# increase the default windows stack size
|
|
||||||
[target.x86_64-pc-windows-msvc]
|
[target.x86_64-pc-windows-msvc]
|
||||||
rustflags = ["-C", "link-args=-stack:10000000"]
|
# increase the default windows stack size
|
||||||
|
# statically link the CRT so users don't have to install it
|
||||||
|
rustflags = ["-C", "link-args=-stack:10000000", "-C", "target-feature=+crt-static"]
|
||||||
|
|
||||||
# keeping this but commentting out in case we need them in the future
|
# keeping this but commentting out in case we need them in the future
|
||||||
|
|
||||||
|
4
.github/pull_request_template.md
vendored
@ -7,5 +7,5 @@
|
|||||||
Make sure you've run and fixed any issues with these commands:
|
Make sure you've run and fixed any issues with these commands:
|
||||||
|
|
||||||
- [ ] `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
|
- [ ] `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
|
||||||
- [ ] `cargo clippy --all --all-features -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect` to check that you're using the standard code style
|
- [ ] `cargo clippy --workspace --features=extra -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect` to check that you're using the standard code style
|
||||||
- [ ] `cargo build; cargo test --all --all-features` to check that all the tests pass
|
- [ ] `cargo test --workspace --features=extra` to check that all the tests pass
|
||||||
|
102
.github/workflows/ci.yml
vendored
@ -7,26 +7,17 @@ on:
|
|||||||
name: continuous-integration
|
name: continuous-integration
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-clippy:
|
nu-fmt-clippy:
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
platform: [windows-latest, macos-latest, ubuntu-latest]
|
platform: [windows-latest, macos-latest, ubuntu-latest]
|
||||||
style: [all, default]
|
|
||||||
rust:
|
rust:
|
||||||
- stable
|
- stable
|
||||||
include:
|
|
||||||
- style: all
|
|
||||||
flags: "--all-features"
|
|
||||||
- style: default
|
|
||||||
flags: ""
|
|
||||||
exclude:
|
|
||||||
- platform: windows-latest
|
|
||||||
style: default
|
|
||||||
- platform: macos-latest
|
|
||||||
style: default
|
|
||||||
|
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
|
env:
|
||||||
|
NUSHELL_CARGO_TARGET: ci
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
@ -41,7 +32,7 @@ jobs:
|
|||||||
|
|
||||||
- uses: Swatinem/rust-cache@v1
|
- uses: Swatinem/rust-cache@v1
|
||||||
with:
|
with:
|
||||||
key: ${{ matrix.style }}v1 # increment this to bust the cache if needed
|
key: "v2" # increment this to bust the cache if needed
|
||||||
|
|
||||||
- name: Rustfmt
|
- name: Rustfmt
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
@ -49,29 +40,26 @@ jobs:
|
|||||||
command: fmt
|
command: fmt
|
||||||
args: --all -- --check
|
args: --all -- --check
|
||||||
|
|
||||||
- name: Build Nushell
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: build
|
|
||||||
args: --workspace ${{ matrix.flags }}
|
|
||||||
|
|
||||||
- name: Clippy
|
- name: Clippy
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: clippy
|
command: clippy
|
||||||
args: --workspace ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
|
args: --features=extra --workspace --exclude nu_plugin_* -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
|
||||||
|
|
||||||
|
nu-tests:
|
||||||
|
env:
|
||||||
|
NUSHELL_CARGO_TARGET: ci
|
||||||
|
|
||||||
test:
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
platform: [windows-latest, macos-latest, ubuntu-latest]
|
platform: [windows-latest, macos-latest, ubuntu-latest]
|
||||||
style: [all, default]
|
style: [extra, default]
|
||||||
rust:
|
rust:
|
||||||
- stable
|
- stable
|
||||||
include:
|
include:
|
||||||
- style: all
|
- style: extra
|
||||||
flags: "--all-features"
|
flags: "--features=extra"
|
||||||
- style: default
|
- style: default
|
||||||
flags: ""
|
flags: ""
|
||||||
exclude:
|
exclude:
|
||||||
@ -91,29 +79,25 @@ jobs:
|
|||||||
profile: minimal
|
profile: minimal
|
||||||
toolchain: ${{ matrix.rust }}
|
toolchain: ${{ matrix.rust }}
|
||||||
override: true
|
override: true
|
||||||
components: rustfmt, clippy
|
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@v1
|
# Temporarily disabled; the cache was getting huge (2.6GB compressed) on Windows and causing issues.
|
||||||
with:
|
# TODO: investigate why the cache was so big
|
||||||
key: ${{ matrix.style }}v1 # increment this to bust the cache if needed
|
# - uses: Swatinem/rust-cache@v1
|
||||||
|
# with:
|
||||||
- uses: taiki-e/install-action@nextest
|
# key: ${{ matrix.style }}v3 # increment this to bust the cache if needed
|
||||||
|
|
||||||
- name: Tests
|
- name: Tests
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: nextest
|
|
||||||
args: run --all ${{ matrix.flags }}
|
|
||||||
|
|
||||||
- name: Doctests
|
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: test
|
command: test
|
||||||
args: --workspace --doc ${{ matrix.flags }}
|
args: --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}
|
||||||
|
|
||||||
python-virtualenv:
|
python-virtualenv:
|
||||||
|
env:
|
||||||
|
NUSHELL_CARGO_TARGET: ci
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
platform: [ubuntu-latest, macos-latest, windows-latest]
|
platform: [ubuntu-latest, macos-latest, windows-latest]
|
||||||
rust:
|
rust:
|
||||||
@ -135,13 +119,13 @@ jobs:
|
|||||||
|
|
||||||
- uses: Swatinem/rust-cache@v1
|
- uses: Swatinem/rust-cache@v1
|
||||||
with:
|
with:
|
||||||
key: "1" # increment this to bust the cache if needed
|
key: "2" # increment this to bust the cache if needed
|
||||||
|
|
||||||
- name: Install Nushell
|
- name: Install Nushell
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: install
|
command: install
|
||||||
args: --path=. --no-default-features --debug
|
args: --path=. --profile ci --no-default-features
|
||||||
|
|
||||||
- name: Setup Python
|
- name: Setup Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v2
|
||||||
@ -161,9 +145,14 @@ jobs:
|
|||||||
run: cd virtualenv && tox -e ${{ matrix.py }} -- -k nushell
|
run: cd virtualenv && tox -e ${{ matrix.py }} -- -k nushell
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
|
# Build+test plugins on their own, without the rest of Nu. This helps with CI parallelization and
|
||||||
|
# also helps test that the plugins build without any feature unification shenanigans
|
||||||
plugins:
|
plugins:
|
||||||
|
env:
|
||||||
|
NUSHELL_CARGO_TARGET: ci
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
platform: [windows-latest, macos-latest, ubuntu-latest]
|
platform: [windows-latest, macos-latest, ubuntu-latest]
|
||||||
rust:
|
rust:
|
||||||
@ -181,29 +170,14 @@ jobs:
|
|||||||
toolchain: ${{ matrix.rust }}
|
toolchain: ${{ matrix.rust }}
|
||||||
override: true
|
override: true
|
||||||
|
|
||||||
# This job does not use rust-cache because 1) we have limited cache space, 2) even
|
- name: Clippy
|
||||||
# without caching, it's not the slowest job. Revisit if those facts change.
|
|
||||||
|
|
||||||
- name: Build nu_plugin_example
|
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: build
|
command: clippy
|
||||||
args: --package nu_plugin_example
|
args: --package nu_plugin_* ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
|
||||||
|
|
||||||
- name: Build nu_plugin_gstat
|
- name: Tests
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: build
|
command: test
|
||||||
args: --package nu_plugin_gstat
|
args: --profile ci --package nu_plugin_*
|
||||||
|
|
||||||
- name: Build nu_plugin_inc
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: build
|
|
||||||
args: --package nu_plugin_inc
|
|
||||||
|
|
||||||
- name: Build nu_plugin_query
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: build
|
|
||||||
args: --package nu_plugin_query
|
|
||||||
|
155
.github/workflows/release-pkg.nu
vendored
Executable file
@ -0,0 +1,155 @@
|
|||||||
|
#!/usr/bin/env nu
|
||||||
|
|
||||||
|
# Created: 2022/05/26 19:05:20
|
||||||
|
# Description:
|
||||||
|
# A script to do the github release task, need nushell to be installed.
|
||||||
|
# REF:
|
||||||
|
# 1. https://github.com/volks73/cargo-wix
|
||||||
|
|
||||||
|
# The main binary file to be released
|
||||||
|
let bin = 'nu'
|
||||||
|
let os = $env.OS
|
||||||
|
let target = $env.TARGET
|
||||||
|
# Repo source dir like `/home/runner/work/nushell/nushell`
|
||||||
|
let src = $env.GITHUB_WORKSPACE
|
||||||
|
let flags = $env.TARGET_RUSTFLAGS
|
||||||
|
let dist = $'($env.GITHUB_WORKSPACE)/output'
|
||||||
|
let version = (open Cargo.toml | get package.version)
|
||||||
|
|
||||||
|
# $env
|
||||||
|
|
||||||
|
$'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
|
||||||
|
if not ('Cargo.lock' | path exists) { cargo generate-lockfile }
|
||||||
|
|
||||||
|
$'Start building ($bin)...'; hr-line
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
# Build for Ubuntu and macOS
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
if $os in ['ubuntu-latest', 'macos-latest'] {
|
||||||
|
if $os == 'ubuntu-latest' {
|
||||||
|
sudo apt-get install libxcb-composite0-dev -y
|
||||||
|
}
|
||||||
|
if $target == 'aarch64-unknown-linux-gnu' {
|
||||||
|
sudo apt-get install gcc-aarch64-linux-gnu -y
|
||||||
|
let-env CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
|
||||||
|
cargo-build-nu $flags
|
||||||
|
} else if $target == 'armv7-unknown-linux-gnueabihf' {
|
||||||
|
sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
|
||||||
|
let-env CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
|
||||||
|
cargo-build-nu $flags
|
||||||
|
} else {
|
||||||
|
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
|
||||||
|
# Actually just for x86_64-unknown-linux-musl target
|
||||||
|
sudo apt install musl-tools -y
|
||||||
|
cargo-build-nu $flags
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
# Build for Windows without static-link-openssl feature
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
if $os in ['windows-latest'] {
|
||||||
|
if ($flags | str trim | empty?) {
|
||||||
|
cargo build --release --all --target $target --features=extra
|
||||||
|
} else {
|
||||||
|
cargo build --release --all --target $target --features=extra $flags
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
# Prepare for the release archive
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
let suffix = if $os == 'windows-latest' { '.exe' }
|
||||||
|
# nu, nu_plugin_* were all included
|
||||||
|
let executable = $'target/($target)/release/($bin)*($suffix)'
|
||||||
|
$'Current executable file: ($executable)'
|
||||||
|
|
||||||
|
cd $src; mkdir $dist;
|
||||||
|
rm -rf $'target/($target)/release/*.d' $'target/($target)/release/nu_pretty_hex*'
|
||||||
|
$'(char nl)All executable files:'; hr-line
|
||||||
|
ls -f $executable
|
||||||
|
|
||||||
|
$'(char nl)Copying release files...'; hr-line
|
||||||
|
cp -v README.release.txt $'($dist)/README.txt'
|
||||||
|
[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten
|
||||||
|
|
||||||
|
$'(char nl)Check binary release version detail:'; hr-line
|
||||||
|
let ver = if $os == 'windows-latest' {
|
||||||
|
(do -i { ./output/nu.exe -c 'version' }) | str collect
|
||||||
|
} else {
|
||||||
|
(do -i { ./output/nu -c 'version' }) | str collect
|
||||||
|
}
|
||||||
|
if ($ver | str trim | empty?) {
|
||||||
|
$'(ansi r)Incompatible nu binary...(ansi reset)'
|
||||||
|
} else { $ver }
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
# Create a release archive and send it to output for the following steps
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
cd $dist; $'(char nl)Creating release archive...'; hr-line
|
||||||
|
if $os in ['ubuntu-latest', 'macos-latest'] {
|
||||||
|
|
||||||
|
$'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
|
||||||
|
|
||||||
|
let archive = $'($dist)/($bin)-($version)-($target).tar.gz'
|
||||||
|
tar czf $archive *
|
||||||
|
print $'archive: ---> ($archive)'; ls $archive
|
||||||
|
echo $'::set-output name=archive::($archive)'
|
||||||
|
|
||||||
|
} else if $os == 'windows-latest' {
|
||||||
|
|
||||||
|
let releaseStem = $'($bin)-($version)-($target)'
|
||||||
|
|
||||||
|
$'(char nl)Download less related stuffs...'; hr-line
|
||||||
|
curl https://github.com/jftuga/less-Windows/releases/download/less-v590/less.exe -o $'($dist)\less.exe'
|
||||||
|
curl https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o $'($dist)\LICENSE-for-less.txt'
|
||||||
|
|
||||||
|
# Create Windows msi release package
|
||||||
|
if (get-env _EXTRA_) == 'msi' {
|
||||||
|
|
||||||
|
let wixRelease = $'($src)/target/wix/($releaseStem).msi'
|
||||||
|
$'(char nl)Start creating Windows msi package...'
|
||||||
|
cd $src; hr-line
|
||||||
|
# Wix need the binaries be stored in target/release/
|
||||||
|
cp -r $'($dist)/*' target/release/
|
||||||
|
cargo install cargo-wix --version 0.3.2
|
||||||
|
cargo wix --no-build --nocapture --package nu --output $wixRelease
|
||||||
|
echo $'::set-output name=archive::($wixRelease)'
|
||||||
|
|
||||||
|
} else {
|
||||||
|
|
||||||
|
$'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
|
||||||
|
let archive = $'($dist)/($releaseStem).zip'
|
||||||
|
7z a $archive *
|
||||||
|
print $'archive: ---> ($archive)';
|
||||||
|
let pkg = (ls -f $archive | get name)
|
||||||
|
if not ($pkg | empty?) {
|
||||||
|
echo $'::set-output name=archive::($pkg | get 0)'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def 'cargo-build-nu' [ options: string ] {
|
||||||
|
if ($options | str trim | empty?) {
|
||||||
|
cargo build --release --all --target $target --features=extra,static-link-openssl
|
||||||
|
} else {
|
||||||
|
cargo build --release --all --target $target --features=extra,static-link-openssl $options
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Print a horizontal line marker
|
||||||
|
def 'hr-line' [
|
||||||
|
--blank-line(-b): bool
|
||||||
|
] {
|
||||||
|
print $'(ansi g)---------------------------------------------------------------------------->(ansi reset)'
|
||||||
|
if $blank-line { char nl }
|
||||||
|
}
|
||||||
|
|
||||||
|
# Get the specified env key's value or ''
|
||||||
|
def 'get-env' [
|
||||||
|
key: string # The key to get it's env value
|
||||||
|
default: string = '' # The default value for an empty env
|
||||||
|
] {
|
||||||
|
$env | get -i $key | default $default
|
||||||
|
}
|
517
.github/workflows/release.yml
vendored
@ -1,3 +1,7 @@
|
|||||||
|
#
|
||||||
|
# REF:
|
||||||
|
# 1. https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstrategymatrixinclude
|
||||||
|
#
|
||||||
name: Create Release Draft
|
name: Create Release Draft
|
||||||
|
|
||||||
on:
|
on:
|
||||||
@ -5,434 +9,89 @@ on:
|
|||||||
push:
|
push:
|
||||||
tags: ["[0-9]+.[0-9]+.[0-9]+*"]
|
tags: ["[0-9]+.[0-9]+.[0-9]+*"]
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
linux:
|
all:
|
||||||
name: Build Linux
|
name: All
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
target:
|
||||||
|
- aarch64-apple-darwin
|
||||||
|
- x86_64-apple-darwin
|
||||||
|
- x86_64-pc-windows-msvc
|
||||||
|
- x86_64-unknown-linux-gnu
|
||||||
|
- x86_64-unknown-linux-musl
|
||||||
|
- aarch64-unknown-linux-gnu
|
||||||
|
- armv7-unknown-linux-gnueabihf
|
||||||
|
extra: ['bin']
|
||||||
|
include:
|
||||||
|
- target: aarch64-apple-darwin
|
||||||
|
os: macos-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-apple-darwin
|
||||||
|
os: macos-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-pc-windows-msvc
|
||||||
|
extra: 'bin'
|
||||||
|
os: windows-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-pc-windows-msvc
|
||||||
|
extra: msi
|
||||||
|
os: windows-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-unknown-linux-gnu
|
||||||
|
os: ubuntu-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: x86_64-unknown-linux-musl
|
||||||
|
os: ubuntu-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: aarch64-unknown-linux-gnu
|
||||||
|
os: ubuntu-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
- target: armv7-unknown-linux-gnueabihf
|
||||||
|
os: ubuntu-latest
|
||||||
|
target_rustflags: ''
|
||||||
|
|
||||||
|
runs-on: ${{matrix.os}}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code
|
- uses: actions/checkout@v3.0.2
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
- name: Install Rust Toolchain Components
|
||||||
- name: Install libxcb
|
uses: actions-rs/toolchain@v1.0.6
|
||||||
run: sudo apt-get install libxcb-composite0-dev
|
with:
|
||||||
|
override: true
|
||||||
- name: Set up cargo
|
profile: minimal
|
||||||
uses: actions-rs/toolchain@v1
|
toolchain: stable
|
||||||
with:
|
target: ${{ matrix.target }}
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
- name: Setup Nushell
|
||||||
override: true
|
uses: hustcer/setup-nu@v1
|
||||||
|
with:
|
||||||
- name: Build
|
version: 0.63.0
|
||||||
uses: actions-rs/cargo@v1
|
env:
|
||||||
with:
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
command: build
|
|
||||||
args: --release --all --features=extra,static-link-openssl
|
- name: Release Nu Binary
|
||||||
|
id: nu
|
||||||
# - name: Strip binaries (nu)
|
run: nu .github/workflows/release-pkg.nu
|
||||||
# run: strip target/release/nu
|
env:
|
||||||
|
OS: ${{ matrix.os }}
|
||||||
# - name: Strip binaries (nu_plugin_inc)
|
REF: ${{ github.ref }}
|
||||||
# run: strip target/release/nu_plugin_inc
|
TARGET: ${{ matrix.target }}
|
||||||
|
_EXTRA_: ${{ matrix.extra }}
|
||||||
# - name: Strip binaries (nu_plugin_match)
|
TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}
|
||||||
# run: strip target/release/nu_plugin_match
|
|
||||||
|
# REF: https://github.com/marketplace/actions/gh-release
|
||||||
# - name: Strip binaries (nu_plugin_textview)
|
- name: Publish Archive
|
||||||
# run: strip target/release/nu_plugin_textview
|
uses: softprops/action-gh-release@v1
|
||||||
|
if: ${{ startsWith(github.ref, 'refs/tags/') }}
|
||||||
# - name: Strip binaries (nu_plugin_binaryview)
|
with:
|
||||||
# run: strip target/release/nu_plugin_binaryview
|
draft: true
|
||||||
|
files: ${{ steps.nu.outputs.archive }}
|
||||||
# - name: Strip binaries (nu_plugin_chart_bar)
|
env:
|
||||||
# run: strip target/release/nu_plugin_chart_bar
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_chart_line)
|
|
||||||
# run: strip target/release/nu_plugin_chart_line
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_bson)
|
|
||||||
# run: strip target/release/nu_plugin_from_bson
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_sqlite)
|
|
||||||
# run: strip target/release/nu_plugin_from_sqlite
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_mp4)
|
|
||||||
# run: strip target/release/nu_plugin_from_mp4
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_query_json)
|
|
||||||
# run: strip target/release/nu_plugin_query_json
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_s3)
|
|
||||||
# run: strip target/release/nu_plugin_s3
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_selector)
|
|
||||||
# run: strip target/release/nu_plugin_selector
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_start)
|
|
||||||
# run: strip target/release/nu_plugin_start
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_to_bson)
|
|
||||||
# run: strip target/release/nu_plugin_to_bson
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_to_sqlite)
|
|
||||||
# run: strip target/release/nu_plugin_to_sqlite
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_tree)
|
|
||||||
# run: strip target/release/nu_plugin_tree
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_xpath)
|
|
||||||
# run: strip target/release/nu_plugin_xpath
|
|
||||||
|
|
||||||
- name: Create output directory
|
|
||||||
run: mkdir output
|
|
||||||
|
|
||||||
- name: Copy files to output
|
|
||||||
run: |
|
|
||||||
cp target/release/nu target/release/nu_plugin_* output/
|
|
||||||
cp README.release.txt output/README.txt
|
|
||||||
cp LICENSE output/LICENSE
|
|
||||||
rm output/*.d
|
|
||||||
|
|
||||||
- name: Upload artifact
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: linux
|
|
||||||
path: output/*
|
|
||||||
|
|
||||||
macos:
|
|
||||||
name: Build macOS
|
|
||||||
runs-on: macos-latest
|
|
||||||
steps:
|
|
||||||
- name: Check out code
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Set up cargo
|
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
|
|
||||||
- name: Build
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: build
|
|
||||||
args: --release --all --features=extra,static-link-openssl
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu)
|
|
||||||
# run: strip target/release/nu
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_inc)
|
|
||||||
# run: strip target/release/nu_plugin_inc
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_match)
|
|
||||||
# run: strip target/release/nu_plugin_match
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_textview)
|
|
||||||
# run: strip target/release/nu_plugin_textview
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_binaryview)
|
|
||||||
# run: strip target/release/nu_plugin_binaryview
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_chart_bar)
|
|
||||||
# run: strip target/release/nu_plugin_chart_bar
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_chart_line)
|
|
||||||
# run: strip target/release/nu_plugin_chart_line
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_bson)
|
|
||||||
# run: strip target/release/nu_plugin_from_bson
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_sqlite)
|
|
||||||
# run: strip target/release/nu_plugin_from_sqlite
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_mp4)
|
|
||||||
# run: strip target/release/nu_plugin_from_mp4
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_query_json)
|
|
||||||
# run: strip target/release/nu_plugin_query_json
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_s3)
|
|
||||||
# run: strip target/release/nu_plugin_s3
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_selector)
|
|
||||||
# run: strip target/release/nu_plugin_selector
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_start)
|
|
||||||
# run: strip target/release/nu_plugin_start
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_to_bson)
|
|
||||||
# run: strip target/release/nu_plugin_to_bson
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_to_sqlite)
|
|
||||||
# run: strip target/release/nu_plugin_to_sqlite
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_tree)
|
|
||||||
# run: strip target/release/nu_plugin_tree
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_xpath)
|
|
||||||
# run: strip target/release/nu_plugin_xpath
|
|
||||||
|
|
||||||
- name: Create output directory
|
|
||||||
run: mkdir output
|
|
||||||
|
|
||||||
- name: Copy files to output
|
|
||||||
run: |
|
|
||||||
cp target/release/nu target/release/nu_plugin_* output/
|
|
||||||
cp README.release.txt output/README.txt
|
|
||||||
cp LICENSE output/LICENSE
|
|
||||||
rm output/*.d
|
|
||||||
|
|
||||||
- name: Upload artifact
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: macos
|
|
||||||
path: output/*
|
|
||||||
|
|
||||||
windows:
|
|
||||||
name: Build Windows
|
|
||||||
runs-on: windows-latest
|
|
||||||
steps:
|
|
||||||
- name: Check out code
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Set up cargo
|
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
profile: minimal
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
|
|
||||||
- name: Add cargo-wix subcommand
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: install
|
|
||||||
args: cargo-wix --version 0.3.1
|
|
||||||
|
|
||||||
- name: Build
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: build
|
|
||||||
args: --release --all --features=extra,static-link-openssl
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu.exe)
|
|
||||||
# run: strip target/release/nu.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_inc.exe)
|
|
||||||
# run: strip target/release/nu_plugin_inc.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_match.exe)
|
|
||||||
# run: strip target/release/nu_plugin_match.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_textview.exe)
|
|
||||||
# run: strip target/release/nu_plugin_textview.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_binaryview.exe)
|
|
||||||
# run: strip target/release/nu_plugin_binaryview.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_chart_bar.exe)
|
|
||||||
# run: strip target/release/nu_plugin_chart_bar.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_chart_line.exe)
|
|
||||||
# run: strip target/release/nu_plugin_chart_line.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_bson.exe)
|
|
||||||
# run: strip target/release/nu_plugin_from_bson.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_sqlite.exe)
|
|
||||||
# run: strip target/release/nu_plugin_from_sqlite.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_from_mp4.exe)
|
|
||||||
# run: strip target/release/nu_plugin_from_mp4.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_query_json.exe)
|
|
||||||
# run: strip target/release/nu_plugin_query_json.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_s3.exe)
|
|
||||||
# run: strip target/release/nu_plugin_s3.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_selector.exe)
|
|
||||||
# run: strip target/release/nu_plugin_selector.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_start.exe)
|
|
||||||
# run: strip target/release/nu_plugin_start.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_to_bson.exe)
|
|
||||||
# run: strip target/release/nu_plugin_to_bson.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_to_sqlite.exe)
|
|
||||||
# run: strip target/release/nu_plugin_to_sqlite.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_tree.exe)
|
|
||||||
# run: strip target/release/nu_plugin_tree.exe
|
|
||||||
|
|
||||||
# - name: Strip binaries (nu_plugin_xpath.exe)
|
|
||||||
# run: strip target/release/nu_plugin_xpath.exe
|
|
||||||
|
|
||||||
- name: Create output directory
|
|
||||||
run: mkdir output
|
|
||||||
|
|
||||||
- name: Download Less Binary
|
|
||||||
run: Invoke-WebRequest -Uri "https://github.com/jftuga/less-Windows/releases/download/less-v562.0/less.exe" -OutFile "target\release\less.exe"
|
|
||||||
|
|
||||||
- name: Download Less License
|
|
||||||
run: Invoke-WebRequest -Uri "https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE" -OutFile "target\release\LICENSE-for-less.txt"
|
|
||||||
|
|
||||||
- name: Copy files to output
|
|
||||||
run: |
|
|
||||||
cp target\release\nu.exe output\
|
|
||||||
cp LICENSE output\
|
|
||||||
cp target\release\LICENSE-for-less.txt output\
|
|
||||||
cp target\release\nu_plugin_*.exe output\
|
|
||||||
cp README.release.txt output\README.txt
|
|
||||||
cp target\release\less.exe output\
|
|
||||||
# Note: If the version of `less.exe` needs to be changed, update this URL
|
|
||||||
# Similarly, if `less.exe` is checked into the repo, copy from the local path here
|
|
||||||
# moved this stuff down to create wix after we download less
|
|
||||||
|
|
||||||
- name: Create msi with wix
|
|
||||||
uses: actions-rs/cargo@v1
|
|
||||||
with:
|
|
||||||
command: wix
|
|
||||||
args: --no-build --nocapture --output target\wix\nushell-windows.msi
|
|
||||||
|
|
||||||
- name: Upload installer
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: windows-installer
|
|
||||||
path: target\wix\nushell-windows.msi
|
|
||||||
|
|
||||||
- name: Upload zip
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: windows-zip
|
|
||||||
path: output\*
|
|
||||||
|
|
||||||
release:
|
|
||||||
name: Publish Release
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- linux
|
|
||||||
- macos
|
|
||||||
- windows
|
|
||||||
steps:
|
|
||||||
- name: Check out code
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Determine Release Info
|
|
||||||
id: info
|
|
||||||
env:
|
|
||||||
GITHUB_REF: ${{ github.ref }}
|
|
||||||
run: |
|
|
||||||
VERSION=${GITHUB_REF##*/}
|
|
||||||
MAJOR=${VERSION%%.*}
|
|
||||||
MINOR=${VERSION%.*}
|
|
||||||
MINOR=${MINOR#*.}
|
|
||||||
PATCH=${VERSION##*.}
|
|
||||||
echo "::set-output name=version::${VERSION}"
|
|
||||||
echo "::set-output name=linuxdir::nu_${MAJOR}_${MINOR}_${PATCH}_linux"
|
|
||||||
echo "::set-output name=macosdir::nu_${MAJOR}_${MINOR}_${PATCH}_macOS"
|
|
||||||
echo "::set-output name=windowsdir::nu_${MAJOR}_${MINOR}_${PATCH}_windows"
|
|
||||||
echo "::set-output name=innerdir::nushell-${VERSION}"
|
|
||||||
|
|
||||||
- name: Create Release Draft
|
|
||||||
id: create_release
|
|
||||||
uses: actions/create-release@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
tag_name: ${{ github.ref }}
|
|
||||||
release_name: ${{ steps.info.outputs.version }} Release
|
|
||||||
draft: true
|
|
||||||
|
|
||||||
- name: Create Linux Directory
|
|
||||||
run: mkdir -p ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}
|
|
||||||
|
|
||||||
- name: Download Linux Artifacts
|
|
||||||
uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: linux
|
|
||||||
path: ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}
|
|
||||||
|
|
||||||
- name: Restore Linux File Modes
|
|
||||||
run: |
|
|
||||||
chmod 755 ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}/nu*
|
|
||||||
|
|
||||||
- name: Create Linux tarball
|
|
||||||
run: tar -zcvf ${{ steps.info.outputs.linuxdir }}.tar.gz ${{ steps.info.outputs.linuxdir }}
|
|
||||||
|
|
||||||
- name: Upload Linux Artifact
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./${{ steps.info.outputs.linuxdir }}.tar.gz
|
|
||||||
asset_name: ${{ steps.info.outputs.linuxdir }}.tar.gz
|
|
||||||
asset_content_type: application/gzip
|
|
||||||
|
|
||||||
- name: Create macOS Directory
|
|
||||||
run: mkdir -p ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}
|
|
||||||
|
|
||||||
- name: Download macOS Artifacts
|
|
||||||
uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: macos
|
|
||||||
path: ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}
|
|
||||||
|
|
||||||
- name: Restore macOS File Modes
|
|
||||||
run: chmod 755 ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}/nu*
|
|
||||||
|
|
||||||
- name: Create macOS Archive
|
|
||||||
run: zip -r ${{ steps.info.outputs.macosdir }}.zip ${{ steps.info.outputs.macosdir }}
|
|
||||||
|
|
||||||
- name: Upload macOS Artifact
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./${{ steps.info.outputs.macosdir }}.zip
|
|
||||||
asset_name: ${{ steps.info.outputs.macosdir }}.zip
|
|
||||||
asset_content_type: application/zip
|
|
||||||
|
|
||||||
- name: Create Windows Directory
|
|
||||||
run: mkdir -p ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}
|
|
||||||
|
|
||||||
- name: Download Windows zip
|
|
||||||
uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: windows-zip
|
|
||||||
path: ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}
|
|
||||||
|
|
||||||
- name: Show Windows Artifacts
|
|
||||||
run: ls -la ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}
|
|
||||||
|
|
||||||
- name: Create macOS Archive
|
|
||||||
run: zip -r ${{ steps.info.outputs.windowsdir }}.zip ${{ steps.info.outputs.windowsdir }}
|
|
||||||
|
|
||||||
- name: Upload Windows zip
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./${{ steps.info.outputs.windowsdir }}.zip
|
|
||||||
asset_name: ${{ steps.info.outputs.windowsdir }}.zip
|
|
||||||
asset_content_type: application/zip
|
|
||||||
|
|
||||||
- name: Download Windows installer
|
|
||||||
uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
name: windows-installer
|
|
||||||
path: ./
|
|
||||||
|
|
||||||
- name: Upload Windows installer
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./nushell-windows.msi
|
|
||||||
asset_name: ${{ steps.info.outputs.windowsdir }}.msi
|
|
||||||
asset_content_type: application/x-msi
|
|
||||||
|
5
.gitignore
vendored
@ -23,4 +23,9 @@ debian/nu/
|
|||||||
.vscode/*
|
.vscode/*
|
||||||
|
|
||||||
# Helix configuration folder
|
# Helix configuration folder
|
||||||
|
.helix/*
|
||||||
.helix
|
.helix
|
||||||
|
|
||||||
|
# Coverage tools
|
||||||
|
lcov.info
|
||||||
|
tarpaulin-report.html
|
||||||
|
@ -1,21 +1,14 @@
|
|||||||
# Contributing
|
# Contributing
|
||||||
|
|
||||||
Welcome to nushell!
|
Welcome to Nushell!
|
||||||
|
|
||||||
*Note: for a more complete guide see [The nu contributor book](https://www.nushell.sh/contributor-book/)*
|
|
||||||
|
|
||||||
For speedy contributions open it in Gitpod, nu will be pre-installed with the latest build in a VSCode like editor all from your browser.
|
|
||||||
|
|
||||||
[](https://gitpod.io/#https://github.com/nushell/nushell)
|
|
||||||
|
|
||||||
To get live support from the community see our [Discord](https://discordapp.com/invite/NtAbbGn), [Twitter](https://twitter.com/nu_shell) or file an issue or feature request here on [GitHub](https://github.com/nushell/nushell/issues/new/choose)!
|
To get live support from the community see our [Discord](https://discordapp.com/invite/NtAbbGn), [Twitter](https://twitter.com/nu_shell) or file an issue or feature request here on [GitHub](https://github.com/nushell/nushell/issues/new/choose)!
|
||||||
<!--WIP-->
|
|
||||||
|
|
||||||
## Developing
|
## Developing
|
||||||
|
|
||||||
### Set up
|
### Setup
|
||||||
|
|
||||||
This is no different than other Rust projects.
|
Nushell requires a recent Rust toolchain and some dependencies; [refer to the Nu Book for up-to-date requirements](https://www.nushell.sh/book/installation.html#build-from-source). After installing dependencies, you should be able to clone+build Nu like any other Rust project:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone https://github.com/nushell/nushell
|
git clone https://github.com/nushell/nushell
|
||||||
@ -28,24 +21,24 @@ cargo build
|
|||||||
- Build and run Nushell:
|
- Build and run Nushell:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo build --release && cargo run --release
|
cargo run
|
||||||
```
|
```
|
||||||
|
|
||||||
- Build and run with extra features:
|
- Build and run with extra features. Currently extra features include dataframes and sqlite database support.
|
||||||
```shell
|
```shell
|
||||||
cargo build --release --features=extra && cargo run --release --features=extra
|
cargo run --features=extra
|
||||||
```
|
```
|
||||||
|
|
||||||
- Run Clippy on Nushell:
|
- Run Clippy on Nushell:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo clippy --all --features=stable
|
cargo clippy --workspace --features=extra -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
|
||||||
```
|
```
|
||||||
|
|
||||||
- Run all tests:
|
- Run all tests:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo test --all --features=stable
|
cargo test --workspace --features=extra
|
||||||
```
|
```
|
||||||
|
|
||||||
- Run all tests for a specific command
|
- Run all tests for a specific command
|
||||||
@ -71,5 +64,5 @@ cargo build
|
|||||||
- To view verbose logs when developing, enable the `trace` log level.
|
- To view verbose logs when developing, enable the `trace` log level.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo build --release --features=extra && cargo run --release --features=extra -- --log-level trace
|
cargo run --release --features=extra -- --log-level trace
|
||||||
```
|
```
|
||||||
|
928
Cargo.lock
generated
54
Cargo.toml
@ -10,8 +10,8 @@ license = "MIT"
|
|||||||
name = "nu"
|
name = "nu"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
repository = "https://github.com/nushell/nushell"
|
repository = "https://github.com/nushell/nushell"
|
||||||
rust-version = "1.59"
|
rust-version = "1.60"
|
||||||
version = "0.62.0"
|
version = "0.64.0"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
@ -32,33 +32,38 @@ members = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
chrono = "0.4.19"
|
chrono = { version = "0.4.19", features = ["serde"] }
|
||||||
crossterm = "0.23.0"
|
crossterm = "0.23.0"
|
||||||
ctrlc = "3.2.1"
|
ctrlc = "3.2.1"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
miette = "4.5.0"
|
miette = "4.5.0"
|
||||||
nu-ansi-term = "0.45.1"
|
nu-ansi-term = "0.46.0"
|
||||||
nu-cli = { path="./crates/nu-cli", version = "0.62.0" }
|
nu-cli = { path="./crates/nu-cli", version = "0.64.0" }
|
||||||
nu-color-config = { path = "./crates/nu-color-config", version = "0.62.0" }
|
nu-color-config = { path = "./crates/nu-color-config", version = "0.64.0" }
|
||||||
nu-command = { path="./crates/nu-command", version = "0.62.0" }
|
nu-command = { path="./crates/nu-command", version = "0.64.0" }
|
||||||
nu-engine = { path="./crates/nu-engine", version = "0.62.0" }
|
nu-engine = { path="./crates/nu-engine", version = "0.64.0" }
|
||||||
nu-json = { path="./crates/nu-json", version = "0.62.0" }
|
nu-json = { path="./crates/nu-json", version = "0.64.0" }
|
||||||
nu-parser = { path="./crates/nu-parser", version = "0.62.0" }
|
nu-parser = { path="./crates/nu-parser", version = "0.64.0" }
|
||||||
nu-path = { path="./crates/nu-path", version = "0.62.0" }
|
nu-path = { path="./crates/nu-path", version = "0.64.0" }
|
||||||
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.62.0" }
|
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.64.0" }
|
||||||
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.62.0" }
|
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.64.0" }
|
||||||
nu-protocol = { path = "./crates/nu-protocol", version = "0.62.0" }
|
nu-protocol = { path = "./crates/nu-protocol", version = "0.64.0" }
|
||||||
nu-system = { path = "./crates/nu-system", version = "0.62.0" }
|
nu-system = { path = "./crates/nu-system", version = "0.64.0" }
|
||||||
nu-table = { path = "./crates/nu-table", version = "0.62.0" }
|
nu-table = { path = "./crates/nu-table", version = "0.64.0" }
|
||||||
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.62.0" }
|
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.64.0" }
|
||||||
openssl = { version = "0.10.38", features = ["vendored"], optional = true }
|
nu-utils = { path = "./crates/nu-utils", version = "0.64.0" }
|
||||||
|
reedline = { version = "0.7.0", features = ["bashisms", "sqlite"]}
|
||||||
pretty_env_logger = "0.4.0"
|
pretty_env_logger = "0.4.0"
|
||||||
rayon = "1.5.1"
|
rayon = "1.5.1"
|
||||||
reedline = { version = "0.5.0", features = ["bashisms"]}
|
|
||||||
is_executable = "1.0.1"
|
is_executable = "1.0.1"
|
||||||
|
|
||||||
|
[target.'cfg(not(target_os = "windows"))'.dependencies]
|
||||||
|
# Our dependencies don't use OpenSSL on Windows
|
||||||
|
openssl = { version = "0.10.38", features = ["vendored"], optional = true }
|
||||||
|
signal-hook = { version = "0.3.14", default-features = false }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
nu-test-support = { path="./crates/nu-test-support", version = "0.62.0" }
|
nu-test-support = { path="./crates/nu-test-support", version = "0.64.0" }
|
||||||
tempfile = "3.2.0"
|
tempfile = "3.2.0"
|
||||||
assert_cmd = "2.0.2"
|
assert_cmd = "2.0.2"
|
||||||
pretty_assertions = "1.0.0"
|
pretty_assertions = "1.0.0"
|
||||||
@ -68,7 +73,7 @@ rstest = "0.12.0"
|
|||||||
itertools = "0.10.3"
|
itertools = "0.10.3"
|
||||||
|
|
||||||
[target.'cfg(windows)'.build-dependencies]
|
[target.'cfg(windows)'.build-dependencies]
|
||||||
embed-resource = "1"
|
winres = "0.1"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
plugin = ["nu-plugin", "nu-cli/plugin", "nu-parser/plugin", "nu-command/plugin", "nu-protocol/plugin", "nu-engine/plugin"]
|
plugin = ["nu-plugin", "nu-cli/plugin", "nu-parser/plugin", "nu-command/plugin", "nu-protocol/plugin", "nu-engine/plugin"]
|
||||||
@ -103,6 +108,13 @@ inherits = "release"
|
|||||||
strip = false
|
strip = false
|
||||||
debug = true
|
debug = true
|
||||||
|
|
||||||
|
# build with `cargo build --profile ci`
|
||||||
|
# to analyze performance with tooling like linux perf
|
||||||
|
[profile.ci]
|
||||||
|
inherits = "dev"
|
||||||
|
strip = false
|
||||||
|
debug = false
|
||||||
|
|
||||||
# Main nu binary
|
# Main nu binary
|
||||||
[[bin]]
|
[[bin]]
|
||||||
name = "nu"
|
name = "nu"
|
||||||
|
249
README.md
@ -1,5 +1,4 @@
|
|||||||
# README
|
# Nushell <!-- omit in toc -->
|
||||||
|
|
||||||
[](https://crates.io/crates/nu)
|
[](https://crates.io/crates/nu)
|
||||||

|

|
||||||
[](https://discord.gg/NtAbbGn)
|
[](https://discord.gg/NtAbbGn)
|
||||||
@ -8,128 +7,100 @@
|
|||||||

|

|
||||||

|

|
||||||
|
|
||||||
## Nushell
|
|
||||||
|
|
||||||
A new type of shell.
|
A new type of shell.
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
|
## Table of Contents <!-- omit in toc -->
|
||||||
|
|
||||||
|
- [Status](#status)
|
||||||
|
- [Learning About Nu](#learning-about-nu)
|
||||||
|
- [Installation](#installation)
|
||||||
|
- [Philosophy](#philosophy)
|
||||||
|
- [Pipelines](#pipelines)
|
||||||
|
- [Opening files](#opening-files)
|
||||||
|
- [Plugins](#plugins)
|
||||||
|
- [Goals](#goals)
|
||||||
|
- [Progress](#progress)
|
||||||
|
- [Officially Supported By](#officially-supported-by)
|
||||||
|
- [Contributing](#contributing)
|
||||||
|
- [License](#license)
|
||||||
|
|
||||||
## Status
|
## Status
|
||||||
|
|
||||||
This project has reached a minimum-viable product level of quality.
|
This project has reached a minimum-viable-product level of quality. Many people use it as their daily driver, but it may be unstable for some commands. Nu's design is subject to change as it matures.
|
||||||
While contributors dogfood it as their daily driver, it may be unstable for some commands.
|
|
||||||
Future releases will work to fill out missing features and improve stability.
|
|
||||||
Its design is also subject to change as it matures.
|
|
||||||
|
|
||||||
Nu comes with a set of built-in commands (listed below).
|
## Learning About Nu
|
||||||
If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and macOS), correctly passing through stdin, stdout, and stderr, so things like your daily git workflows and even `vim` will work just fine.
|
|
||||||
|
|
||||||
## Learning more
|
The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/book/command_reference.html), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).
|
||||||
|
|
||||||
There are a few good resources to learn about Nu.
|
We're also active on [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell); come and chat with us!
|
||||||
There is a [book](https://www.nushell.sh/book/) about Nu that is currently in progress.
|
|
||||||
The book focuses on using Nu and its core concepts.
|
|
||||||
|
|
||||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://www.nushell.sh/contributor-book/) to help you get started.
|
|
||||||
There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
|
||||||
|
|
||||||
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
|
|
||||||
|
|
||||||
You can also find information on more specific topics in our [cookbook](https://www.nushell.sh/cookbook/).
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
### Local
|
To quickly install Nu:
|
||||||
|
|
||||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
|
```bash
|
||||||
|
# Linux and macOS
|
||||||
To build Nu, you will need to use the **latest stable (1.59 or later)** version of the compiler.
|
brew install nushell
|
||||||
|
# Windows
|
||||||
Required dependencies:
|
|
||||||
|
|
||||||
- pkg-config and libssl (only needed on Linux)
|
|
||||||
- On Debian/Ubuntu: `apt install pkg-config libssl-dev`
|
|
||||||
|
|
||||||
Optional dependencies:
|
|
||||||
|
|
||||||
- To use Nu with all possible optional features enabled, you'll also need the following:
|
|
||||||
- On Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev`
|
|
||||||
|
|
||||||
To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the latest stable compiler via `rustup install stable`):
|
|
||||||
|
|
||||||
For Windows users, you may also need to install the [Microsoft Visual C++ 2015 Redistributables](https://docs.microsoft.com/cpp/windows/latest-supported-vc-redist).
|
|
||||||
|
|
||||||
```shell
|
|
||||||
cargo install nu
|
|
||||||
```
|
|
||||||
|
|
||||||
To install Nu via the [Windows Package Manager](https://aka.ms/winget-cli):
|
|
||||||
|
|
||||||
```shell
|
|
||||||
winget install nushell
|
winget install nushell
|
||||||
```
|
```
|
||||||
|
|
||||||
To install Nu via the [Chocolatey](https://chocolatey.org) package manager:
|
To use `Nu` in Github Action, check [setup-nu](https://github.com/marketplace/actions/setup-nu) for more detail.
|
||||||
|
|
||||||
```shell
|
Detailed installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). Nu is available via many package managers:
|
||||||
choco install nushell
|
|
||||||
```
|
|
||||||
|
|
||||||
You can also build Nu yourself with all the bells and whistles (be sure to have installed the [dependencies](https://www.nushell.sh/book/installation.html#dependencies) for your platform), once you have checked out this repo with git:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
cargo build --workspace --features=extra
|
|
||||||
```
|
|
||||||
### Packaging status
|
|
||||||
|
|
||||||
[](https://repology.org/project/nushell/versions)
|
[](https://repology.org/project/nushell/versions)
|
||||||
|
|
||||||
#### Fedora
|
|
||||||
|
|
||||||
[COPR repo](https://copr.fedorainfracloud.org/coprs/atim/nushell/): `sudo dnf copr enable atim/nushell -y && sudo dnf install nushell -y`
|
|
||||||
|
|
||||||
## Philosophy
|
## Philosophy
|
||||||
|
|
||||||
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools.
|
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools.
|
||||||
Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure.
|
Rather than thinking of files and data as raw streams of text, Nu looks at each input as something with structure.
|
||||||
For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory.
|
For example, when you list the contents of a directory what you get back is a table of rows, where each row represents an item in that directory.
|
||||||
These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
|
These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
|
||||||
|
|
||||||
### Pipelines
|
### Pipelines
|
||||||
|
|
||||||
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps.
|
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps.
|
||||||
Nu takes this a step further and builds heavily on the idea of _pipelines_.
|
Nu takes this a step further and builds heavily on the idea of _pipelines_.
|
||||||
Just as the Unix philosophy, Nu allows commands to output to stdout and read from stdin.
|
As in the Unix philosophy, Nu allows commands to output to stdout and read from stdin.
|
||||||
Additionally, commands can output structured data (you can think of this as a third kind of stream).
|
Additionally, commands can output structured data (you can think of this as a third kind of stream).
|
||||||
Commands that work in the pipeline fit into one of three categories:
|
Commands that work in the pipeline fit into one of three categories:
|
||||||
|
|
||||||
- Commands that produce a stream (e.g., `ls`)
|
- Commands that produce a stream (e.g., `ls`)
|
||||||
- Commands that filter a stream (eg, `where type == "Dir"`)
|
- Commands that filter a stream (eg, `where type == "dir"`)
|
||||||
- Commands that consume the output of the pipeline (e.g., `autoview`)
|
- Commands that consume the output of the pipeline (e.g., `table`)
|
||||||
|
|
||||||
Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
|
Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> ls | where type == "Dir" | autoview
|
> ls | where type == "dir" | table
|
||||||
───┬────────┬──────┬───────┬──────────────
|
╭────┬──────────┬──────┬─────────┬───────────────╮
|
||||||
# │ name │ type │ size │ modified
|
│ # │ name │ type │ size │ modified │
|
||||||
───┼────────┼──────┼───────┼──────────────
|
├────┼──────────┼──────┼─────────┼───────────────┤
|
||||||
0 │ assets │ Dir │ 128 B │ 5 months ago
|
│ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
|
||||||
1 │ crates │ Dir │ 704 B │ 50 mins ago
|
│ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
|
||||||
2 │ debian │ Dir │ 352 B │ 5 months ago
|
│ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||||
3 │ docs │ Dir │ 192 B │ 50 mins ago
|
│ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
|
||||||
4 │ images │ Dir │ 160 B │ 5 months ago
|
│ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
|
||||||
5 │ src │ Dir │ 128 B │ 1 day ago
|
│ 5 │ images │ dir │ 0 B │ 2 weeks ago │
|
||||||
6 │ target │ Dir │ 160 B │ 5 days ago
|
│ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
|
||||||
7 │ tests │ Dir │ 192 B │ 3 months ago
|
│ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
|
||||||
───┴────────┴──────┴───────┴──────────────
|
│ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||||
|
│ 9 │ target │ dir │ 0 B │ a day ago │
|
||||||
|
│ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||||
|
│ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
|
||||||
|
╰────┴──────────┴──────┴─────────┴───────────────╯
|
||||||
```
|
```
|
||||||
|
|
||||||
Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed.
|
Because most of the time you'll want to see the output of a pipeline, `table` is assumed.
|
||||||
We could have also written the above:
|
We could have also written the above:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> ls | where type == Dir
|
> ls | where type == "dir"
|
||||||
```
|
```
|
||||||
|
|
||||||
Being able to use the same commands and compose them differently is an important philosophy in Nu.
|
Being able to use the same commands and compose them differently is an important philosophy in Nu.
|
||||||
@ -137,15 +108,13 @@ For example, we could use the built-in `ps` command to get a list of the running
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
> ps | where cpu > 0
|
> ps | where cpu > 0
|
||||||
───┬────────┬───────────────────┬──────────┬─────────┬──────────┬──────────
|
╭───┬───────┬───────────┬───────┬───────────┬───────────╮
|
||||||
# │ pid │ name │ status │ cpu │ mem │ virtual
|
│ # │ pid │ name │ cpu │ mem │ virtual │
|
||||||
───┼────────┼───────────────────┼──────────┼─────────┼──────────┼──────────
|
├───┼───────┼───────────┼───────┼───────────┼───────────┤
|
||||||
0 │ 435 │ irq/142-SYNA327 │ Sleeping │ 7.5699 │ 0 B │ 0 B
|
│ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
|
||||||
1 │ 1609 │ pulseaudio │ Sleeping │ 6.5605 │ 10.6 MB │ 2.3 GB
|
│ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
|
||||||
2 │ 1625 │ gnome-shell │ Sleeping │ 6.5684 │ 639.6 MB │ 7.3 GB
|
│ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
|
||||||
3 │ 2202 │ Web Content │ Sleeping │ 6.8157 │ 320.8 MB │ 3.0 GB
|
╰───┴───────┴───────────┴───────┴───────────┴───────────╯
|
||||||
4 │ 328788 │ nu_plugin_core_ps │ Sleeping │ 92.5750 │ 5.9 MB │ 633.2 MB
|
|
||||||
───┴────────┴───────────────────┴──────────┴─────────┴──────────┴──────────
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Opening files
|
### Opening files
|
||||||
@ -155,72 +124,49 @@ For example, you can load a .toml file as structured data and explore it:
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
> open Cargo.toml
|
> open Cargo.toml
|
||||||
────────────────────┬───────────────────────────
|
╭──────────────────┬────────────────────╮
|
||||||
bin │ [table 18 rows]
|
│ bin │ [table 1 row] │
|
||||||
build-dependencies │ [row serde toml]
|
│ dependencies │ {record 24 fields} │
|
||||||
dependencies │ [row 29 columns]
|
│ dev-dependencies │ {record 8 fields} │
|
||||||
dev-dependencies │ [row nu-test-support]
|
│ features │ {record 10 fields} │
|
||||||
features │ [row 19 columns]
|
│ package │ {record 13 fields} │
|
||||||
package │ [row 12 columns]
|
│ profile │ {record 3 fields} │
|
||||||
workspace │ [row members]
|
│ target │ {record 2 fields} │
|
||||||
────────────────────┴───────────────────────────
|
│ workspace │ {record 1 field} │
|
||||||
|
╰──────────────────┴────────────────────╯
|
||||||
```
|
```
|
||||||
|
|
||||||
We can pipeline this into a command that gets the contents of one of the columns:
|
We can pipe this into a command that gets the contents of one of the columns:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> open Cargo.toml | get package
|
> open Cargo.toml | get package
|
||||||
───────────────┬────────────────────────────────────
|
╭───────────────┬────────────────────────────────────╮
|
||||||
authors │ [table 1 rows]
|
│ authors │ [list 1 item] │
|
||||||
default-run │ nu
|
│ default-run │ nu │
|
||||||
description │ A new type of shell
|
│ description │ A new type of shell │
|
||||||
documentation │ https://www.nushell.sh/book/
|
│ documentation │ https://www.nushell.sh/book/ │
|
||||||
edition │ 2018
|
│ edition │ 2018 │
|
||||||
exclude │ [table 1 rows]
|
│ exclude │ [list 1 item] │
|
||||||
homepage │ https://www.nushell.sh
|
│ homepage │ https://www.nushell.sh │
|
||||||
license │ MIT
|
│ license │ MIT │
|
||||||
name │ nu
|
│ name │ nu │
|
||||||
readme │ README.md
|
│ readme │ README.md │
|
||||||
repository │ https://github.com/nushell/nushell
|
│ repository │ https://github.com/nushell/nushell │
|
||||||
version │ 0.32.0
|
│ rust-version │ 1.60 │
|
||||||
───────────────┴────────────────────────────────────
|
│ version │ 0.63.1 │
|
||||||
|
╰───────────────┴────────────────────────────────────╯
|
||||||
```
|
```
|
||||||
|
|
||||||
Finally, we can use commands outside of Nu once we have the data we want:
|
And if needed we can drill down further:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
> open Cargo.toml | get package.version
|
> open Cargo.toml | get package.version
|
||||||
0.32.0
|
0.63.1
|
||||||
```
|
```
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
Nu has early support for configuring the shell. You can refer to the book for a list of [all supported variables](https://www.nushell.sh/book/configuration.html).
|
|
||||||
|
|
||||||
To set one of these variables, you can use `config set`. For example:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
> config set line_editor.edit_mode "vi"
|
|
||||||
> config set path $nu.path
|
|
||||||
```
|
|
||||||
|
|
||||||
### Shells
|
|
||||||
|
|
||||||
Nu will work inside of a single directory and allow you to navigate around your filesystem by default.
|
|
||||||
Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories simultaneously.
|
|
||||||
|
|
||||||
To do so, use the `enter` command, which will allow you to create a new "shell" and enter it at the specified path.
|
|
||||||
You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells.
|
|
||||||
Once you're done with a shell, you can `exit` it and remove it from the ring buffer.
|
|
||||||
|
|
||||||
Finally, to get a list of all the current shells, you can use the `shells` command.
|
|
||||||
|
|
||||||
### Plugins
|
### Plugins
|
||||||
|
|
||||||
Nu supports plugins that offer additional functionality to the shell and follow the same structured data model that built-in commands use.
|
Nu supports plugins that offer additional functionality to the shell and follow the same structured data model that built-in commands use. There are a few examples in the `crates/nu_plugins_*` directories.
|
||||||
This allows you to extend nu for your needs.
|
|
||||||
|
|
||||||
There are a few examples in the `plugins` directory.
|
|
||||||
|
|
||||||
Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention.
|
Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention.
|
||||||
These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, making it available for use.
|
These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, making it available for use.
|
||||||
@ -231,23 +177,19 @@ If the plugin is a sink, it is given the full vector of final data and is given
|
|||||||
|
|
||||||
Nu adheres closely to a set of goals that make up its design philosophy. As features are added, they are checked against these goals.
|
Nu adheres closely to a set of goals that make up its design philosophy. As features are added, they are checked against these goals.
|
||||||
|
|
||||||
- First and foremost, Nu is cross-platform. Commands and techniques should carry between platforms and offer consistent first-class support for Windows, macOS, and Linux.
|
- First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has first-class support for Windows, macOS, and Linux.
|
||||||
|
|
||||||
- Nu ensures direct compatibility with existing platform-specific executables that make up people's workflows.
|
- Nu ensures compatibility with existing platform-specific executables.
|
||||||
|
|
||||||
- Nu's workflow and tools should have the usability in day-to-day experience of using a shell in 2019 (and beyond).
|
- Nu's workflow and tools should have the usability expected of modern software in 2022 (and beyond).
|
||||||
|
|
||||||
- Nu views data as both structured and unstructured. It is a structured shell like PowerShell.
|
- Nu views data as either structured or unstructured. It is a structured shell like PowerShell.
|
||||||
|
|
||||||
- Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.
|
- Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.
|
||||||
|
|
||||||
## Commands
|
|
||||||
|
|
||||||
You can find a list of Nu commands, complete with documentation, in [quick command references](https://www.nushell.sh/book/command_reference.html).
|
|
||||||
|
|
||||||
## Progress
|
## Progress
|
||||||
|
|
||||||
Nu is in heavy development and will naturally change as it matures and people use it. The chart below isn't meant to be exhaustive, but rather helps give an idea for some of the areas of development and their relative completion:
|
Nu is under heavy development and will naturally change as it matures. The chart below isn't meant to be exhaustive, but it helps give an idea for some of the areas of development and their relative maturity:
|
||||||
|
|
||||||
| Features | Not started | Prototype | MVP | Preview | Mature | Notes |
|
| Features | Not started | Prototype | MVP | Preview | Mature | Notes |
|
||||||
| ------------- | :---------: | :-------: | :-: | :-----: | :----: | -------------------------------------------------------------------- |
|
| ------------- | :---------: | :-------: | :-: | :-----: | :----: | -------------------------------------------------------------------- |
|
||||||
@ -270,20 +212,15 @@ Nu is in heavy development and will naturally change as it matures and people us
|
|||||||
|
|
||||||
Please submit an issue or PR to be added to this list.
|
Please submit an issue or PR to be added to this list.
|
||||||
|
|
||||||
### Integrations
|
|
||||||
- [zoxide](https://github.com/ajeetdsouza/zoxide)
|
- [zoxide](https://github.com/ajeetdsouza/zoxide)
|
||||||
- [starship](https://github.com/starship/starship)
|
- [starship](https://github.com/starship/starship)
|
||||||
- [oh-my-posh](https://ohmyposh.dev)
|
- [oh-my-posh](https://ohmyposh.dev)
|
||||||
- [Couchbase Shell](https://couchbase.sh)
|
- [Couchbase Shell](https://couchbase.sh)
|
||||||
- [virtualenv](https://github.com/pypa/virtualenv)
|
- [virtualenv](https://github.com/pypa/virtualenv)
|
||||||
### Mentions
|
|
||||||
- [The Python Launcher for Unix](https://github.com/brettcannon/python-launcher#how-do-i-get-a-table-of-python-executables-in-nushell)
|
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
See [Contributing](CONTRIBUTING.md) for details.
|
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
|
||||||
|
|
||||||
Thanks to all the people who already contributed!
|
|
||||||
|
|
||||||
<a href="https://github.com/nushell/nushell/graphs/contributors">
|
<a href="https://github.com/nushell/nushell/graphs/contributors">
|
||||||
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=500" />
|
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=500" />
|
||||||
|
Before Width: | Height: | Size: 166 KiB |
Before Width: | Height: | Size: 206 KiB |
Before Width: | Height: | Size: 167 KiB |
Before Width: | Height: | Size: 91 KiB |
Before Width: | Height: | Size: 182 KiB |
Before Width: | Height: | Size: 144 KiB |
Before Width: | Height: | Size: 146 KiB |
Before Width: | Height: | Size: 2.2 KiB |
Before Width: | Height: | Size: 1.6 KiB |
Before Width: | Height: | Size: 40 KiB |
Before Width: | Height: | Size: 68 KiB |
Before Width: | Height: | Size: 77 KiB |
@ -1,49 +0,0 @@
|
|||||||
#include <winver.h>
|
|
||||||
|
|
||||||
#define VER_FILEVERSION 0,59,1,0
|
|
||||||
#define VER_FILEVERSION_STR "0.59.1"
|
|
||||||
|
|
||||||
#define VER_PRODUCTVERSION 0,59,1,0
|
|
||||||
#define VER_PRODUCTVERSION_STR "0.59.1"
|
|
||||||
|
|
||||||
#ifdef RC_INVOKED
|
|
||||||
|
|
||||||
#ifdef DEBUG // TODO: Actually define DEBUG
|
|
||||||
#define VER_DEBUG VS_FF_DEBUG
|
|
||||||
#else
|
|
||||||
#define VER_DEBUG 0
|
|
||||||
#endif
|
|
||||||
|
|
||||||
VS_VERSION_INFO VERSIONINFO
|
|
||||||
FILEVERSION VER_FILEVERSION
|
|
||||||
PRODUCTVERSION VER_PRODUCTVERSION
|
|
||||||
FILEFLAGSMASK VS_FFI_FILEFLAGSMASK
|
|
||||||
FILEFLAGS VER_DEBUG
|
|
||||||
FILEOS VOS__WINDOWS32
|
|
||||||
FILETYPE VFT_APP
|
|
||||||
FILESUBTYPE VFT2_UNKNOWN
|
|
||||||
BEGIN
|
|
||||||
BLOCK "StringFileInfo"
|
|
||||||
BEGIN
|
|
||||||
BLOCK "040904b0"
|
|
||||||
BEGIN
|
|
||||||
VALUE "CompanyName", "nushell"
|
|
||||||
VALUE "FileDescription", "Nushell"
|
|
||||||
VALUE "FileVersion", VER_FILEVERSION_STR
|
|
||||||
VALUE "InternalName", "nu.exe"
|
|
||||||
VALUE "LegalCopyright", "Copyright (C) 2022"
|
|
||||||
VALUE "OriginalFilename", "nu.exe"
|
|
||||||
VALUE "ProductName", "Nushell"
|
|
||||||
VALUE "ProductVersion", VER_PRODUCTVERSION_STR
|
|
||||||
END
|
|
||||||
END
|
|
||||||
|
|
||||||
BLOCK "VarFileInfo"
|
|
||||||
BEGIN
|
|
||||||
VALUE "Translation", 0x409, 1200
|
|
||||||
END
|
|
||||||
END
|
|
||||||
|
|
||||||
#define IDI_ICON 0x101
|
|
||||||
IDI_ICON ICON "assets/nu_logo.ico"
|
|
||||||
#endif
|
|
8
build.rs
@ -1,6 +1,12 @@
|
|||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
fn main() {
|
fn main() {
|
||||||
embed_resource::compile_for("assets/nushell.rc", &["nu"])
|
let mut res = winres::WindowsResource::new();
|
||||||
|
res.set("ProductName", "Nushell");
|
||||||
|
res.set("FileDescription", "Nushell");
|
||||||
|
res.set("LegalCopyright", "Copyright (C) 2022");
|
||||||
|
res.set_icon("assets/nu_logo.ico");
|
||||||
|
res.compile()
|
||||||
|
.expect("Failed to run the Windows resource compiler (rc.exe)");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(windows))]
|
#[cfg(not(windows))]
|
||||||
|
@ -4,21 +4,21 @@ description = "CLI-related functionality for Nushell"
|
|||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-cli"
|
name = "nu-cli"
|
||||||
version = "0.62.0"
|
version = "0.64.0"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
nu-test-support = { path="../nu-test-support", version = "0.62.0" }
|
nu-test-support = { path="../nu-test-support", version = "0.64.0" }
|
||||||
nu-command = { path = "../nu-command", version = "0.62.0" }
|
nu-command = { path = "../nu-command", version = "0.64.0" }
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-engine = { path = "../nu-engine", version = "0.62.0" }
|
nu-engine = { path = "../nu-engine", version = "0.64.0" }
|
||||||
nu-path = { path = "../nu-path", version = "0.62.0" }
|
nu-path = { path = "../nu-path", version = "0.64.0" }
|
||||||
nu-parser = { path = "../nu-parser", version = "0.62.0" }
|
nu-parser = { path = "../nu-parser", version = "0.64.0" }
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
|
nu-protocol = { path = "../nu-protocol", version = "0.64.0" }
|
||||||
nu-utils = { path = "../nu-utils", version = "0.62.0" }
|
nu-utils = { path = "../nu-utils", version = "0.64.0" }
|
||||||
nu-ansi-term = "0.45.1"
|
nu-ansi-term = "0.46.0"
|
||||||
nu-color-config = { path = "../nu-color-config", version = "0.62.0" }
|
nu-color-config = { path = "../nu-color-config", version = "0.64.0" }
|
||||||
reedline = { version = "0.5.0", features = ["bashisms"]}
|
reedline = { version = "0.7.0", features = ["bashisms", "sqlite"]}
|
||||||
crossterm = "0.23.0"
|
crossterm = "0.23.0"
|
||||||
miette = { version = "4.5.0", features = ["fancy"] }
|
miette = { version = "4.5.0", features = ["fancy"] }
|
||||||
thiserror = "1.0.29"
|
thiserror = "1.0.29"
|
||||||
@ -26,6 +26,8 @@ fuzzy-matcher = "0.3.7"
|
|||||||
|
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
is_executable = "1.0.1"
|
is_executable = "1.0.1"
|
||||||
|
chrono = "0.4.19"
|
||||||
|
sysinfo = "0.24.1"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
plugin = []
|
plugin = []
|
||||||
|
@ -6,7 +6,7 @@ use nu_parser::parse;
|
|||||||
use nu_protocol::engine::Stack;
|
use nu_protocol::engine::Stack;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, StateDelta, StateWorkingSet},
|
engine::{EngineState, StateDelta, StateWorkingSet},
|
||||||
PipelineData, Spanned,
|
PipelineData, Spanned, Value,
|
||||||
};
|
};
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
@ -17,9 +17,16 @@ pub fn evaluate_commands(
|
|||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
is_perf_true: bool,
|
is_perf_true: bool,
|
||||||
|
table_mode: Option<Value>,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
// Run a command (or commands) given to us by the user
|
// Run a command (or commands) given to us by the user
|
||||||
let (block, delta) = {
|
let (block, delta) = {
|
||||||
|
if let Some(ref t_mode) = table_mode {
|
||||||
|
let mut config = engine_state.get_config().clone();
|
||||||
|
config.table_mode = t_mode.as_string()?;
|
||||||
|
engine_state.set_config(&config);
|
||||||
|
}
|
||||||
|
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
let (output, err) = parse(&mut working_set, None, commands.item.as_bytes(), false, &[]);
|
let (output, err) = parse(&mut working_set, None, commands.item.as_bytes(), false, &[]);
|
||||||
@ -37,12 +44,17 @@ pub fn evaluate_commands(
|
|||||||
report_error(&working_set, &err);
|
report_error(&working_set, &err);
|
||||||
}
|
}
|
||||||
|
|
||||||
let config = engine_state.get_config().clone();
|
let mut config = engine_state.get_config().clone();
|
||||||
|
if let Some(t_mode) = table_mode {
|
||||||
|
config.table_mode = t_mode.as_string()?;
|
||||||
|
}
|
||||||
|
|
||||||
// Merge the delta in case env vars changed in the config
|
// Merge the delta in case env vars changed in the config
|
||||||
match nu_engine::env::current_dir(engine_state, stack) {
|
match nu_engine::env::current_dir(engine_state, stack) {
|
||||||
Ok(cwd) => {
|
Ok(cwd) => {
|
||||||
if let Err(e) = engine_state.merge_delta(StateDelta::new(), Some(stack), cwd) {
|
if let Err(e) =
|
||||||
|
engine_state.merge_delta(StateDelta::new(engine_state), Some(stack), cwd)
|
||||||
|
{
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
report_error(&working_set, &e);
|
report_error(&working_set, &e);
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
@ -64,7 +76,7 @@ pub fn evaluate_commands(
|
|||||||
|
|
||||||
match eval_block(engine_state, stack, &block, input, false, false) {
|
match eval_block(engine_state, stack, &block, input, false, false) {
|
||||||
Ok(pipeline_data) => {
|
Ok(pipeline_data) => {
|
||||||
crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &config)
|
crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &mut config)
|
||||||
}
|
}
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
@ -1,7 +1,5 @@
|
|||||||
use crate::completions::{
|
use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
|
||||||
file_completions::file_path_completion, Completer, CompletionOptions, MatchAlgorithm, SortBy,
|
use nu_parser::FlatShape;
|
||||||
};
|
|
||||||
use nu_parser::{unescape_unquote_string, FlatShape};
|
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, StateWorkingSet},
|
engine::{EngineState, StateWorkingSet},
|
||||||
Span,
|
Span,
|
||||||
@ -12,7 +10,6 @@ use std::sync::Arc;
|
|||||||
pub struct CommandCompletion {
|
pub struct CommandCompletion {
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
flattened: Vec<(Span, FlatShape)>,
|
flattened: Vec<(Span, FlatShape)>,
|
||||||
flat_idx: usize,
|
|
||||||
flat_shape: FlatShape,
|
flat_shape: FlatShape,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -21,13 +18,11 @@ impl CommandCompletion {
|
|||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>,
|
||||||
_: &StateWorkingSet,
|
_: &StateWorkingSet,
|
||||||
flattened: Vec<(Span, FlatShape)>,
|
flattened: Vec<(Span, FlatShape)>,
|
||||||
flat_idx: usize,
|
|
||||||
flat_shape: FlatShape,
|
flat_shape: FlatShape,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
engine_state,
|
engine_state,
|
||||||
flattened,
|
flattened,
|
||||||
flat_idx,
|
|
||||||
flat_shape,
|
flat_shape,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -39,7 +34,7 @@ impl CommandCompletion {
|
|||||||
) -> Vec<String> {
|
) -> Vec<String> {
|
||||||
let mut executables = vec![];
|
let mut executables = vec![];
|
||||||
|
|
||||||
let paths = self.engine_state.env_vars.get("PATH");
|
let paths = self.engine_state.get_env_var("PATH");
|
||||||
|
|
||||||
if let Some(paths) = paths {
|
if let Some(paths) = paths {
|
||||||
if let Ok(paths) = paths.as_list() {
|
if let Ok(paths) = paths.as_list() {
|
||||||
@ -161,7 +156,7 @@ impl Completer for CommandCompletion {
|
|||||||
fn fetch(
|
fn fetch(
|
||||||
&mut self,
|
&mut self,
|
||||||
working_set: &StateWorkingSet,
|
working_set: &StateWorkingSet,
|
||||||
prefix: Vec<u8>,
|
_prefix: Vec<u8>,
|
||||||
span: Span,
|
span: Span,
|
||||||
offset: usize,
|
offset: usize,
|
||||||
pos: usize,
|
pos: usize,
|
||||||
@ -204,76 +199,25 @@ impl Completer for CommandCompletion {
|
|||||||
return subcommands;
|
return subcommands;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let config = working_set.get_config();
|
||||||
let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
|
let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
|
||||||
|| matches!(self.flat_shape, nu_parser::FlatShape::InternalCall)
|
|| matches!(self.flat_shape, nu_parser::FlatShape::InternalCall)
|
||||||
|| ((span.end - span.start) == 0)
|
|| ((span.end - span.start) == 0)
|
||||||
{
|
{
|
||||||
// we're in a gap or at a command
|
// we're in a gap or at a command
|
||||||
self.complete_commands(working_set, span, offset, true, options.match_algorithm)
|
self.complete_commands(
|
||||||
|
working_set,
|
||||||
|
span,
|
||||||
|
offset,
|
||||||
|
config.enable_external_completion,
|
||||||
|
options.match_algorithm,
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
vec![]
|
vec![]
|
||||||
};
|
};
|
||||||
|
|
||||||
let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
|
subcommands
|
||||||
match d.as_string() {
|
|
||||||
Ok(s) => s,
|
|
||||||
Err(_) => "".to_string(),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
"".to_string()
|
|
||||||
};
|
|
||||||
|
|
||||||
let preceding_byte = if span.start > offset {
|
|
||||||
working_set
|
|
||||||
.get_span_contents(Span {
|
|
||||||
start: span.start - 1,
|
|
||||||
end: span.start,
|
|
||||||
})
|
|
||||||
.to_vec()
|
|
||||||
} else {
|
|
||||||
vec![]
|
|
||||||
};
|
|
||||||
// let prefix = working_set.get_span_contents(flat.0);
|
|
||||||
let prefix = String::from_utf8_lossy(&prefix).to_string();
|
|
||||||
|
|
||||||
file_path_completion(span, &prefix, &cwd, options.match_algorithm)
|
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(move |x| {
|
|
||||||
if self.flat_idx == 0 {
|
|
||||||
// We're in the command position
|
|
||||||
if (x.1.starts_with('"') || x.1.starts_with('\'') || x.1.starts_with('`'))
|
|
||||||
&& !matches!(preceding_byte.get(0), Some(b'^'))
|
|
||||||
{
|
|
||||||
let (trimmed, _) = unescape_unquote_string(x.1.as_bytes(), span);
|
|
||||||
let expanded = nu_path::canonicalize_with(trimmed, &cwd);
|
|
||||||
|
|
||||||
if let Ok(expanded) = expanded {
|
|
||||||
if is_executable::is_executable(expanded) {
|
|
||||||
(x.0, format!("^{}", x.1))
|
|
||||||
} else {
|
|
||||||
(x.0, x.1)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
(x.0, x.1)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
(x.0, x.1)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
(x.0, x.1)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.map(move |x| Suggestion {
|
|
||||||
value: x.1,
|
|
||||||
description: None,
|
|
||||||
extra: None,
|
|
||||||
span: reedline::Span {
|
|
||||||
start: x.0.start - offset,
|
|
||||||
end: x.0.end - offset,
|
|
||||||
},
|
|
||||||
append_whitespace: false,
|
|
||||||
})
|
|
||||||
.chain(subcommands.into_iter())
|
|
||||||
.chain(commands.into_iter())
|
.chain(commands.into_iter())
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
}
|
}
|
||||||
|
@ -37,7 +37,10 @@ impl NuCompleter {
|
|||||||
) -> Vec<Suggestion> {
|
) -> Vec<Suggestion> {
|
||||||
let config = self.engine_state.get_config();
|
let config = self.engine_state.get_config();
|
||||||
|
|
||||||
let mut options = CompletionOptions::default();
|
let mut options = CompletionOptions {
|
||||||
|
case_sensitive: config.case_sensitive_completions,
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
if config.completion_algorithm == "fuzzy" {
|
if config.completion_algorithm == "fuzzy" {
|
||||||
options.match_algorithm = MatchAlgorithm::Fuzzy;
|
options.match_algorithm = MatchAlgorithm::Fuzzy;
|
||||||
@ -56,36 +59,43 @@ impl NuCompleter {
|
|||||||
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
|
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
|
||||||
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
||||||
let offset = working_set.next_span_start();
|
let offset = working_set.next_span_start();
|
||||||
let mut line = line.to_string();
|
let (mut new_line, alias_offset) = try_find_alias(line.as_bytes(), &working_set);
|
||||||
line.insert(pos, 'a');
|
let initial_line = line.to_string();
|
||||||
|
new_line.push(b'a');
|
||||||
let pos = offset + pos;
|
let pos = offset + pos;
|
||||||
let (output, _err) = parse(
|
let (output, _err) = parse(&mut working_set, Some("completer"), &new_line, false, &[]);
|
||||||
&mut working_set,
|
|
||||||
Some("completer"),
|
|
||||||
line.as_bytes(),
|
|
||||||
false,
|
|
||||||
&[],
|
|
||||||
);
|
|
||||||
|
|
||||||
for pipeline in output.pipelines.into_iter() {
|
for pipeline in output.pipelines.into_iter() {
|
||||||
for expr in pipeline.expressions {
|
for expr in pipeline.expressions {
|
||||||
let flattened: Vec<_> = flatten_expression(&working_set, &expr);
|
let flattened: Vec<_> = flatten_expression(&working_set, &expr);
|
||||||
|
|
||||||
for (flat_idx, flat) in flattened.iter().enumerate() {
|
for (flat_idx, flat) in flattened.iter().enumerate() {
|
||||||
if pos >= flat.0.start && pos < flat.0.end {
|
let alias = if alias_offset.is_empty() {
|
||||||
|
0
|
||||||
|
} else {
|
||||||
|
alias_offset[flat_idx]
|
||||||
|
};
|
||||||
|
if pos >= flat.0.start - alias && pos < flat.0.end - alias {
|
||||||
// Context variables
|
// Context variables
|
||||||
let most_left_var =
|
let most_left_var =
|
||||||
most_left_variable(flat_idx, &working_set, flattened.clone());
|
most_left_variable(flat_idx, &working_set, flattened.clone());
|
||||||
|
|
||||||
// Create a new span
|
// Create a new span
|
||||||
let new_span = Span {
|
let new_span = if flat_idx == 0 {
|
||||||
start: flat.0.start,
|
Span {
|
||||||
end: flat.0.end - 1,
|
start: flat.0.start,
|
||||||
|
end: flat.0.end - 1 - alias,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Span {
|
||||||
|
start: flat.0.start - alias,
|
||||||
|
end: flat.0.end - 1 - alias,
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Parses the prefix
|
// Parses the prefix
|
||||||
let mut prefix = working_set.get_span_contents(flat.0).to_vec();
|
let mut prefix = working_set.get_span_contents(flat.0).to_vec();
|
||||||
prefix.remove(pos - flat.0.start);
|
prefix.remove(pos - (flat.0.start - alias));
|
||||||
|
|
||||||
// Completions that depends on the previous expression (e.g: use, source)
|
// Completions that depends on the previous expression (e.g: use, source)
|
||||||
if flat_idx > 0 {
|
if flat_idx > 0 {
|
||||||
@ -150,7 +160,7 @@ impl NuCompleter {
|
|||||||
self.engine_state.clone(),
|
self.engine_state.clone(),
|
||||||
self.stack.clone(),
|
self.stack.clone(),
|
||||||
*decl_id,
|
*decl_id,
|
||||||
line,
|
initial_line,
|
||||||
);
|
);
|
||||||
|
|
||||||
return self.process_completion(
|
return self.process_completion(
|
||||||
@ -175,37 +185,39 @@ impl NuCompleter {
|
|||||||
pos,
|
pos,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
FlatShape::Filepath
|
|
||||||
| FlatShape::GlobPattern
|
|
||||||
| FlatShape::ExternalArg => {
|
|
||||||
let mut completer = FileCompletion::new(self.engine_state.clone());
|
|
||||||
|
|
||||||
return self.process_completion(
|
|
||||||
&mut completer,
|
|
||||||
&working_set,
|
|
||||||
prefix,
|
|
||||||
new_span,
|
|
||||||
offset,
|
|
||||||
pos,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
flat_shape => {
|
flat_shape => {
|
||||||
let mut completer = CommandCompletion::new(
|
let mut completer = CommandCompletion::new(
|
||||||
self.engine_state.clone(),
|
self.engine_state.clone(),
|
||||||
&working_set,
|
&working_set,
|
||||||
flattened.clone(),
|
flattened.clone(),
|
||||||
flat_idx,
|
// flat_idx,
|
||||||
flat_shape.clone(),
|
flat_shape.clone(),
|
||||||
);
|
);
|
||||||
|
|
||||||
return self.process_completion(
|
let out: Vec<_> = self.process_completion(
|
||||||
&mut completer,
|
&mut completer,
|
||||||
&working_set,
|
&working_set,
|
||||||
prefix,
|
prefix.clone(),
|
||||||
new_span,
|
new_span,
|
||||||
offset,
|
offset,
|
||||||
pos,
|
pos,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
if out.is_empty() {
|
||||||
|
let mut completer =
|
||||||
|
FileCompletion::new(self.engine_state.clone());
|
||||||
|
|
||||||
|
return self.process_completion(
|
||||||
|
&mut completer,
|
||||||
|
&working_set,
|
||||||
|
prefix,
|
||||||
|
new_span,
|
||||||
|
offset,
|
||||||
|
pos,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return out;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@ -223,6 +235,74 @@ impl ReedlineCompleter for NuCompleter {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type MatchedAlias<'a> = Vec<(&'a [u8], &'a [u8])>;
|
||||||
|
|
||||||
|
// Handler the completion when giving lines contains at least one alias. (e.g: `g checkout`)
|
||||||
|
// that `g` is an alias of `git`
|
||||||
|
fn try_find_alias(line: &[u8], working_set: &StateWorkingSet) -> (Vec<u8>, Vec<usize>) {
|
||||||
|
// An vector represents the offsets of alias
|
||||||
|
// e.g: the offset is 2 for the alias `g` of `git`
|
||||||
|
let mut alias_offset = vec![];
|
||||||
|
let mut output = vec![];
|
||||||
|
if let Some(matched_alias) = search_alias(line, working_set) {
|
||||||
|
let mut lens = matched_alias.len();
|
||||||
|
for (input_vec, line_vec) in matched_alias {
|
||||||
|
alias_offset.push(line_vec.len() - input_vec.len());
|
||||||
|
output.extend(line_vec);
|
||||||
|
if lens > 1 {
|
||||||
|
output.push(b' ');
|
||||||
|
lens -= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
output = line.to_vec();
|
||||||
|
}
|
||||||
|
|
||||||
|
(output, alias_offset)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_alias<'a>(input: &'a [u8], working_set: &'a StateWorkingSet) -> Option<MatchedAlias<'a>> {
|
||||||
|
let mut vec_names = vec![];
|
||||||
|
let mut vec_alias = vec![];
|
||||||
|
let mut pos = 0;
|
||||||
|
let mut is_alias = false;
|
||||||
|
for (index, character) in input.iter().enumerate() {
|
||||||
|
if *character == b' ' {
|
||||||
|
let range = &input[pos..index];
|
||||||
|
vec_names.push(range);
|
||||||
|
pos = index + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Push the rest to names vector.
|
||||||
|
if pos < input.len() {
|
||||||
|
vec_names.push(&input[pos..]);
|
||||||
|
}
|
||||||
|
|
||||||
|
for name in &vec_names {
|
||||||
|
if let Some(alias_id) = working_set.find_alias(name) {
|
||||||
|
let alias_span = working_set.get_alias(alias_id);
|
||||||
|
is_alias = true;
|
||||||
|
for alias in alias_span {
|
||||||
|
let name = working_set.get_span_contents(*alias);
|
||||||
|
if !name.is_empty() {
|
||||||
|
vec_alias.push(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
vec_alias.push(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if is_alias {
|
||||||
|
// Zip names and alias vectors, the original inputs and its aliases mapping.
|
||||||
|
// e.g:(['g'], ['g','i','t'])
|
||||||
|
let output = vec_names.into_iter().zip(vec_alias).collect();
|
||||||
|
Some(output)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// reads the most left variable returning it's name (e.g: $myvar)
|
// reads the most left variable returning it's name (e.g: $myvar)
|
||||||
// and the depth (a.b.c)
|
// and the depth (a.b.c)
|
||||||
fn most_left_variable(
|
fn most_left_variable(
|
||||||
|
@ -4,10 +4,11 @@ use nu_protocol::{
|
|||||||
levenshtein_distance, Span,
|
levenshtein_distance, Span,
|
||||||
};
|
};
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
|
use std::fs;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use super::{partial_from, prepend_base_dir, MatchAlgorithm};
|
use super::{partial_from, prepend_base_dir};
|
||||||
|
|
||||||
const SEP: char = std::path::MAIN_SEPARATOR;
|
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||||
|
|
||||||
@ -32,7 +33,7 @@ impl Completer for DirectoryCompletion {
|
|||||||
_: usize,
|
_: usize,
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
) -> Vec<Suggestion> {
|
) -> Vec<Suggestion> {
|
||||||
let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
|
let cwd = if let Some(d) = self.engine_state.get_env_var("PWD") {
|
||||||
match d.as_string() {
|
match d.as_string() {
|
||||||
Ok(s) => s,
|
Ok(s) => s,
|
||||||
Err(_) => "".to_string(),
|
Err(_) => "".to_string(),
|
||||||
@ -43,7 +44,7 @@ impl Completer for DirectoryCompletion {
|
|||||||
let partial = String::from_utf8_lossy(&prefix).to_string();
|
let partial = String::from_utf8_lossy(&prefix).to_string();
|
||||||
|
|
||||||
// Filter only the folders
|
// Filter only the folders
|
||||||
let output: Vec<_> = directory_completion(span, &partial, &cwd, options.match_algorithm)
|
let output: Vec<_> = directory_completion(span, &partial, &cwd, options)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(move |x| Suggestion {
|
.map(move |x| Suggestion {
|
||||||
value: x.1,
|
value: x.1,
|
||||||
@ -102,7 +103,7 @@ pub fn directory_completion(
|
|||||||
span: nu_protocol::Span,
|
span: nu_protocol::Span,
|
||||||
partial: &str,
|
partial: &str,
|
||||||
cwd: &str,
|
cwd: &str,
|
||||||
match_algorithm: MatchAlgorithm,
|
options: &CompletionOptions,
|
||||||
) -> Vec<(nu_protocol::Span, String)> {
|
) -> Vec<(nu_protocol::Span, String)> {
|
||||||
let original_input = partial;
|
let original_input = partial;
|
||||||
|
|
||||||
@ -120,10 +121,10 @@ pub fn directory_completion(
|
|||||||
return result
|
return result
|
||||||
.filter_map(|entry| {
|
.filter_map(|entry| {
|
||||||
entry.ok().and_then(|entry| {
|
entry.ok().and_then(|entry| {
|
||||||
if let Ok(metadata) = entry.metadata() {
|
if let Ok(metadata) = fs::metadata(entry.path()) {
|
||||||
if metadata.is_dir() {
|
if metadata.is_dir() {
|
||||||
let mut file_name = entry.file_name().to_string_lossy().into_owned();
|
let mut file_name = entry.file_name().to_string_lossy().into_owned();
|
||||||
if matches(&partial, &file_name, match_algorithm) {
|
if matches(&partial, &file_name, options) {
|
||||||
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
|
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
|
||||||
format!("{}{}", base_dir_name, file_name)
|
format!("{}{}", base_dir_name, file_name)
|
||||||
} else {
|
} else {
|
||||||
|
@ -37,7 +37,7 @@ impl Completer for DotNuCompletion {
|
|||||||
|
|
||||||
// Fetch the lib dirs
|
// Fetch the lib dirs
|
||||||
let lib_dirs: Vec<String> =
|
let lib_dirs: Vec<String> =
|
||||||
if let Some(lib_dirs) = self.engine_state.env_vars.get("NU_LIB_DIRS") {
|
if let Some(lib_dirs) = self.engine_state.get_env_var("NU_LIB_DIRS") {
|
||||||
lib_dirs
|
lib_dirs
|
||||||
.as_list()
|
.as_list()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
@ -58,7 +58,7 @@ impl Completer for DotNuCompletion {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Check if the base_dir is a folder
|
// Check if the base_dir is a folder
|
||||||
if base_dir != "./" {
|
if base_dir != format!(".{}", SEP) {
|
||||||
// Add the base dir into the directories to be searched
|
// Add the base dir into the directories to be searched
|
||||||
search_dirs.push(base_dir.clone());
|
search_dirs.push(base_dir.clone());
|
||||||
|
|
||||||
@ -70,7 +70,7 @@ impl Completer for DotNuCompletion {
|
|||||||
partial = base_dir_partial;
|
partial = base_dir_partial;
|
||||||
} else {
|
} else {
|
||||||
// Fetch the current folder
|
// Fetch the current folder
|
||||||
let current_folder = if let Some(d) = self.engine_state.env_vars.get("PWD") {
|
let current_folder = if let Some(d) = self.engine_state.get_env_var("PWD") {
|
||||||
match d.as_string() {
|
match d.as_string() {
|
||||||
Ok(s) => s,
|
Ok(s) => s,
|
||||||
Err(_) => "".to_string(),
|
Err(_) => "".to_string(),
|
||||||
@ -91,7 +91,7 @@ impl Completer for DotNuCompletion {
|
|||||||
let output: Vec<Suggestion> = search_dirs
|
let output: Vec<Suggestion> = search_dirs
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flat_map(|it| {
|
.flat_map(|it| {
|
||||||
file_path_completion(span, &partial, &it, options.match_algorithm)
|
file_path_completion(span, &partial, &it, options)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(|it| {
|
.filter(|it| {
|
||||||
// Different base dir, so we list the .nu files or folders
|
// Different base dir, so we list the .nu files or folders
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
use crate::completions::{Completer, CompletionOptions, MatchAlgorithm};
|
use crate::completions::{Completer, CompletionOptions};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, StateWorkingSet},
|
engine::{EngineState, StateWorkingSet},
|
||||||
levenshtein_distance, Span,
|
levenshtein_distance, Span,
|
||||||
@ -30,7 +30,7 @@ impl Completer for FileCompletion {
|
|||||||
_: usize,
|
_: usize,
|
||||||
options: &CompletionOptions,
|
options: &CompletionOptions,
|
||||||
) -> Vec<Suggestion> {
|
) -> Vec<Suggestion> {
|
||||||
let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
|
let cwd = if let Some(d) = self.engine_state.get_env_var("PWD") {
|
||||||
match d.as_string() {
|
match d.as_string() {
|
||||||
Ok(s) => s,
|
Ok(s) => s,
|
||||||
Err(_) => "".to_string(),
|
Err(_) => "".to_string(),
|
||||||
@ -39,7 +39,7 @@ impl Completer for FileCompletion {
|
|||||||
"".to_string()
|
"".to_string()
|
||||||
};
|
};
|
||||||
let prefix = String::from_utf8_lossy(&prefix).to_string();
|
let prefix = String::from_utf8_lossy(&prefix).to_string();
|
||||||
let output: Vec<_> = file_path_completion(span, &prefix, &cwd, options.match_algorithm)
|
let output: Vec<_> = file_path_completion(span, &prefix, &cwd, options)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(move |x| Suggestion {
|
.map(move |x| Suggestion {
|
||||||
value: x.1,
|
value: x.1,
|
||||||
@ -112,7 +112,7 @@ pub fn file_path_completion(
|
|||||||
span: nu_protocol::Span,
|
span: nu_protocol::Span,
|
||||||
partial: &str,
|
partial: &str,
|
||||||
cwd: &str,
|
cwd: &str,
|
||||||
match_algorithm: MatchAlgorithm,
|
options: &CompletionOptions,
|
||||||
) -> Vec<(nu_protocol::Span, String)> {
|
) -> Vec<(nu_protocol::Span, String)> {
|
||||||
let original_input = partial;
|
let original_input = partial;
|
||||||
let (base_dir_name, partial) = partial_from(partial);
|
let (base_dir_name, partial) = partial_from(partial);
|
||||||
@ -129,7 +129,7 @@ pub fn file_path_completion(
|
|||||||
.filter_map(|entry| {
|
.filter_map(|entry| {
|
||||||
entry.ok().and_then(|entry| {
|
entry.ok().and_then(|entry| {
|
||||||
let mut file_name = entry.file_name().to_string_lossy().into_owned();
|
let mut file_name = entry.file_name().to_string_lossy().into_owned();
|
||||||
if matches(&partial, &file_name, match_algorithm) {
|
if matches(&partial, &file_name, options) {
|
||||||
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
|
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
|
||||||
format!("{}{}", base_dir_name, file_name)
|
format!("{}{}", base_dir_name, file_name)
|
||||||
} else {
|
} else {
|
||||||
@ -158,8 +158,15 @@ pub fn file_path_completion(
|
|||||||
Vec::new()
|
Vec::new()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn matches(partial: &str, from: &str, match_algorithm: MatchAlgorithm) -> bool {
|
pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
|
||||||
match_algorithm.matches_str(&from.to_ascii_lowercase(), &partial.to_ascii_lowercase())
|
// Check for case sensitive
|
||||||
|
if !options.case_sensitive {
|
||||||
|
return options
|
||||||
|
.match_algorithm
|
||||||
|
.matches_str(&from.to_ascii_lowercase(), &partial.to_ascii_lowercase());
|
||||||
|
}
|
||||||
|
|
||||||
|
options.match_algorithm.matches_str(from, partial)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns whether the base_dir should be prepended to the file path
|
/// Returns whether the base_dir should be prepended to the file path
|
||||||
|
@ -11,7 +11,7 @@ use std::sync::Arc;
|
|||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct VariableCompletion {
|
pub struct VariableCompletion {
|
||||||
engine_state: Arc<EngineState>,
|
engine_state: Arc<EngineState>, // TODO: Is engine state necessary? It's already a part of working set in fetch()
|
||||||
stack: Stack,
|
stack: Stack,
|
||||||
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
||||||
}
|
}
|
||||||
@ -70,7 +70,18 @@ impl Completer for VariableCompletion {
|
|||||||
self.var_context.1.clone().into_iter().skip(1).collect();
|
self.var_context.1.clone().into_iter().skip(1).collect();
|
||||||
|
|
||||||
if let Some(val) = env_vars.get(&target_var_str) {
|
if let Some(val) = env_vars.get(&target_var_str) {
|
||||||
return nested_suggestions(val.clone(), nested_levels, current_span);
|
for suggestion in
|
||||||
|
nested_suggestions(val.clone(), nested_levels, current_span)
|
||||||
|
{
|
||||||
|
if options
|
||||||
|
.match_algorithm
|
||||||
|
.matches_u8(suggestion.value.as_bytes(), &prefix)
|
||||||
|
{
|
||||||
|
output.push(suggestion);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return output;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// No nesting provided, return all env vars
|
// No nesting provided, return all env vars
|
||||||
@ -105,7 +116,18 @@ impl Completer for VariableCompletion {
|
|||||||
end: current_span.end,
|
end: current_span.end,
|
||||||
},
|
},
|
||||||
) {
|
) {
|
||||||
return nested_suggestions(nuval, self.var_context.1.clone(), current_span);
|
for suggestion in
|
||||||
|
nested_suggestions(nuval, self.var_context.1.clone(), current_span)
|
||||||
|
{
|
||||||
|
if options
|
||||||
|
.match_algorithm
|
||||||
|
.matches_u8(suggestion.value.as_bytes(), &prefix)
|
||||||
|
{
|
||||||
|
output.push(suggestion);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return output;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -122,7 +144,18 @@ impl Completer for VariableCompletion {
|
|||||||
|
|
||||||
// If the value exists and it's of type Record
|
// If the value exists and it's of type Record
|
||||||
if let Ok(value) = var {
|
if let Ok(value) = var {
|
||||||
return nested_suggestions(value, self.var_context.1.clone(), current_span);
|
for suggestion in
|
||||||
|
nested_suggestions(value, self.var_context.1.clone(), current_span)
|
||||||
|
{
|
||||||
|
if options
|
||||||
|
.match_algorithm
|
||||||
|
.matches_u8(suggestion.value.as_bytes(), &prefix)
|
||||||
|
{
|
||||||
|
output.push(suggestion);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return output;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -143,24 +176,39 @@ impl Completer for VariableCompletion {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO: The following can be refactored (see find_commands_by_predicate() used in
|
||||||
|
// command_completions).
|
||||||
|
let mut removed_overlays = vec![];
|
||||||
// Working set scope vars
|
// Working set scope vars
|
||||||
for scope in &working_set.delta.scope {
|
for scope_frame in working_set.delta.scope.iter().rev() {
|
||||||
for v in &scope.vars {
|
for overlay_frame in scope_frame
|
||||||
if options.match_algorithm.matches_u8(v.0, &prefix) {
|
.active_overlays(&mut removed_overlays)
|
||||||
output.push(Suggestion {
|
.iter()
|
||||||
value: String::from_utf8_lossy(v.0).to_string(),
|
.rev()
|
||||||
description: None,
|
{
|
||||||
extra: None,
|
for v in &overlay_frame.vars {
|
||||||
span: current_span,
|
if options.match_algorithm.matches_u8(v.0, &prefix) {
|
||||||
append_whitespace: false,
|
output.push(Suggestion {
|
||||||
});
|
value: String::from_utf8_lossy(v.0).to_string(),
|
||||||
|
description: None,
|
||||||
|
extra: None,
|
||||||
|
span: current_span,
|
||||||
|
append_whitespace: false,
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Permanent state vars
|
// Permanent state vars
|
||||||
for scope in &self.engine_state.scope {
|
// for scope in &self.engine_state.scope {
|
||||||
for v in &scope.vars {
|
for overlay_frame in self
|
||||||
|
.engine_state
|
||||||
|
.active_overlays(&removed_overlays)
|
||||||
|
.iter()
|
||||||
|
.rev()
|
||||||
|
{
|
||||||
|
for v in &overlay_frame.vars {
|
||||||
if options.match_algorithm.matches_u8(v.0, &prefix) {
|
if options.match_algorithm.matches_u8(v.0, &prefix) {
|
||||||
output.push(Suggestion {
|
output.push(Suggestion {
|
||||||
value: String::from_utf8_lossy(v.0).to_string(),
|
value: String::from_utf8_lossy(v.0).to_string(),
|
||||||
@ -173,7 +221,7 @@ impl Completer for VariableCompletion {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
output.dedup();
|
output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?
|
||||||
|
|
||||||
output
|
output
|
||||||
}
|
}
|
||||||
|
@ -2,12 +2,15 @@ use crate::util::{eval_source, report_error};
|
|||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
use log::info;
|
use log::info;
|
||||||
use nu_protocol::engine::{EngineState, Stack, StateDelta, StateWorkingSet};
|
use nu_protocol::engine::{EngineState, Stack, StateDelta, StateWorkingSet};
|
||||||
use nu_protocol::{PipelineData, Span};
|
use nu_protocol::{HistoryFileFormat, PipelineData, Span};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
const PLUGIN_FILE: &str = "plugin.nu";
|
const PLUGIN_FILE: &str = "plugin.nu";
|
||||||
|
|
||||||
|
const HISTORY_FILE_TXT: &str = "history.txt";
|
||||||
|
const HISTORY_FILE_SQLITE: &str = "history.sqlite3";
|
||||||
|
|
||||||
#[cfg(feature = "plugin")]
|
#[cfg(feature = "plugin")]
|
||||||
pub fn read_plugin_file(
|
pub fn read_plugin_file(
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
@ -69,7 +72,9 @@ pub fn eval_config_contents(
|
|||||||
// Merge the delta in case env vars changed in the config
|
// Merge the delta in case env vars changed in the config
|
||||||
match nu_engine::env::current_dir(engine_state, stack) {
|
match nu_engine::env::current_dir(engine_state, stack) {
|
||||||
Ok(cwd) => {
|
Ok(cwd) => {
|
||||||
if let Err(e) = engine_state.merge_delta(StateDelta::new(), Some(stack), cwd) {
|
if let Err(e) =
|
||||||
|
engine_state.merge_delta(StateDelta::new(engine_state), Some(stack), cwd)
|
||||||
|
{
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
report_error(&working_set, &e);
|
report_error(&working_set, &e);
|
||||||
}
|
}
|
||||||
@ -82,3 +87,14 @@ pub fn eval_config_contents(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> Option<PathBuf> {
|
||||||
|
nu_path::config_dir().map(|mut history_path| {
|
||||||
|
history_path.push(storage_path);
|
||||||
|
history_path.push(match mode {
|
||||||
|
HistoryFileFormat::PlainText => HISTORY_FILE_TXT,
|
||||||
|
HistoryFileFormat::Sqlite => HISTORY_FILE_SQLITE,
|
||||||
|
});
|
||||||
|
history_path
|
||||||
|
})
|
||||||
|
}
|
||||||
|
@ -4,12 +4,13 @@ use log::trace;
|
|||||||
use miette::{IntoDiagnostic, Result};
|
use miette::{IntoDiagnostic, Result};
|
||||||
use nu_engine::convert_env_values;
|
use nu_engine::convert_env_values;
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
|
use nu_protocol::Type;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
ast::Call,
|
ast::Call,
|
||||||
engine::{EngineState, Stack, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
Config, PipelineData, Span, Value,
|
Config, PipelineData, Span, Value,
|
||||||
};
|
};
|
||||||
use std::io::Write;
|
use nu_utils::stdout_write_all_and_flush;
|
||||||
|
|
||||||
/// Main function used when a file path is found as argument for nu
|
/// Main function used when a file path is found as argument for nu
|
||||||
pub fn evaluate_file(
|
pub fn evaluate_file(
|
||||||
@ -34,7 +35,7 @@ pub fn evaluate_file(
|
|||||||
|
|
||||||
let _ = parse(&mut working_set, Some(&path), &file, false, &[]);
|
let _ = parse(&mut working_set, Some(&path), &file, false, &[]);
|
||||||
|
|
||||||
if working_set.find_decl(b"main").is_some() {
|
if working_set.find_decl(b"main", &Type::Any).is_some() {
|
||||||
let args = format!("main {}", args.join(" "));
|
let args = format!("main {}", args.join(" "));
|
||||||
|
|
||||||
if !eval_source(
|
if !eval_source(
|
||||||
@ -61,17 +62,20 @@ pub fn evaluate_file(
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_table_or_error(
|
pub fn print_table_or_error(
|
||||||
engine_state: &EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
mut pipeline_data: PipelineData,
|
mut pipeline_data: PipelineData,
|
||||||
config: &Config,
|
config: &mut Config,
|
||||||
) {
|
) {
|
||||||
let exit_code = match &mut pipeline_data {
|
let exit_code = match &mut pipeline_data {
|
||||||
PipelineData::ExternalStream { exit_code, .. } => exit_code.take(),
|
PipelineData::ExternalStream { exit_code, .. } => exit_code.take(),
|
||||||
_ => None,
|
_ => None,
|
||||||
};
|
};
|
||||||
|
|
||||||
match engine_state.find_decl("table".as_bytes()) {
|
// Change the engine_state config to use the passed in configuration
|
||||||
|
engine_state.set_config(config);
|
||||||
|
|
||||||
|
match engine_state.find_decl("table".as_bytes(), &[]) {
|
||||||
Some(decl_id) => {
|
Some(decl_id) => {
|
||||||
let table = engine_state.get_decl(decl_id).run(
|
let table = engine_state.get_decl(decl_id).run(
|
||||||
engine_state,
|
engine_state,
|
||||||
@ -83,8 +87,6 @@ pub fn print_table_or_error(
|
|||||||
match table {
|
match table {
|
||||||
Ok(table) => {
|
Ok(table) => {
|
||||||
for item in table {
|
for item in table {
|
||||||
let stdout = std::io::stdout();
|
|
||||||
|
|
||||||
if let Value::Error { error } = item {
|
if let Value::Error { error } = item {
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
@ -96,10 +98,7 @@ pub fn print_table_or_error(
|
|||||||
let mut out = item.into_string("\n", config);
|
let mut out = item.into_string("\n", config);
|
||||||
out.push('\n');
|
out.push('\n');
|
||||||
|
|
||||||
match stdout.lock().write_all(out.as_bytes()) {
|
let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{}", err));
|
||||||
Ok(_) => (),
|
|
||||||
Err(err) => eprintln!("{}", err),
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(error) => {
|
Err(error) => {
|
||||||
@ -113,8 +112,6 @@ pub fn print_table_or_error(
|
|||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
for item in pipeline_data {
|
for item in pipeline_data {
|
||||||
let stdout = std::io::stdout();
|
|
||||||
|
|
||||||
if let Value::Error { error } = item {
|
if let Value::Error { error } = item {
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
@ -126,10 +123,7 @@ pub fn print_table_or_error(
|
|||||||
let mut out = item.into_string("\n", config);
|
let mut out = item.into_string("\n", config);
|
||||||
out.push('\n');
|
out.push('\n');
|
||||||
|
|
||||||
match stdout.lock().write_all(out.as_bytes()) {
|
let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{}", err));
|
||||||
Ok(_) => (),
|
|
||||||
Err(err) => eprintln!("{}", err),
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -586,7 +586,7 @@ impl Menu for DescriptionMenu {
|
|||||||
} else {
|
} else {
|
||||||
self.example_index = Some(self.examples.len().saturating_sub(1));
|
self.example_index = Some(self.examples.len().saturating_sub(1));
|
||||||
}
|
}
|
||||||
} else {
|
} else if !self.examples.is_empty() {
|
||||||
self.example_index = Some(0);
|
self.example_index = Some(0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -598,7 +598,7 @@ impl Menu for DescriptionMenu {
|
|||||||
} else {
|
} else {
|
||||||
self.example_index = Some(0);
|
self.example_index = Some(0);
|
||||||
}
|
}
|
||||||
} else {
|
} else if !self.examples.is_empty() {
|
||||||
self.example_index = Some(0);
|
self.example_index = Some(0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
use nu_engine::documentation::get_flags_section;
|
use nu_engine::documentation::get_flags_section;
|
||||||
use nu_protocol::{engine::EngineState, levenshtein_distance};
|
use nu_protocol::{engine::EngineState, levenshtein_distance};
|
||||||
use reedline::{Completer, Suggestion};
|
use reedline::{Completer, Suggestion};
|
||||||
|
use std::fmt::Write;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
pub struct NuHelpCompleter(Arc<EngineState>);
|
pub struct NuHelpCompleter(Arc<EngineState>);
|
||||||
@ -19,6 +20,10 @@ impl NuHelpCompleter {
|
|||||||
.filter(|(sig, _, _, _)| {
|
.filter(|(sig, _, _, _)| {
|
||||||
sig.name.to_lowercase().contains(&line.to_lowercase())
|
sig.name.to_lowercase().contains(&line.to_lowercase())
|
||||||
|| sig.usage.to_lowercase().contains(&line.to_lowercase())
|
|| sig.usage.to_lowercase().contains(&line.to_lowercase())
|
||||||
|
|| sig
|
||||||
|
.search_terms
|
||||||
|
.iter()
|
||||||
|
.any(|term| term.to_lowercase().contains(&line.to_lowercase()))
|
||||||
|| sig
|
|| sig
|
||||||
.extra_usage
|
.extra_usage
|
||||||
.to_lowercase()
|
.to_lowercase()
|
||||||
@ -49,7 +54,7 @@ impl NuHelpCompleter {
|
|||||||
long_desc.push_str("\r\n\r\n");
|
long_desc.push_str("\r\n\r\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
long_desc.push_str(&format!("Usage:\r\n > {}\r\n", sig.call_signature()));
|
let _ = write!(long_desc, "Usage:\r\n > {}\r\n", sig.call_signature());
|
||||||
|
|
||||||
if !sig.named.is_empty() {
|
if !sig.named.is_empty() {
|
||||||
long_desc.push_str(&get_flags_section(sig))
|
long_desc.push_str(&get_flags_section(sig))
|
||||||
@ -61,27 +66,28 @@ impl NuHelpCompleter {
|
|||||||
{
|
{
|
||||||
long_desc.push_str("\r\nParameters:\r\n");
|
long_desc.push_str("\r\nParameters:\r\n");
|
||||||
for positional in &sig.required_positional {
|
for positional in &sig.required_positional {
|
||||||
long_desc
|
let _ = write!(long_desc, " {}: {}\r\n", positional.name, positional.desc);
|
||||||
.push_str(&format!(" {}: {}\r\n", positional.name, positional.desc));
|
|
||||||
}
|
}
|
||||||
for positional in &sig.optional_positional {
|
for positional in &sig.optional_positional {
|
||||||
long_desc.push_str(&format!(
|
let _ = write!(
|
||||||
|
long_desc,
|
||||||
" (optional) {}: {}\r\n",
|
" (optional) {}: {}\r\n",
|
||||||
positional.name, positional.desc
|
positional.name, positional.desc
|
||||||
));
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(rest_positional) = &sig.rest_positional {
|
if let Some(rest_positional) = &sig.rest_positional {
|
||||||
long_desc.push_str(&format!(
|
let _ = write!(
|
||||||
|
long_desc,
|
||||||
" ...{}: {}\r\n",
|
" ...{}: {}\r\n",
|
||||||
rest_positional.name, rest_positional.desc
|
rest_positional.name, rest_positional.desc
|
||||||
));
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let extra: Vec<String> = examples
|
let extra: Vec<String> = examples
|
||||||
.iter()
|
.iter()
|
||||||
.map(|example| example.example.to_string())
|
.map(|example| example.example.replace('\n', "\r\n"))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
Suggestion {
|
Suggestion {
|
||||||
|
@ -19,6 +19,10 @@ impl Command for NuHighlight {
|
|||||||
"Syntax highlight the input string."
|
"Syntax highlight the input string."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["syntax", "color", "convert"]
|
||||||
|
}
|
||||||
|
|
||||||
fn run(
|
fn run(
|
||||||
&self,
|
&self,
|
||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
|
@ -16,6 +16,11 @@ impl Command for Print {
|
|||||||
fn signature(&self) -> Signature {
|
fn signature(&self) -> Signature {
|
||||||
Signature::build("print")
|
Signature::build("print")
|
||||||
.rest("rest", SyntaxShape::Any, "the values to print")
|
.rest("rest", SyntaxShape::Any, "the values to print")
|
||||||
|
.switch(
|
||||||
|
"no-newline",
|
||||||
|
"print without inserting a newline for the line ending",
|
||||||
|
Some('n'),
|
||||||
|
)
|
||||||
.category(Category::Strings)
|
.category(Category::Strings)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -23,6 +28,10 @@ impl Command for Print {
|
|||||||
"Prints the values given"
|
"Prints the values given"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["display"]
|
||||||
|
}
|
||||||
|
|
||||||
fn run(
|
fn run(
|
||||||
&self,
|
&self,
|
||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
@ -31,10 +40,12 @@ impl Command for Print {
|
|||||||
_input: PipelineData,
|
_input: PipelineData,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
||||||
|
let no_newline = call.has_flag("no-newline");
|
||||||
let head = call.head;
|
let head = call.head;
|
||||||
|
|
||||||
for arg in args {
|
for arg in args {
|
||||||
arg.into_pipeline_data().print(engine_state, stack)?;
|
arg.into_pipeline_data()
|
||||||
|
.print(engine_state, stack, no_newline)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(PipelineData::new(head))
|
Ok(PipelineData::new(head))
|
||||||
|
@ -7,9 +7,6 @@ use {
|
|||||||
std::borrow::Cow,
|
std::borrow::Cow,
|
||||||
};
|
};
|
||||||
|
|
||||||
const PROMPT_MARKER_BEFORE_PS1: &str = "\x1b]133;A\x1b\\"; // OSC 133;A ST
|
|
||||||
const PROMPT_MARKER_BEFORE_PS2: &str = "\x1b]133;A;k=s\x1b\\"; // OSC 133;A;k=s ST
|
|
||||||
|
|
||||||
/// Nushell prompt definition
|
/// Nushell prompt definition
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct NushellPrompt {
|
pub struct NushellPrompt {
|
||||||
@ -19,7 +16,6 @@ pub struct NushellPrompt {
|
|||||||
default_vi_insert_prompt_indicator: Option<String>,
|
default_vi_insert_prompt_indicator: Option<String>,
|
||||||
default_vi_normal_prompt_indicator: Option<String>,
|
default_vi_normal_prompt_indicator: Option<String>,
|
||||||
default_multiline_indicator: Option<String>,
|
default_multiline_indicator: Option<String>,
|
||||||
shell_integration: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for NushellPrompt {
|
impl Default for NushellPrompt {
|
||||||
@ -37,7 +33,6 @@ impl NushellPrompt {
|
|||||||
default_vi_insert_prompt_indicator: None,
|
default_vi_insert_prompt_indicator: None,
|
||||||
default_vi_normal_prompt_indicator: None,
|
default_vi_normal_prompt_indicator: None,
|
||||||
default_multiline_indicator: None,
|
default_multiline_indicator: None,
|
||||||
shell_integration: false,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -87,34 +82,20 @@ impl NushellPrompt {
|
|||||||
fn default_wrapped_custom_string(&self, str: String) -> String {
|
fn default_wrapped_custom_string(&self, str: String) -> String {
|
||||||
format!("({})", str)
|
format!("({})", str)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn enable_shell_integration(&mut self) {
|
|
||||||
self.shell_integration = true
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Prompt for NushellPrompt {
|
impl Prompt for NushellPrompt {
|
||||||
fn render_prompt_left(&self) -> Cow<str> {
|
fn render_prompt_left(&self) -> Cow<str> {
|
||||||
// Just before starting to draw the PS1 prompt send the escape code (see
|
if let Some(prompt_string) = &self.left_prompt_string {
|
||||||
// https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
|
prompt_string.replace('\n', "\r\n").into()
|
||||||
let mut prompt = if self.shell_integration {
|
|
||||||
String::from(PROMPT_MARKER_BEFORE_PS1)
|
|
||||||
} else {
|
} else {
|
||||||
String::new()
|
let default = DefaultPrompt::new();
|
||||||
};
|
default
|
||||||
|
.render_prompt_left()
|
||||||
prompt.push_str(&match &self.left_prompt_string {
|
.to_string()
|
||||||
Some(prompt_string) => prompt_string.replace('\n', "\r\n"),
|
.replace('\n', "\r\n")
|
||||||
None => {
|
.into()
|
||||||
let default = DefaultPrompt::new();
|
}
|
||||||
default
|
|
||||||
.render_prompt_left()
|
|
||||||
.to_string()
|
|
||||||
.replace('\n', "\r\n")
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
prompt.into()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn render_prompt_right(&self) -> Cow<str> {
|
fn render_prompt_right(&self) -> Cow<str> {
|
||||||
@ -155,21 +136,10 @@ impl Prompt for NushellPrompt {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn render_prompt_multiline_indicator(&self) -> Cow<str> {
|
fn render_prompt_multiline_indicator(&self) -> Cow<str> {
|
||||||
// Just before starting to draw the PS1 prompt send the escape code (see
|
match &self.default_multiline_indicator {
|
||||||
// https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
|
Some(indicator) => indicator.as_str().into(),
|
||||||
let mut prompt = if self.shell_integration {
|
None => "::: ".into(),
|
||||||
String::from(PROMPT_MARKER_BEFORE_PS2)
|
}
|
||||||
} else {
|
|
||||||
String::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
prompt.push_str(
|
|
||||||
self.default_multiline_indicator
|
|
||||||
.as_ref()
|
|
||||||
.unwrap_or(&String::from("::: ")),
|
|
||||||
);
|
|
||||||
|
|
||||||
prompt.into()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn render_prompt_history_search_indicator(
|
fn render_prompt_history_search_indicator(
|
||||||
|
@ -147,10 +147,6 @@ pub(crate) fn update_prompt<'prompt>(
|
|||||||
(prompt_vi_insert_string, prompt_vi_normal_string),
|
(prompt_vi_insert_string, prompt_vi_normal_string),
|
||||||
);
|
);
|
||||||
|
|
||||||
if config.shell_integration {
|
|
||||||
nu_prompt.enable_shell_integration();
|
|
||||||
}
|
|
||||||
|
|
||||||
let ret_val = nu_prompt as &dyn Prompt;
|
let ret_val = nu_prompt as &dyn Prompt;
|
||||||
if is_perf_true {
|
if is_perf_true {
|
||||||
info!("update_prompt {}:{}:{}", file!(), line!(), column!());
|
info!("update_prompt {}:{}:{}", file!(), line!(), column!());
|
||||||
|
@ -24,7 +24,7 @@ const DEFAULT_COMPLETION_MENU: &str = r#"
|
|||||||
type: {
|
type: {
|
||||||
layout: columnar
|
layout: columnar
|
||||||
columns: 4
|
columns: 4
|
||||||
col_width: 20
|
col_width: 20
|
||||||
col_padding: 2
|
col_padding: 2
|
||||||
}
|
}
|
||||||
style: {
|
style: {
|
||||||
@ -58,7 +58,7 @@ const DEFAULT_HELP_MENU: &str = r#"
|
|||||||
type: {
|
type: {
|
||||||
layout: description
|
layout: description
|
||||||
columns: 4
|
columns: 4
|
||||||
col_width: 20
|
col_width: 20
|
||||||
col_padding: 2
|
col_padding: 2
|
||||||
selection_rows: 4
|
selection_rows: 4
|
||||||
description_rows: 10
|
description_rows: 10
|
||||||
@ -501,14 +501,16 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
|||||||
ReedlineEvent::MenuPrevious,
|
ReedlineEvent::MenuPrevious,
|
||||||
);
|
);
|
||||||
|
|
||||||
// History menu keybinding
|
keybindings.add_binding(
|
||||||
|
KeyModifiers::CONTROL,
|
||||||
|
KeyCode::Char('r'),
|
||||||
|
ReedlineEvent::Menu("history_menu".to_string()),
|
||||||
|
);
|
||||||
|
|
||||||
keybindings.add_binding(
|
keybindings.add_binding(
|
||||||
KeyModifiers::CONTROL,
|
KeyModifiers::CONTROL,
|
||||||
KeyCode::Char('x'),
|
KeyCode::Char('x'),
|
||||||
ReedlineEvent::UntilFound(vec![
|
ReedlineEvent::MenuPageNext,
|
||||||
ReedlineEvent::Menu("history_menu".to_string()),
|
|
||||||
ReedlineEvent::MenuPageNext,
|
|
||||||
]),
|
|
||||||
);
|
);
|
||||||
|
|
||||||
keybindings.add_binding(
|
keybindings.add_binding(
|
||||||
@ -522,8 +524,8 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
|||||||
|
|
||||||
// Help menu keybinding
|
// Help menu keybinding
|
||||||
keybindings.add_binding(
|
keybindings.add_binding(
|
||||||
KeyModifiers::CONTROL,
|
KeyModifiers::NONE,
|
||||||
KeyCode::Char('q'),
|
KeyCode::F(1),
|
||||||
ReedlineEvent::Menu("help_menu".to_string()),
|
ReedlineEvent::Menu("help_menu".to_string()),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -1,34 +1,33 @@
|
|||||||
use crate::reedline_config::add_menus;
|
|
||||||
use crate::{completions::NuCompleter, NuHighlighter, NuValidator, NushellPrompt};
|
|
||||||
use crate::{prompt_update, reedline_config};
|
|
||||||
use crate::{
|
use crate::{
|
||||||
reedline_config::KeybindingsMode,
|
completions::NuCompleter,
|
||||||
|
prompt_update,
|
||||||
|
reedline_config::{add_menus, create_keybindings, KeybindingsMode},
|
||||||
util::{eval_source, report_error},
|
util::{eval_source, report_error},
|
||||||
|
NuHighlighter, NuValidator, NushellPrompt,
|
||||||
};
|
};
|
||||||
use log::info;
|
use log::{info, trace};
|
||||||
use log::trace;
|
|
||||||
use miette::{IntoDiagnostic, Result};
|
use miette::{IntoDiagnostic, Result};
|
||||||
use nu_color_config::get_color_config;
|
use nu_color_config::get_color_config;
|
||||||
use nu_engine::convert_env_values;
|
use nu_engine::{convert_env_values, eval_block};
|
||||||
use nu_parser::lex;
|
use nu_parser::lex;
|
||||||
use nu_protocol::engine::Stack;
|
|
||||||
use nu_protocol::PipelineData;
|
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
engine::{EngineState, StateWorkingSet},
|
engine::{EngineState, Stack, StateWorkingSet},
|
||||||
ShellError, Span, Value,
|
BlockId, HistoryFileFormat, PipelineData, PositionalArg, ShellError, Span, Value,
|
||||||
};
|
};
|
||||||
use reedline::{DefaultHinter, Emacs, Vi};
|
use reedline::{DefaultHinter, Emacs, SqliteBackedHistory, Vi};
|
||||||
use std::io::{self, Write};
|
use std::io::{self, Write};
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::{sync::atomic::Ordering, time::Instant};
|
use std::{sync::atomic::Ordering, time::Instant};
|
||||||
|
use sysinfo::SystemExt;
|
||||||
|
|
||||||
const PROMPT_MARKER_BEFORE_CMD: &str = "\x1b]133;C\x1b\\"; // OSC 133;C ST
|
const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
|
||||||
|
const PRE_EXECUTE_MARKER: &str = "\x1b]133;C\x1b\\";
|
||||||
|
const CMD_FINISHED_MARKER: &str = "\x1b]133;D\x1b\\";
|
||||||
const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
|
const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
|
||||||
|
|
||||||
pub fn evaluate_repl(
|
pub fn evaluate_repl(
|
||||||
engine_state: &mut EngineState,
|
engine_state: &mut EngineState,
|
||||||
stack: &mut Stack,
|
stack: &mut Stack,
|
||||||
history_path: Option<PathBuf>,
|
nushell_path: &str,
|
||||||
is_perf_true: bool,
|
is_perf_true: bool,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
use reedline::{FileBackedHistory, Reedline, Signal};
|
use reedline::{FileBackedHistory, Reedline, Signal};
|
||||||
@ -86,20 +85,32 @@ pub fn evaluate_repl(
|
|||||||
info!("setup reedline {}:{}:{}", file!(), line!(), column!());
|
info!("setup reedline {}:{}:{}", file!(), line!(), column!());
|
||||||
}
|
}
|
||||||
let mut line_editor = Reedline::create();
|
let mut line_editor = Reedline::create();
|
||||||
|
let history_path = crate::config_files::get_history_path(
|
||||||
|
nushell_path,
|
||||||
|
engine_state.config.history_file_format,
|
||||||
|
);
|
||||||
if let Some(history_path) = history_path.as_deref() {
|
if let Some(history_path) = history_path.as_deref() {
|
||||||
if is_perf_true {
|
if is_perf_true {
|
||||||
info!("setup history {}:{}:{}", file!(), line!(), column!());
|
info!("setup history {}:{}:{}", file!(), line!(), column!());
|
||||||
}
|
}
|
||||||
let history = Box::new(
|
|
||||||
FileBackedHistory::with_file(
|
let history: Box<dyn reedline::History> = match engine_state.config.history_file_format {
|
||||||
config.max_history_size as usize,
|
HistoryFileFormat::PlainText => Box::new(
|
||||||
history_path.to_path_buf(),
|
FileBackedHistory::with_file(
|
||||||
)
|
config.max_history_size as usize,
|
||||||
.into_diagnostic()?,
|
history_path.to_path_buf(),
|
||||||
);
|
)
|
||||||
|
.into_diagnostic()?,
|
||||||
|
),
|
||||||
|
HistoryFileFormat::Sqlite => Box::new(
|
||||||
|
SqliteBackedHistory::with_file(history_path.to_path_buf()).into_diagnostic()?,
|
||||||
|
),
|
||||||
|
};
|
||||||
line_editor = line_editor.with_history(history);
|
line_editor = line_editor.with_history(history);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let sys = sysinfo::System::new();
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
if is_perf_true {
|
if is_perf_true {
|
||||||
info!(
|
info!(
|
||||||
@ -114,6 +125,10 @@ pub fn evaluate_repl(
|
|||||||
if let Some(ctrlc) = &mut engine_state.ctrlc {
|
if let Some(ctrlc) = &mut engine_state.ctrlc {
|
||||||
ctrlc.store(false, Ordering::SeqCst);
|
ctrlc.store(false, Ordering::SeqCst);
|
||||||
}
|
}
|
||||||
|
// Reset the SIGQUIT handler
|
||||||
|
if let Some(sig_quit) = engine_state.get_sig_quit() {
|
||||||
|
sig_quit.store(false, Ordering::SeqCst);
|
||||||
|
}
|
||||||
|
|
||||||
config = engine_state.get_config();
|
config = engine_state.get_config();
|
||||||
|
|
||||||
@ -161,7 +176,26 @@ pub fn evaluate_repl(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
line_editor = line_editor.with_buffer_editor(config.buffer_editor.clone(), "nu".into());
|
let buffer_editor = if !config.buffer_editor.is_empty() {
|
||||||
|
Some(config.buffer_editor.clone())
|
||||||
|
} else {
|
||||||
|
stack
|
||||||
|
.get_env_var(engine_state, "EDITOR")
|
||||||
|
.map(|v| v.as_string().unwrap_or_default())
|
||||||
|
.filter(|v| !v.is_empty())
|
||||||
|
.or_else(|| {
|
||||||
|
stack
|
||||||
|
.get_env_var(engine_state, "VISUAL")
|
||||||
|
.map(|v| v.as_string().unwrap_or_default())
|
||||||
|
.filter(|v| !v.is_empty())
|
||||||
|
})
|
||||||
|
};
|
||||||
|
|
||||||
|
line_editor = if let Some(buffer_editor) = buffer_editor {
|
||||||
|
line_editor.with_buffer_editor(buffer_editor, "nu".into())
|
||||||
|
} else {
|
||||||
|
line_editor
|
||||||
|
};
|
||||||
|
|
||||||
if config.sync_history_on_enter {
|
if config.sync_history_on_enter {
|
||||||
if is_perf_true {
|
if is_perf_true {
|
||||||
@ -175,7 +209,7 @@ pub fn evaluate_repl(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Changing the line editor based on the found keybindings
|
// Changing the line editor based on the found keybindings
|
||||||
line_editor = match reedline_config::create_keybindings(config) {
|
line_editor = match create_keybindings(config) {
|
||||||
Ok(keybindings) => match keybindings {
|
Ok(keybindings) => match keybindings {
|
||||||
KeybindingsMode::Emacs(keybindings) => {
|
KeybindingsMode::Emacs(keybindings) => {
|
||||||
let edit_mode = Box::new(Emacs::new(keybindings));
|
let edit_mode = Box::new(Emacs::new(keybindings));
|
||||||
@ -200,6 +234,66 @@ pub fn evaluate_repl(
|
|||||||
info!("prompt_update {}:{}:{}", file!(), line!(), column!());
|
info!("prompt_update {}:{}:{}", file!(), line!(), column!());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Right before we start our prompt and take input from the user,
|
||||||
|
// fire the "pre_prompt" hook
|
||||||
|
if let Some(hook) = &config.hooks.pre_prompt {
|
||||||
|
if let Err(err) = run_hook(engine_state, stack, vec![], hook) {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next, check all the environment variables they ask for
|
||||||
|
// fire the "env_change" hook
|
||||||
|
if let Some(hook) = config.hooks.env_change.clone() {
|
||||||
|
match hook {
|
||||||
|
Value::Record {
|
||||||
|
cols, vals: blocks, ..
|
||||||
|
} => {
|
||||||
|
for (idx, env_var) in cols.iter().enumerate() {
|
||||||
|
let before = engine_state
|
||||||
|
.previous_env_vars
|
||||||
|
.get(env_var)
|
||||||
|
.cloned()
|
||||||
|
.unwrap_or_default();
|
||||||
|
let after = stack.get_env_var(engine_state, env_var).unwrap_or_default();
|
||||||
|
if before != after {
|
||||||
|
if let Err(err) = run_hook(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
vec![before, after.clone()],
|
||||||
|
&blocks[idx],
|
||||||
|
) {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &err);
|
||||||
|
}
|
||||||
|
|
||||||
|
engine_state
|
||||||
|
.previous_env_vars
|
||||||
|
.insert(env_var.to_string(), after);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
x => {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(
|
||||||
|
&working_set,
|
||||||
|
&ShellError::TypeMismatch(
|
||||||
|
"record for 'env_change' hook".to_string(),
|
||||||
|
x.span().unwrap_or_else(|_| Span::new(0, 0)),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
config = engine_state.get_config();
|
||||||
|
|
||||||
|
let shell_integration = config.shell_integration;
|
||||||
|
if shell_integration {
|
||||||
|
run_ansi_sequence(PRE_PROMPT_MARKER)?;
|
||||||
|
}
|
||||||
|
|
||||||
let prompt =
|
let prompt =
|
||||||
prompt_update::update_prompt(config, engine_state, stack, &mut nu_prompt, is_perf_true);
|
prompt_update::update_prompt(config, engine_state, stack, &mut nu_prompt, is_perf_true);
|
||||||
|
|
||||||
@ -215,10 +309,53 @@ pub fn evaluate_repl(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let input = line_editor.read_line(prompt);
|
let input = line_editor.read_line(prompt);
|
||||||
let use_shell_integration = config.shell_integration;
|
|
||||||
|
|
||||||
match input {
|
match input {
|
||||||
Ok(Signal::Success(s)) => {
|
Ok(Signal::Success(s)) => {
|
||||||
|
let history_supports_meta =
|
||||||
|
matches!(config.history_file_format, HistoryFileFormat::Sqlite);
|
||||||
|
if history_supports_meta && !s.is_empty() {
|
||||||
|
line_editor
|
||||||
|
.update_last_command_context(&|mut c| {
|
||||||
|
c.start_timestamp = Some(chrono::Utc::now());
|
||||||
|
c.hostname = sys.host_name();
|
||||||
|
|
||||||
|
c.cwd = Some(StateWorkingSet::new(engine_state).get_cwd());
|
||||||
|
c
|
||||||
|
})
|
||||||
|
.into_diagnostic()?; // todo: don't stop repl if error here?
|
||||||
|
}
|
||||||
|
|
||||||
|
// Right before we start running the code the user gave us,
|
||||||
|
// fire the "pre_execution" hook
|
||||||
|
if let Some(hook) = &config.hooks.pre_execution {
|
||||||
|
if let Err(err) = run_hook(engine_state, stack, vec![], hook) {
|
||||||
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
report_error(&working_set, &err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if shell_integration {
|
||||||
|
run_ansi_sequence(RESET_APPLICATION_MODE)?;
|
||||||
|
run_ansi_sequence(PRE_EXECUTE_MARKER)?;
|
||||||
|
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
|
||||||
|
let path = cwd.as_string()?;
|
||||||
|
// Try to abbreviate string for windows title
|
||||||
|
let maybe_abbrev_path = if let Some(p) = nu_path::home_dir() {
|
||||||
|
path.replace(&p.as_path().display().to_string(), "~")
|
||||||
|
} else {
|
||||||
|
path
|
||||||
|
};
|
||||||
|
|
||||||
|
// Set window title too
|
||||||
|
// https://tldp.org/HOWTO/Xterm-Title-3.html
|
||||||
|
// ESC]0;stringBEL -- Set icon name and window title to string
|
||||||
|
// ESC]1;stringBEL -- Set icon name to string
|
||||||
|
// ESC]2;stringBEL -- Set window title to string
|
||||||
|
run_ansi_sequence(&format!("\x1b]2;{}\x07", maybe_abbrev_path))?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let start_time = Instant::now();
|
let start_time = Instant::now();
|
||||||
let tokens = lex(s.as_bytes(), 0, &[], &[], false);
|
let tokens = lex(s.as_bytes(), 0, &[], &[], false);
|
||||||
// Check if this is a single call to a directory, if so auto-cd
|
// Check if this is a single call to a directory, if so auto-cd
|
||||||
@ -244,7 +381,6 @@ pub fn evaluate_repl(
|
|||||||
&ShellError::DirectoryNotFound(tokens.0[0].span, None),
|
&ShellError::DirectoryNotFound(tokens.0[0].span, None),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let path = nu_path::canonicalize_with(path, &cwd)
|
let path = nu_path::canonicalize_with(path, &cwd)
|
||||||
.expect("internal error: cannot canonicalize known path");
|
.expect("internal error: cannot canonicalize known path");
|
||||||
(path.to_string_lossy().to_string(), tokens.0[0].span)
|
(path.to_string_lossy().to_string(), tokens.0[0].span)
|
||||||
@ -290,44 +426,53 @@ pub fn evaluate_repl(
|
|||||||
&format!("entry #{}", entry_num),
|
&format!("entry #{}", entry_num),
|
||||||
PipelineData::new(Span::new(0, 0)),
|
PipelineData::new(Span::new(0, 0)),
|
||||||
);
|
);
|
||||||
|
|
||||||
stack.add_env_var(
|
|
||||||
"CMD_DURATION_MS".into(),
|
|
||||||
Value::String {
|
|
||||||
val: format!("{}", start_time.elapsed().as_millis()),
|
|
||||||
span: Span { start: 0, end: 0 },
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
let cmd_duration = start_time.elapsed();
|
||||||
|
|
||||||
|
stack.add_env_var(
|
||||||
|
"CMD_DURATION_MS".into(),
|
||||||
|
Value::String {
|
||||||
|
val: format!("{}", cmd_duration.as_millis()),
|
||||||
|
span: Span { start: 0, end: 0 },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
// FIXME: permanent state changes like this hopefully in time can be removed
|
// FIXME: permanent state changes like this hopefully in time can be removed
|
||||||
// and be replaced by just passing the cwd in where needed
|
// and be replaced by just passing the cwd in where needed
|
||||||
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
|
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
|
||||||
let path = cwd.as_string()?;
|
let path = cwd.as_string()?;
|
||||||
let _ = std::env::set_current_dir(path);
|
let _ = std::env::set_current_dir(path);
|
||||||
engine_state.env_vars.insert("PWD".into(), cwd);
|
engine_state.add_env_var("PWD".into(), cwd);
|
||||||
}
|
}
|
||||||
|
|
||||||
if use_shell_integration {
|
if history_supports_meta && !s.is_empty() {
|
||||||
// Just before running a command/program, send the escape code (see
|
line_editor
|
||||||
// https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
|
.update_last_command_context(&|mut c| {
|
||||||
let mut ansi_escapes = String::from(PROMPT_MARKER_BEFORE_CMD);
|
c.duration = Some(cmd_duration);
|
||||||
ansi_escapes.push_str(RESET_APPLICATION_MODE);
|
c.exit_status = stack
|
||||||
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
|
.get_env_var(engine_state, "LAST_EXIT_CODE")
|
||||||
let path = cwd.as_string()?;
|
.and_then(|e| e.as_i64().ok());
|
||||||
ansi_escapes.push_str(&format!("\x1b]2;{}\x07", path));
|
c
|
||||||
}
|
})
|
||||||
// print!("{}", ansi_escapes);
|
.into_diagnostic()?; // todo: don't stop repl if error here?
|
||||||
match io::stdout().write_all(ansi_escapes.as_bytes()) {
|
}
|
||||||
Ok(it) => it,
|
|
||||||
Err(err) => print!("error: {}", err),
|
if shell_integration {
|
||||||
};
|
// FIXME: use variant with exit code, if apropriate
|
||||||
|
run_ansi_sequence(CMD_FINISHED_MARKER)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(Signal::CtrlC) => {
|
Ok(Signal::CtrlC) => {
|
||||||
// `Reedline` clears the line content. New prompt is shown
|
// `Reedline` clears the line content. New prompt is shown
|
||||||
|
if shell_integration {
|
||||||
|
run_ansi_sequence(CMD_FINISHED_MARKER)?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
Ok(Signal::CtrlD) => {
|
Ok(Signal::CtrlD) => {
|
||||||
// When exiting clear to a new line
|
// When exiting clear to a new line
|
||||||
|
if shell_integration {
|
||||||
|
run_ansi_sequence(CMD_FINISHED_MARKER)?;
|
||||||
|
}
|
||||||
println!();
|
println!();
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@ -336,9 +481,96 @@ pub fn evaluate_repl(
|
|||||||
if !message.contains("duration") {
|
if !message.contains("duration") {
|
||||||
println!("Error: {:?}", err);
|
println!("Error: {:?}", err);
|
||||||
}
|
}
|
||||||
|
if shell_integration {
|
||||||
|
run_ansi_sequence(CMD_FINISHED_MARKER)?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn run_ansi_sequence(seq: &str) -> Result<(), ShellError> {
|
||||||
|
match io::stdout().write_all(seq.as_bytes()) {
|
||||||
|
Ok(it) => it,
|
||||||
|
Err(err) => {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Error writing ansi sequence".into(),
|
||||||
|
err.to_string(),
|
||||||
|
Some(Span { start: 0, end: 0 }),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
io::stdout().flush().map_err(|e| {
|
||||||
|
ShellError::GenericError(
|
||||||
|
"Error flushing stdio".into(),
|
||||||
|
e.to_string(),
|
||||||
|
Some(Span { start: 0, end: 0 }),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_hook(
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
arguments: Vec<Value>,
|
||||||
|
value: &Value,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
match value {
|
||||||
|
Value::List { vals, .. } => {
|
||||||
|
for val in vals {
|
||||||
|
run_hook(engine_state, stack, arguments.clone(), val)?
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
Value::Block {
|
||||||
|
val: block_id,
|
||||||
|
span,
|
||||||
|
..
|
||||||
|
} => run_hook_block(engine_state, stack, *block_id, arguments, *span),
|
||||||
|
x => match x.span() {
|
||||||
|
Ok(span) => Err(ShellError::MissingConfigValue(
|
||||||
|
"block for hook in config".into(),
|
||||||
|
span,
|
||||||
|
)),
|
||||||
|
_ => Err(ShellError::MissingConfigValue(
|
||||||
|
"block for hook in config".into(),
|
||||||
|
Span { start: 0, end: 0 },
|
||||||
|
)),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_hook_block(
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
block_id: BlockId,
|
||||||
|
arguments: Vec<Value>,
|
||||||
|
span: Span,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
let block = engine_state.get_block(block_id);
|
||||||
|
let input = PipelineData::new(span);
|
||||||
|
|
||||||
|
let mut callee_stack = stack.gather_captures(&block.captures);
|
||||||
|
|
||||||
|
for (idx, PositionalArg { var_id, .. }) in
|
||||||
|
block.signature.required_positional.iter().enumerate()
|
||||||
|
{
|
||||||
|
if let Some(var_id) = var_id {
|
||||||
|
callee_stack.add_var(*var_id, arguments[idx].clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match eval_block(engine_state, &mut callee_stack, block, input, false, false) {
|
||||||
|
Ok(pipeline_data) => match pipeline_data.into_value(span) {
|
||||||
|
Value::Error { error } => Err(error),
|
||||||
|
_ => Ok(()),
|
||||||
|
},
|
||||||
|
Err(err) => Err(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -9,18 +9,35 @@ use nu_protocol::{
|
|||||||
};
|
};
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
use nu_utils::enable_vt_processing;
|
use nu_utils::enable_vt_processing;
|
||||||
use std::path::PathBuf;
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
// This will collect environment variables from std::env and adds them to a stack.
|
// This will collect environment variables from std::env and adds them to a stack.
|
||||||
//
|
//
|
||||||
// In order to ensure the values have spans, it first creates a dummy file, writes the collected
|
// In order to ensure the values have spans, it first creates a dummy file, writes the collected
|
||||||
// env vars into it (in a "NAME"="value" format, quite similar to the output of the Unix 'env'
|
// env vars into it (in a "NAME"="value" format, quite similar to the output of the Unix 'env'
|
||||||
// tool), then uses the file to get the spans. The file stays in memory, no filesystem IO is done.
|
// tool), then uses the file to get the spans. The file stays in memory, no filesystem IO is done.
|
||||||
pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
|
//
|
||||||
gather_env_vars(std::env::vars(), engine_state);
|
// The "PWD" env value will be forced to `init_cwd`.
|
||||||
|
// The reason to use `init_cwd`:
|
||||||
|
//
|
||||||
|
// While gathering parent env vars, the parent `PWD` may not be the same as `current working directory`.
|
||||||
|
// Consider to the following command as the case (assume we execute command inside `/tmp`):
|
||||||
|
//
|
||||||
|
// tmux split-window -v -c "#{pane_current_path}"
|
||||||
|
//
|
||||||
|
// Here nu execute external command `tmux`, and tmux starts a new `nushell`, with `init_cwd` value "#{pane_current_path}".
|
||||||
|
// But at the same time `PWD` still remains to be `/tmp`.
|
||||||
|
//
|
||||||
|
// In this scenario, the new `nushell`'s PWD should be "#{pane_current_path}" rather init_cwd.
|
||||||
|
pub fn gather_parent_env_vars(engine_state: &mut EngineState, init_cwd: &Path) {
|
||||||
|
gather_env_vars(std::env::vars(), engine_state, init_cwd);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn gather_env_vars(vars: impl Iterator<Item = (String, String)>, engine_state: &mut EngineState) {
|
fn gather_env_vars(
|
||||||
|
vars: impl Iterator<Item = (String, String)>,
|
||||||
|
engine_state: &mut EngineState,
|
||||||
|
init_cwd: &Path,
|
||||||
|
) {
|
||||||
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
report_error(
|
report_error(
|
||||||
@ -43,35 +60,31 @@ fn gather_env_vars(vars: impl Iterator<Item = (String, String)>, engine_state: &
|
|||||||
}
|
}
|
||||||
|
|
||||||
let mut fake_env_file = String::new();
|
let mut fake_env_file = String::new();
|
||||||
let mut has_pwd = false;
|
|
||||||
|
|
||||||
// Write all the env vars into a fake file
|
// Write all the env vars into a fake file
|
||||||
for (name, val) in vars {
|
for (name, val) in vars {
|
||||||
if name == "PWD" {
|
|
||||||
has_pwd = true;
|
|
||||||
}
|
|
||||||
put_env_to_fake_file(&name, &val, &mut fake_env_file);
|
put_env_to_fake_file(&name, &val, &mut fake_env_file);
|
||||||
}
|
}
|
||||||
|
|
||||||
if !has_pwd {
|
match init_cwd.to_str() {
|
||||||
match std::env::current_dir() {
|
Some(cwd) => {
|
||||||
Ok(cwd) => {
|
put_env_to_fake_file("PWD", cwd, &mut fake_env_file);
|
||||||
put_env_to_fake_file("PWD", &cwd.to_string_lossy(), &mut fake_env_file);
|
}
|
||||||
}
|
None => {
|
||||||
Err(e) => {
|
// Could not capture current working directory
|
||||||
// Could not capture current working directory
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
report_error(
|
||||||
report_error(
|
&working_set,
|
||||||
&working_set,
|
&ShellError::GenericError(
|
||||||
&ShellError::GenericError(
|
"Current directory is not a valid utf-8 path".to_string(),
|
||||||
"Current directory not found".to_string(),
|
"".to_string(),
|
||||||
"".to_string(),
|
None,
|
||||||
None,
|
Some(format!(
|
||||||
Some(format!("Retrieving current directory failed: {:?}", e)),
|
"Retrieving current directory failed: {:?} not a valid utf-8 path",
|
||||||
Vec::new(),
|
init_cwd
|
||||||
),
|
)),
|
||||||
);
|
Vec::new(),
|
||||||
}
|
),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -179,7 +192,7 @@ fn gather_env_vars(vars: impl Iterator<Item = (String, String)>, engine_state: &
|
|||||||
};
|
};
|
||||||
|
|
||||||
// stack.add_env_var(name, value);
|
// stack.add_env_var(name, value);
|
||||||
engine_state.env_vars.insert(name, value);
|
engine_state.add_env_var(name, value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -211,8 +224,8 @@ pub fn eval_source(
|
|||||||
(output, working_set.render())
|
(output, working_set.render())
|
||||||
};
|
};
|
||||||
|
|
||||||
let cwd = match nu_engine::env::current_dir_str(engine_state, stack) {
|
let cwd = match nu_engine::env::current_dir(engine_state, stack) {
|
||||||
Ok(p) => PathBuf::from(p),
|
Ok(p) => p,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
report_error(&working_set, &e);
|
report_error(&working_set, &e);
|
||||||
@ -220,10 +233,7 @@ pub fn eval_source(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Err(err) = engine_state.merge_delta(delta, Some(stack), &cwd) {
|
let _ = engine_state.merge_delta(delta, Some(stack), &cwd);
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
|
||||||
report_error(&working_set, &err);
|
|
||||||
}
|
|
||||||
|
|
||||||
match eval_block(engine_state, stack, &block, input, false, false) {
|
match eval_block(engine_state, stack, &block, input, false, false) {
|
||||||
Ok(mut pipeline_data) => {
|
Ok(mut pipeline_data) => {
|
||||||
@ -237,7 +247,7 @@ pub fn eval_source(
|
|||||||
set_last_exit_code(stack, 0);
|
set_last_exit_code(stack, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Err(err) = pipeline_data.print(engine_state, stack) {
|
if let Err(err) = pipeline_data.print(engine_state, stack, false) {
|
||||||
let working_set = StateWorkingSet::new(engine_state);
|
let working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
report_error(&working_set, &err);
|
report_error(&working_set, &err);
|
||||||
@ -317,14 +327,21 @@ mod test {
|
|||||||
]
|
]
|
||||||
.into_iter(),
|
.into_iter(),
|
||||||
&mut engine_state,
|
&mut engine_state,
|
||||||
|
Path::new("t"),
|
||||||
);
|
);
|
||||||
|
|
||||||
let env = engine_state.env_vars;
|
let env = engine_state.render_env_vars();
|
||||||
|
|
||||||
assert!(matches!(env.get("FOO"), Some(Value::String { val, .. }) if val == "foo"));
|
assert!(
|
||||||
assert!(matches!(env.get("SYMBOLS"), Some(Value::String { val, .. }) if val == symbols));
|
matches!(env.get(&"FOO".to_string()), Some(&Value::String { val, .. }) if val == "foo")
|
||||||
assert!(matches!(env.get(symbols), Some(Value::String { val, .. }) if val == "symbols"));
|
);
|
||||||
assert!(env.get("PWD").is_some());
|
assert!(
|
||||||
|
matches!(env.get(&"SYMBOLS".to_string()), Some(&Value::String { val, .. }) if val == symbols)
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
matches!(env.get(&symbols.to_string()), Some(&Value::String { val, .. }) if val == "symbols")
|
||||||
|
);
|
||||||
|
assert!(env.get(&"PWD".to_string()).is_some());
|
||||||
assert_eq!(env.len(), 4);
|
assert_eq!(env.len(), 4);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
29
crates/nu-cli/tests/custom_completions.rs
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
pub mod support;
|
||||||
|
|
||||||
|
use nu_cli::NuCompleter;
|
||||||
|
use reedline::Completer;
|
||||||
|
use support::{match_suggestions, new_engine};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn variables_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, _, mut engine, mut stack) = new_engine();
|
||||||
|
|
||||||
|
// Add record value as example
|
||||||
|
let record = r#"def animals [] { ["cat", "dog", "eel" ] }
|
||||||
|
def my-command [animal: string@animals] { print $animal }"#;
|
||||||
|
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||||
|
|
||||||
|
// Instatiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test completions for $nu
|
||||||
|
let suggestions = completer.complete("my-command ", 11);
|
||||||
|
|
||||||
|
assert_eq!(3, suggestions.len());
|
||||||
|
|
||||||
|
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
28
crates/nu-cli/tests/dotnu_completions.rs
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
pub mod support;
|
||||||
|
|
||||||
|
use nu_cli::NuCompleter;
|
||||||
|
use reedline::Completer;
|
||||||
|
use support::new_engine;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn dotnu_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
// Instatiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test source completion
|
||||||
|
let completion_str = "source ".to_string();
|
||||||
|
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||||
|
|
||||||
|
assert_eq!(1, suggestions.len());
|
||||||
|
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
|
||||||
|
|
||||||
|
// Test use completion
|
||||||
|
let completion_str = "use ".to_string();
|
||||||
|
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||||
|
|
||||||
|
assert_eq!(1, suggestions.len());
|
||||||
|
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
|
||||||
|
}
|
42
crates/nu-cli/tests/file_completions.rs
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
pub mod support;
|
||||||
|
|
||||||
|
use nu_cli::NuCompleter;
|
||||||
|
use reedline::Completer;
|
||||||
|
use support::{file, folder, match_suggestions, new_engine};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn file_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, dir_str, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
// Instatiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test completions for the current folder
|
||||||
|
let target_dir = format!("cp {}", dir_str);
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
file(dir.join("nushell")),
|
||||||
|
folder(dir.join("test_a")),
|
||||||
|
folder(dir.join("test_b")),
|
||||||
|
folder(dir.join("another")),
|
||||||
|
file(dir.join("custom_completion.nu")),
|
||||||
|
file(dir.join(".hidden_file")),
|
||||||
|
folder(dir.join(".hidden_folder")),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
|
||||||
|
// Test completions for the completions/another folder
|
||||||
|
let target_dir = format!("cd {}", folder(dir.join("another")));
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
}
|
36
crates/nu-cli/tests/flag_completions.rs
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
pub mod support;
|
||||||
|
|
||||||
|
use nu_cli::NuCompleter;
|
||||||
|
use reedline::Completer;
|
||||||
|
use support::{match_suggestions, new_engine};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn flag_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (_, _, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
// Instatiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
// Test completions for the 'ls' flags
|
||||||
|
let suggestions = completer.complete("ls -", 4);
|
||||||
|
|
||||||
|
assert_eq!(12, suggestions.len());
|
||||||
|
|
||||||
|
let expected: Vec<String> = vec![
|
||||||
|
"--all".into(),
|
||||||
|
"--du".into(),
|
||||||
|
"--full-paths".into(),
|
||||||
|
"--help".into(),
|
||||||
|
"--long".into(),
|
||||||
|
"--short-names".into(),
|
||||||
|
"-a".into(),
|
||||||
|
"-d".into(),
|
||||||
|
"-f".into(),
|
||||||
|
"-h".into(),
|
||||||
|
"-l".into(),
|
||||||
|
"-s".into(),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match results
|
||||||
|
match_suggestions(expected, suggestions);
|
||||||
|
}
|
29
crates/nu-cli/tests/folder_completions.rs
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
pub mod support;
|
||||||
|
|
||||||
|
use nu_cli::NuCompleter;
|
||||||
|
use reedline::Completer;
|
||||||
|
use support::{folder, match_suggestions, new_engine};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn folder_completions() {
|
||||||
|
// Create a new engine
|
||||||
|
let (dir, dir_str, engine, stack) = new_engine();
|
||||||
|
|
||||||
|
// Instatiate a new completer
|
||||||
|
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||||
|
|
||||||
|
// Test completions for the current folder
|
||||||
|
let target_dir = format!("cd {}", dir_str);
|
||||||
|
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||||
|
|
||||||
|
// Create the expected values
|
||||||
|
let expected_paths: Vec<String> = vec![
|
||||||
|
folder(dir.join("test_a")),
|
||||||
|
folder(dir.join("test_b")),
|
||||||
|
folder(dir.join("another")),
|
||||||
|
folder(dir.join(".hidden_folder")),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Match the results
|
||||||
|
match_suggestions(expected_paths, suggestions);
|
||||||
|
}
|
117
crates/nu-cli/tests/support/completions_helpers.rs
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use nu_command::create_default_context;
|
||||||
|
use nu_engine::eval_block;
|
||||||
|
use nu_parser::parse;
|
||||||
|
use nu_protocol::{
|
||||||
|
engine::{EngineState, Stack, StateDelta, StateWorkingSet},
|
||||||
|
PipelineData, ShellError, Span, Value,
|
||||||
|
};
|
||||||
|
use nu_test_support::fs;
|
||||||
|
use reedline::Suggestion;
|
||||||
|
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||||
|
|
||||||
|
// creates a new engine with the current path into the completions fixtures folder
|
||||||
|
pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||||
|
// Target folder inside assets
|
||||||
|
let dir = fs::fixtures().join("completions");
|
||||||
|
let mut dir_str = dir
|
||||||
|
.clone()
|
||||||
|
.into_os_string()
|
||||||
|
.into_string()
|
||||||
|
.unwrap_or_default();
|
||||||
|
dir_str.push(SEP);
|
||||||
|
|
||||||
|
// Create a new engine with default context
|
||||||
|
let mut engine_state = create_default_context(&dir);
|
||||||
|
|
||||||
|
// New stack
|
||||||
|
let mut stack = Stack::new();
|
||||||
|
|
||||||
|
// New delta state
|
||||||
|
let delta = StateDelta::new(&engine_state);
|
||||||
|
|
||||||
|
// Add pwd as env var
|
||||||
|
stack.add_env_var(
|
||||||
|
"PWD".to_string(),
|
||||||
|
Value::String {
|
||||||
|
val: dir_str.clone(),
|
||||||
|
span: nu_protocol::Span {
|
||||||
|
start: 0,
|
||||||
|
end: dir_str.len(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
stack.add_env_var(
|
||||||
|
"TEST".to_string(),
|
||||||
|
Value::String {
|
||||||
|
val: "NUSHELL".to_string(),
|
||||||
|
span: nu_protocol::Span {
|
||||||
|
start: 0,
|
||||||
|
end: dir_str.len(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Merge delta
|
||||||
|
let merge_result = engine_state.merge_delta(delta, Some(&mut stack), &dir);
|
||||||
|
assert!(merge_result.is_ok());
|
||||||
|
|
||||||
|
(dir, dir_str, engine_state, stack)
|
||||||
|
}
|
||||||
|
|
||||||
|
// match a list of suggestions with the expected values
|
||||||
|
pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
|
||||||
|
expected.iter().zip(suggestions).for_each(|it| {
|
||||||
|
assert_eq!(it.0, &it.1.value);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// append the separator to the converted path
|
||||||
|
pub fn folder(path: PathBuf) -> String {
|
||||||
|
let mut converted_path = file(path);
|
||||||
|
converted_path.push(SEP);
|
||||||
|
|
||||||
|
converted_path
|
||||||
|
}
|
||||||
|
|
||||||
|
// convert a given path to string
|
||||||
|
pub fn file(path: PathBuf) -> String {
|
||||||
|
path.into_os_string().into_string().unwrap_or_default()
|
||||||
|
}
|
||||||
|
|
||||||
|
// merge_input executes the given input into the engine
|
||||||
|
// and merges the state
|
||||||
|
pub fn merge_input(
|
||||||
|
input: &[u8],
|
||||||
|
engine_state: &mut EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
dir: PathBuf,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
let (block, delta) = {
|
||||||
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
|
let (block, err) = parse(&mut working_set, None, input, false, &[]);
|
||||||
|
|
||||||
|
assert!(err.is_none());
|
||||||
|
|
||||||
|
(block, working_set.render())
|
||||||
|
};
|
||||||
|
assert!(eval_block(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
&block,
|
||||||
|
PipelineData::Value(
|
||||||
|
Value::Nothing {
|
||||||
|
span: Span { start: 0, end: 0 },
|
||||||
|
},
|
||||||
|
None
|
||||||
|
),
|
||||||
|
false,
|
||||||
|
false
|
||||||
|
)
|
||||||
|
.is_ok());
|
||||||
|
|
||||||
|
// Merge delta
|
||||||
|
engine_state.merge_delta(delta, Some(stack), &dir)
|
||||||
|
}
|
3
crates/nu-cli/tests/support/mod.rs
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
pub mod completions_helpers;
|
||||||
|
|
||||||
|
pub use completions_helpers::{file, folder, match_suggestions, merge_input, new_engine};
|
@ -1,107 +0,0 @@
|
|||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
use nu_cli::NuCompleter;
|
|
||||||
use nu_command::create_default_context;
|
|
||||||
use nu_protocol::engine::{EngineState, Stack};
|
|
||||||
use nu_test_support::fs;
|
|
||||||
use reedline::{Completer, Suggestion};
|
|
||||||
const SEP: char = std::path::MAIN_SEPARATOR;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn file_completions() {
|
|
||||||
// Create a new engine
|
|
||||||
let (dir, dir_str, engine) = new_engine();
|
|
||||||
|
|
||||||
let stack = Stack::new();
|
|
||||||
|
|
||||||
// Instatiate a new completer
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
|
||||||
|
|
||||||
// Test completions for the current folder
|
|
||||||
let target_dir = format!("cp {}", dir_str);
|
|
||||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
|
||||||
|
|
||||||
// Create the expected values
|
|
||||||
let expected_paths: Vec<String> = vec![
|
|
||||||
file(dir.join("nushell")),
|
|
||||||
folder(dir.join("test_a")),
|
|
||||||
folder(dir.join("test_b")),
|
|
||||||
folder(dir.join("another")),
|
|
||||||
file(dir.join(".hidden_file")),
|
|
||||||
folder(dir.join(".hidden_folder")),
|
|
||||||
];
|
|
||||||
|
|
||||||
// Match the results
|
|
||||||
match_suggestions(expected_paths, suggestions);
|
|
||||||
|
|
||||||
// Test completions for the completions/another folder
|
|
||||||
let target_dir = format!("cd {}", folder(dir.join("another")));
|
|
||||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
|
||||||
|
|
||||||
// Create the expected values
|
|
||||||
let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];
|
|
||||||
|
|
||||||
// Match the results
|
|
||||||
match_suggestions(expected_paths, suggestions);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn folder_completions() {
|
|
||||||
// Create a new engine
|
|
||||||
let (dir, dir_str, engine) = new_engine();
|
|
||||||
|
|
||||||
let stack = Stack::new();
|
|
||||||
|
|
||||||
// Instatiate a new completer
|
|
||||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
|
||||||
|
|
||||||
// Test completions for the current folder
|
|
||||||
let target_dir = format!("cd {}", dir_str);
|
|
||||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
|
||||||
|
|
||||||
// Create the expected values
|
|
||||||
let expected_paths: Vec<String> = vec![
|
|
||||||
folder(dir.join("test_a")),
|
|
||||||
folder(dir.join("test_b")),
|
|
||||||
folder(dir.join("another")),
|
|
||||||
folder(dir.join(".hidden_folder")),
|
|
||||||
];
|
|
||||||
|
|
||||||
// Match the results
|
|
||||||
match_suggestions(expected_paths, suggestions);
|
|
||||||
}
|
|
||||||
|
|
||||||
// creates a new engine with the current path into the completions fixtures folder
|
|
||||||
pub fn new_engine() -> (PathBuf, String, EngineState) {
|
|
||||||
// Target folder inside assets
|
|
||||||
let dir = fs::fixtures().join("completions");
|
|
||||||
let mut dir_str = dir
|
|
||||||
.clone()
|
|
||||||
.into_os_string()
|
|
||||||
.into_string()
|
|
||||||
.unwrap_or_default();
|
|
||||||
dir_str.push(SEP);
|
|
||||||
|
|
||||||
// Create a default engine
|
|
||||||
(dir.clone(), dir_str, create_default_context(dir))
|
|
||||||
}
|
|
||||||
|
|
||||||
// match a list of suggestions with the expected values
|
|
||||||
pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
|
|
||||||
expected.iter().zip(suggestions).for_each(|it| {
|
|
||||||
assert_eq!(it.0, &it.1.value);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// append the separator to the converted path
|
|
||||||
pub fn folder(path: PathBuf) -> String {
|
|
||||||
let mut converted_path = file(path);
|
|
||||||
converted_path.push(SEP);
|
|
||||||
|
|
||||||
converted_path
|
|
||||||
}
|
|
||||||
|
|
||||||
// convert a given path to string
|
|
||||||
pub fn file(path: PathBuf) -> String {
|
|
||||||
path.into_os_string().into_string().unwrap_or_default()
|
|
||||||
}
|
|
88
crates/nu-cli/tests/variables_completions.rs
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
pub mod support;
|
||||||
|
|
||||||
|
use nu_cli::NuCompleter;
|
||||||
|
use reedline::Completer;
|
||||||
|
use support::{match_suggestions, new_engine};
|
||||||
|
|
||||||
|
#[test]
fn variables_completions() {
    // Create a new engine
    let (dir, _, mut engine, mut stack) = new_engine();

    // Register a record variable so we can complete on its columns below.
    let record = "let actor = { name: 'Tom Hardy', age: 44 }";
    assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());

    // Instantiate a new completer
    let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);

    // Each case: the line to complete, the cursor position, and the
    // suggestions expected back (in order).
    let cases: Vec<(&str, usize, Vec<String>)> = vec![
        // All $nu columns
        (
            "$nu.",
            4,
            vec![
                "config-path".into(),
                "env-path".into(),
                "history-path".into(),
                "home-path".into(),
                "loginshell-path".into(),
                "os-info".into(),
                "pid".into(),
                "scope".into(),
                "temp-path".into(),
            ],
        ),
        // $nu columns filtered by the "h" prefix
        ("$nu.h", 5, vec!["history-path".into(), "home-path".into()]),
        // Custom record variable
        ("$actor.", 7, vec!["age".into(), "name".into()]),
        // Custom record variable, filtered
        ("$actor.n", 8, vec!["name".into()]),
        // Environment variables
        ("$env.", 5, vec!["PWD".into(), "TEST".into()]),
        // Environment variables, filtered
        ("$env.T", 6, vec!["TEST".into()]),
    ];

    for (line, pos, expected) in cases {
        let suggestions = completer.complete(line, pos);
        assert_eq!(expected.len(), suggestions.len());
        match_suggestions(expected, suggestions);
    }
}
|
@ -4,11 +4,11 @@ description = "Color configuration code used by Nushell"
|
|||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-color-config"
|
name = "nu-color-config"
|
||||||
version = "0.62.0"
|
version = "0.64.0"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
|
nu-protocol = { path = "../nu-protocol", version = "0.64.0" }
|
||||||
nu-ansi-term = "0.45.1"
|
nu-ansi-term = "0.46.0"
|
||||||
nu-json = { path = "../nu-json", version = "0.62.0" }
|
nu-json = { path = "../nu-json", version = "0.64.0" }
|
||||||
nu-table = { path = "../nu-table", version = "0.62.0" }
|
nu-table = { path = "../nu-table", version = "0.64.0" }
|
||||||
serde = { version="1.0.123", features=["derive"] }
|
serde = { version="1.0.123", features=["derive"] }
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
use nu_ansi_term::{Color, Style};
|
use nu_ansi_term::{Color, Style};
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
|
|
||||||
#[derive(Deserialize, PartialEq, Debug)]
|
#[derive(Deserialize, PartialEq, Eq, Debug)]
|
||||||
pub struct NuStyle {
|
pub struct NuStyle {
|
||||||
pub fg: Option<String>,
|
pub fg: Option<String>,
|
||||||
pub bg: Option<String>,
|
pub bg: Option<String>,
|
||||||
|
@ -4,28 +4,29 @@ description = "Nushell's built-in commands"
|
|||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
name = "nu-command"
|
name = "nu-command"
|
||||||
version = "0.62.0"
|
version = "0.64.0"
|
||||||
build = "build.rs"
|
build = "build.rs"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-color-config = { path = "../nu-color-config", version = "0.62.0" }
|
nu-color-config = { path = "../nu-color-config", version = "0.64.0" }
|
||||||
nu-engine = { path = "../nu-engine", version = "0.62.0" }
|
nu-engine = { path = "../nu-engine", version = "0.64.0" }
|
||||||
nu-glob = { path = "../nu-glob", version = "0.62.0" }
|
nu-glob = { path = "../nu-glob", version = "0.64.0" }
|
||||||
nu-json = { path = "../nu-json", version = "0.62.0" }
|
nu-json = { path = "../nu-json", version = "0.64.0" }
|
||||||
nu-parser = { path = "../nu-parser", version = "0.62.0" }
|
nu-parser = { path = "../nu-parser", version = "0.64.0" }
|
||||||
nu-path = { path = "../nu-path", version = "0.62.0" }
|
nu-path = { path = "../nu-path", version = "0.64.0" }
|
||||||
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.62.0" }
|
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.64.0" }
|
||||||
nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
|
nu-protocol = { path = "../nu-protocol", version = "0.64.0" }
|
||||||
nu-system = { path = "../nu-system", version = "0.62.0" }
|
nu-system = { path = "../nu-system", version = "0.64.0" }
|
||||||
nu-table = { path = "../nu-table", version = "0.62.0" }
|
nu-table = { path = "../nu-table", version = "0.64.0" }
|
||||||
nu-term-grid = { path = "../nu-term-grid", version = "0.62.0" }
|
nu-term-grid = { path = "../nu-term-grid", version = "0.64.0" }
|
||||||
nu-test-support = { path = "../nu-test-support", version = "0.62.0" }
|
nu-test-support = { path = "../nu-test-support", version = "0.64.0" }
|
||||||
nu-utils = { path = "../nu-utils", version = "0.62.0" }
|
nu-utils = { path = "../nu-utils", version = "0.64.0" }
|
||||||
nu-ansi-term = "0.45.1"
|
nu-ansi-term = "0.46.0"
|
||||||
|
|
||||||
# Potential dependencies for extras
|
# Potential dependencies for extras
|
||||||
|
alphanumeric-sort = "1.4.4"
|
||||||
base64 = "0.13.0"
|
base64 = "0.13.0"
|
||||||
bytesize = "1.1.0"
|
bytesize = "1.1.0"
|
||||||
calamine = "0.18.0"
|
calamine = "0.18.0"
|
||||||
@ -46,6 +47,7 @@ htmlescape = "0.3.1"
|
|||||||
ical = "0.7.0"
|
ical = "0.7.0"
|
||||||
indexmap = { version="1.7", features=["serde-1"] }
|
indexmap = { version="1.7", features=["serde-1"] }
|
||||||
Inflector = "0.11"
|
Inflector = "0.11"
|
||||||
|
is-root = "0.1.2"
|
||||||
itertools = "0.10.0"
|
itertools = "0.10.0"
|
||||||
lazy_static = "1.4.0"
|
lazy_static = "1.4.0"
|
||||||
log = "0.4.14"
|
log = "0.4.14"
|
||||||
@ -56,6 +58,7 @@ mime = "0.3.16"
|
|||||||
notify = "4.0.17"
|
notify = "4.0.17"
|
||||||
num = { version = "0.4.0", optional = true }
|
num = { version = "0.4.0", optional = true }
|
||||||
pathdiff = "0.2.1"
|
pathdiff = "0.2.1"
|
||||||
|
powierza-coefficient = "1.0"
|
||||||
quick-xml = "0.22"
|
quick-xml = "0.22"
|
||||||
rand = "0.8"
|
rand = "0.8"
|
||||||
rayon = "1.5.1"
|
rayon = "1.5.1"
|
||||||
@ -68,7 +71,8 @@ serde_ini = "0.2.0"
|
|||||||
serde_urlencoded = "0.7.0"
|
serde_urlencoded = "0.7.0"
|
||||||
serde_yaml = "0.8.16"
|
serde_yaml = "0.8.16"
|
||||||
sha2 = "0.10.0"
|
sha2 = "0.10.0"
|
||||||
shadow-rs = "0.11.0"
|
# Disable default features b/c the default features build Git (very slow to compile)
|
||||||
|
shadow-rs = { version = "0.11.0", default-features = false }
|
||||||
strip-ansi-escapes = "0.1.1"
|
strip-ansi-escapes = "0.1.1"
|
||||||
sysinfo = "0.23.5"
|
sysinfo = "0.23.5"
|
||||||
terminal_size = "0.1.17"
|
terminal_size = "0.1.17"
|
||||||
@ -79,27 +83,38 @@ unicode-segmentation = "1.8.0"
|
|||||||
url = "2.2.1"
|
url = "2.2.1"
|
||||||
uuid = { version = "0.8.2", features = ["v4"] }
|
uuid = { version = "0.8.2", features = ["v4"] }
|
||||||
which = { version = "4.2.2", optional = true }
|
which = { version = "4.2.2", optional = true }
|
||||||
reedline = { version = "0.5.0", features = ["bashisms"]}
|
reedline = { version = "0.7.0", features = ["bashisms", "sqlite"]}
|
||||||
wax = { version = "0.4.0", features = ["diagnostics"] }
|
wax = { version = "0.4.0", features = ["diagnostics"] }
|
||||||
rusqlite = { version = "0.27.0", features = ["bundled"], optional = true }
|
rusqlite = { version = "0.27.0", features = ["bundled"], optional = true }
|
||||||
sqlparser = { version = "0.16.0", features = ["serde"], optional = true }
|
sqlparser = { version = "0.16.0", features = ["serde"], optional = true }
|
||||||
|
|
||||||
[target.'cfg(unix)'.dependencies]
|
[target.'cfg(unix)'.dependencies]
|
||||||
umask = "1.0.0"
|
umask = "2.0.0"
|
||||||
users = "0.11.0"
|
users = "0.11.0"
|
||||||
|
|
||||||
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies.trash]
|
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies.trash]
|
||||||
version = "2.0.2"
|
version = "2.1.3"
|
||||||
optional = true
|
optional = true
|
||||||
|
|
||||||
[dependencies.polars]
|
[dependencies.polars]
|
||||||
version = "0.20.0"
|
version = "0.21.1"
|
||||||
|
# path = "../../../../polars/polars"
|
||||||
optional = true
|
optional = true
|
||||||
features = [
|
features = [
|
||||||
"default", "parquet", "json", "serde", "object",
|
"default", "to_dummies", "parquet", "json", "serde", "serde-lazy",
|
||||||
"checked_arithmetic", "strings", "cum_agg", "is_in",
|
"object", "checked_arithmetic", "strings", "cum_agg", "is_in",
|
||||||
"rolling_window", "strings", "rows", "random",
|
"rolling_window", "strings", "rows", "random",
|
||||||
"dtype-datetime"
|
"dtype-datetime", "dtype-struct", "lazy", "cross_join",
|
||||||
|
"dynamic_groupby"
|
||||||
|
]
|
||||||
|
|
||||||
|
[target.'cfg(windows)'.dependencies.windows]
|
||||||
|
version = "0.37.0"
|
||||||
|
features = [
|
||||||
|
"alloc",
|
||||||
|
"Win32_Foundation",
|
||||||
|
"Win32_Storage_FileSystem",
|
||||||
|
"Win32_System_SystemServices",
|
||||||
]
|
]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
@ -110,7 +125,7 @@ dataframe = ["polars", "num"]
|
|||||||
database = ["sqlparser", "rusqlite"]
|
database = ["sqlparser", "rusqlite"]
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
shadow-rs = "0.11.0"
|
shadow-rs = { version = "0.11.0", default-features = false }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
hamcrest2 = "0.3.0"
|
hamcrest2 = "0.3.0"
|
||||||
|
@ -1,3 +1,18 @@
|
|||||||
|
use std::process::Command;
|
||||||
|
|
||||||
fn main() -> shadow_rs::SdResult<()> {
    // Look up the current Git commit ourselves instead of relying on shadow_rs,
    // because shadow_rs does it in a really slow-to-compile way (it builds libgit2)
    let hash = get_git_hash().expect("failed to get latest git commit hash");
    // Expose the hash to the crate at compile time via the
    // NU_COMMIT_HASH environment variable.
    println!("cargo:rustc-env=NU_COMMIT_HASH={}", hash);

    // Let shadow_rs generate the rest of its build metadata as usual.
    shadow_rs::new()
}
|
||||||
|
|
||||||
|
// Return the current Git commit hash (`git rev-parse HEAD`), trimmed.
//
// Errors if `git` cannot be spawned, exits unsuccessfully, or prints
// non-UTF-8 output. Previously a failed `git` invocation silently
// produced an empty hash and non-UTF-8 output caused a panic.
fn get_git_hash() -> Result<String, std::io::Error> {
    use std::io::{Error, ErrorKind};

    let out = Command::new("git").args(["rev-parse", "HEAD"]).output()?;
    if !out.status.success() {
        return Err(Error::new(
            ErrorKind::Other,
            "`git rev-parse HEAD` exited unsuccessfully",
        ));
    }
    let hash =
        String::from_utf8(out.stdout).map_err(|e| Error::new(ErrorKind::InvalidData, e))?;
    Ok(hash.trim().to_string())
}
|
||||||
|
317
crates/nu-command/src/charting/hashable_value.rs
Normal file
@ -0,0 +1,317 @@
|
|||||||
|
use chrono::{DateTime, FixedOffset};
|
||||||
|
use nu_protocol::{ShellError, Span, Value};
|
||||||
|
use std::hash::{Hash, Hasher};
|
||||||
|
|
||||||
|
/// A subset of [Value](crate::Value), which is hashable.
/// And it means that we can put the value into something like [HashMap](std::collections::HashMap) or [HashSet](std::collections::HashSet)
/// for further usage like value statistics.
///
/// For now the main way to create a [HashableValue] is using [from_value](HashableValue::from_value)
///
/// Please note that although each variant contains `span` field, but during hashing, this field will not be concerned.
/// Which means that the following will be true:
/// ```text
/// assert_eq!(HashableValue::Bool {val: true, span: Span{start: 0, end: 1}}, HashableValue::Bool {val: true, span: Span{start: 90, end: 1000}})
/// ```
// `Eq` is derived; `PartialEq` and `Hash` are implemented manually below
// so the `span` field can be excluded from both.
#[derive(Eq, Debug)]
pub enum HashableValue {
    Bool {
        val: bool,
        span: Span,
    },
    Int {
        val: i64,
        span: Span,
    },
    Float {
        val: [u8; 8], // because f64 is not hashable, we save it as [u8;8] array to make it hashable.
        span: Span,
    },
    Filesize {
        val: i64,
        span: Span,
    },
    Duration {
        val: i64,
        span: Span,
    },
    Date {
        val: DateTime<FixedOffset>,
        span: Span,
    },
    String {
        val: String,
        span: Span,
    },
    Binary {
        val: Vec<u8>,
        span: Span,
    },
}
|
||||||
|
|
||||||
|
impl Default for HashableValue {
|
||||||
|
fn default() -> Self {
|
||||||
|
HashableValue::Bool {
|
||||||
|
val: false,
|
||||||
|
span: Span { start: 0, end: 0 },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HashableValue {
    /// Try to convert from `value` to self
    ///
    /// A `span` is required because when there is an error in value, it may not contain `span` field.
    ///
    /// If the given value is not hashable (mainly because it is structured data), an error will be returned.
    pub fn from_value(value: Value, span: Span) -> Result<Self, ShellError> {
        match value {
            Value::Bool { val, span } => Ok(HashableValue::Bool { val, span }),
            Value::Int { val, span } => Ok(HashableValue::Int { val, span }),
            Value::Filesize { val, span } => Ok(HashableValue::Filesize { val, span }),
            Value::Duration { val, span } => Ok(HashableValue::Duration { val, span }),
            Value::Date { val, span } => Ok(HashableValue::Date { val, span }),
            // f64 is not hashable, so store its native byte representation.
            Value::Float { val, span } => Ok(HashableValue::Float {
                val: val.to_ne_bytes(),
                span,
            }),
            Value::String { val, span } => Ok(HashableValue::String { val, span }),
            Value::Binary { val, span } => Ok(HashableValue::Binary { val, span }),

            // Structured data (lists, records, blocks, ...) is not hashable.
            _ => {
                // Prefer the value's own span for the error; fall back to the caller's.
                let input_span = value.span().unwrap_or(span);
                Err(ShellError::UnsupportedInput(
                    format!("input value {value:?} is not hashable"),
                    input_span,
                ))
            }
        }
    }

    /// Convert from self to nu's core data type `Value`.
    pub fn into_value(self) -> Value {
        match self {
            HashableValue::Bool { val, span } => Value::Bool { val, span },
            HashableValue::Int { val, span } => Value::Int { val, span },
            HashableValue::Filesize { val, span } => Value::Filesize { val, span },
            HashableValue::Duration { val, span } => Value::Duration { val, span },
            HashableValue::Date { val, span } => Value::Date { val, span },
            // Restore the f64 from the bytes saved by `from_value`.
            HashableValue::Float { val, span } => Value::Float {
                val: f64::from_ne_bytes(val),
                span,
            },
            HashableValue::String { val, span } => Value::String { val, span },
            HashableValue::Binary { val, span } => Value::Binary { val, span },
        }
    }
}
|
||||||
|
|
||||||
|
impl Hash for HashableValue {
    // Hash only the inner `val`; `span` is deliberately excluded so equal
    // values from different source locations hash identically — this keeps
    // the impl consistent with the span-ignoring `PartialEq` below.
    fn hash<H: Hasher>(&self, state: &mut H) {
        match self {
            HashableValue::Bool { val, .. } => val.hash(state),
            HashableValue::Int { val, .. } => val.hash(state),
            HashableValue::Filesize { val, .. } => val.hash(state),
            HashableValue::Duration { val, .. } => val.hash(state),
            HashableValue::Date { val, .. } => val.hash(state),
            // Float hashes its `[u8; 8]` byte representation.
            HashableValue::Float { val, .. } => val.hash(state),
            HashableValue::String { val, .. } => val.hash(state),
            HashableValue::Binary { val, .. } => val.hash(state),
        }
    }
}
|
||||||
|
|
||||||
|
impl PartialEq for HashableValue {
    // Compare only the inner `val`; `span` is ignored, matching the `Hash`
    // impl above. Different variants are never equal (`_ => false`).
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (HashableValue::Bool { val: lhs, .. }, HashableValue::Bool { val: rhs, .. }) => {
                lhs == rhs
            }
            (HashableValue::Int { val: lhs, .. }, HashableValue::Int { val: rhs, .. }) => {
                lhs == rhs
            }
            (
                HashableValue::Filesize { val: lhs, .. },
                HashableValue::Filesize { val: rhs, .. },
            ) => lhs == rhs,
            (
                HashableValue::Duration { val: lhs, .. },
                HashableValue::Duration { val: rhs, .. },
            ) => lhs == rhs,
            (HashableValue::Date { val: lhs, .. }, HashableValue::Date { val: rhs, .. }) => {
                lhs == rhs
            }
            // Floats compare by their stored byte representation, so this is
            // bitwise equality, not numeric f64 equality.
            (HashableValue::Float { val: lhs, .. }, HashableValue::Float { val: rhs, .. }) => {
                lhs == rhs
            }
            (HashableValue::String { val: lhs, .. }, HashableValue::String { val: rhs, .. }) => {
                lhs == rhs
            }
            (HashableValue::Binary { val: lhs, .. }, HashableValue::Binary { val: rhs, .. }) => {
                lhs == rhs
            }
            _ => false,
        }
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    use super::*;
    use nu_protocol::ast::{CellPath, PathMember};
    use std::collections::{HashMap, HashSet};

    // Every hashable Value variant converts into the matching
    // HashableValue variant.
    #[test]
    fn from_value() {
        let span = Span::test_data();
        let values = vec![
            (
                Value::Bool { val: true, span },
                HashableValue::Bool { val: true, span },
            ),
            (
                Value::Int { val: 1, span },
                HashableValue::Int { val: 1, span },
            ),
            (
                Value::Filesize { val: 1, span },
                HashableValue::Filesize { val: 1, span },
            ),
            (
                Value::Duration { val: 1, span },
                HashableValue::Duration { val: 1, span },
            ),
            (
                Value::Date {
                    val: DateTime::<FixedOffset>::parse_from_rfc2822(
                        "Wed, 18 Feb 2015 23:16:09 GMT",
                    )
                    .unwrap(),
                    span,
                },
                HashableValue::Date {
                    val: DateTime::<FixedOffset>::parse_from_rfc2822(
                        "Wed, 18 Feb 2015 23:16:09 GMT",
                    )
                    .unwrap(),
                    span,
                },
            ),
            (
                Value::String {
                    val: "1".to_string(),
                    span,
                },
                HashableValue::String {
                    val: "1".to_string(),
                    span,
                },
            ),
            (
                Value::Binary { val: vec![1], span },
                HashableValue::Binary { val: vec![1], span },
            ),
        ];
        for (val, expect_hashable_val) in values.into_iter() {
            assert_eq!(
                HashableValue::from_value(val, Span { start: 0, end: 0 }).unwrap(),
                expect_hashable_val
            );
        }
    }

    // Structured values (lists, blocks, nothing, errors, cell paths)
    // must be rejected by from_value.
    #[test]
    fn from_unhashable_value() {
        let span = Span::test_data();
        let values = [
            Value::List {
                vals: vec![Value::Bool { val: true, span }],
                span,
            },
            Value::Block {
                val: 0,
                captures: HashMap::new(),
                span,
            },
            Value::Nothing { span },
            Value::Error {
                error: ShellError::DidYouMean("what?".to_string(), span),
            },
            Value::CellPath {
                val: CellPath {
                    members: vec![PathMember::Int { val: 0, span }],
                },
                span,
            },
        ];
        for v in values {
            assert!(HashableValue::from_value(v, Span { start: 0, end: 0 }).is_err())
        }
    }

    // from_value followed by into_value round-trips every hashable variant.
    #[test]
    fn from_to_tobe_same() {
        let span = Span::test_data();
        let values = vec![
            Value::Bool { val: true, span },
            Value::Int { val: 1, span },
            Value::Filesize { val: 1, span },
            Value::Duration { val: 1, span },
            Value::String {
                val: "1".to_string(),
                span,
            },
            Value::Binary { val: vec![1], span },
        ];
        for val in values.into_iter() {
            let expected_val = val.clone();
            assert_eq!(
                HashableValue::from_value(val, Span { start: 0, end: 0 })
                    .unwrap()
                    .into_value(),
                expected_val
            );
        }
    }

    // Equality must ignore the span field entirely.
    #[test]
    fn hashable_value_eq_without_concern_span() {
        assert_eq!(
            HashableValue::Bool {
                val: true,
                span: Span { start: 0, end: 1 }
            },
            HashableValue::Bool {
                val: true,
                span: Span {
                    start: 90,
                    end: 1000
                }
            }
        )
    }

    // HashSet membership must also ignore span: the same value with two
    // different spans occupies a single set slot.
    #[test]
    fn put_to_hashset() {
        let span = Span::test_data();
        let mut set = HashSet::new();
        set.insert(HashableValue::Bool { val: true, span });
        assert!(set.contains(&HashableValue::Bool { val: true, span }));

        // hashable value doesn't care about span.
        let diff_span = Span { start: 1, end: 2 };
        set.insert(HashableValue::Bool {
            val: true,
            span: diff_span,
        });
        assert!(set.contains(&HashableValue::Bool { val: true, span }));
        assert!(set.contains(&HashableValue::Bool {
            val: true,
            span: diff_span
        }));
        assert_eq!(set.len(), 1);

        set.insert(HashableValue::Int { val: 2, span });
        assert_eq!(set.len(), 2);
    }
}
|
256
crates/nu-command/src/charting/histogram.rs
Normal file
@ -0,0 +1,256 @@
|
|||||||
|
use super::hashable_value::HashableValue;
|
||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{
|
||||||
|
Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape,
|
||||||
|
Value,
|
||||||
|
};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::iter;
|
||||||
|
|
||||||
|
/// The `histogram` command: creates a new table with a histogram based on
/// the column name passed in (see the `Command` impl below).
#[derive(Clone)]
pub struct Histogram;
|
||||||
|
|
||||||
|
/// How the percentage column of the histogram is computed
/// (selected via the `--percentage-type` flag; default is `Normalize`).
enum PercentageCalcMethod {
    // presumably percentage of the total count — confirm in run_histogram
    Normalize,
    // percentage relative to the maximum value (per the command's example text)
    Relative,
}
|
||||||
|
|
||||||
|
impl Command for Histogram {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"histogram"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> Signature {
|
||||||
|
Signature::build("histogram")
|
||||||
|
.optional("column-name", SyntaxShape::String, "column name to calc frequency, no need to provide if input is just a list")
|
||||||
|
.optional("frequency-column-name", SyntaxShape::String, "histogram's frequency column, default to be frequency column output")
|
||||||
|
.named("percentage-type", SyntaxShape::String, "percentage calculate method, can be 'normalize' or 'relative', in 'normalize', defaults to be 'normalize'", Some('t'))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Creates a new table with a histogram based on the column name passed in."
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![
|
||||||
|
Example {
|
||||||
|
description: "Get a histogram for the types of files",
|
||||||
|
example: "ls | histogram type",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description:
|
||||||
|
"Get a histogram for the types of files, with frequency column named freq",
|
||||||
|
example: "ls | histogram type freq",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description: "Get a histogram for a list of numbers",
|
||||||
|
example: "echo [1 2 3 1 1 1 2 2 1 1] | histogram",
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description: "Get a histogram for a list of numbers, and percentage is based on the maximum value",
|
||||||
|
example: "echo [1 2 3 1 1 1 2 2 1 1] | histogram --percentage-type relative",
|
||||||
|
result: None,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
// input check.
|
||||||
|
let column_name: Option<Spanned<String>> = call.opt(engine_state, stack, 0)?;
|
||||||
|
let frequency_name_arg = call.opt::<Spanned<String>>(engine_state, stack, 1)?;
|
||||||
|
let frequency_column_name = match frequency_name_arg {
|
||||||
|
Some(inner) => {
|
||||||
|
let span = inner.span;
|
||||||
|
if ["value", "count", "quantile", "percentage"].contains(&inner.item.as_str()) {
|
||||||
|
return Err(ShellError::UnsupportedInput(
|
||||||
|
"frequency-column-name can't be 'value', 'count' or 'percentage'"
|
||||||
|
.to_string(),
|
||||||
|
span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
inner.item
|
||||||
|
}
|
||||||
|
None => "frequency".to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let calc_method: Option<Spanned<String>> =
|
||||||
|
call.get_flag(engine_state, stack, "percentage-type")?;
|
||||||
|
let calc_method = match calc_method {
|
||||||
|
None => PercentageCalcMethod::Normalize,
|
||||||
|
Some(inner) => match inner.item.as_str() {
|
||||||
|
"normalize" => PercentageCalcMethod::Normalize,
|
||||||
|
"relative" => PercentageCalcMethod::Relative,
|
||||||
|
_ => {
|
||||||
|
return Err(ShellError::UnsupportedInput(
|
||||||
|
"calc method can only be 'normalize' or 'relative'".to_string(),
|
||||||
|
inner.span,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
let span = call.head;
|
||||||
|
let data_as_value = input.into_value(span);
|
||||||
|
// `input` is not a list, here we can return an error.
|
||||||
|
match data_as_value.as_list() {
|
||||||
|
Ok(list_value) => run_histogram(
|
||||||
|
list_value.to_vec(),
|
||||||
|
column_name,
|
||||||
|
frequency_column_name,
|
||||||
|
calc_method,
|
||||||
|
span,
|
||||||
|
),
|
||||||
|
Err(e) => Err(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run_histogram(
|
||||||
|
values: Vec<Value>,
|
||||||
|
column_name: Option<Spanned<String>>,
|
||||||
|
freq_column: String,
|
||||||
|
calc_method: PercentageCalcMethod,
|
||||||
|
head_span: Span,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let mut inputs = vec![];
|
||||||
|
// convert from inputs to hashable values.
|
||||||
|
match column_name {
|
||||||
|
None => {
|
||||||
|
// some invalid input scenario needs to handle:
|
||||||
|
// Expect input is a list of hashable value, if one value is not hashable, throw out error.
|
||||||
|
for v in values {
|
||||||
|
let current_span = v.span().unwrap_or(head_span);
|
||||||
|
inputs.push(HashableValue::from_value(v, head_span).map_err(|_| {
|
||||||
|
ShellError::UnsupportedInput(
|
||||||
|
"--column-name is not provided, can only support a list of simple value."
|
||||||
|
.to_string(),
|
||||||
|
current_span,
|
||||||
|
)
|
||||||
|
})?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Some(ref col) => {
|
||||||
|
// some invalid input scenario needs to handle:
|
||||||
|
// * item in `input` is not a record, just skip it.
|
||||||
|
// * a record doesn't contain specific column, just skip it.
|
||||||
|
// * all records don't contain specific column, throw out error, indicate at least one row should contains specific column.
|
||||||
|
// * a record contain a value which can't be hashed, skip it.
|
||||||
|
let col_name = &col.item;
|
||||||
|
for v in values {
|
||||||
|
match v {
|
||||||
|
// parse record, and fill valid value to actual input.
|
||||||
|
Value::Record { cols, vals, .. } => {
|
||||||
|
for (c, v) in iter::zip(cols, vals) {
|
||||||
|
if &c == col_name {
|
||||||
|
if let Ok(v) = HashableValue::from_value(v, head_span) {
|
||||||
|
inputs.push(v);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => continue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if inputs.is_empty() {
|
||||||
|
return Err(ShellError::UnsupportedInput(
|
||||||
|
format!("expect input is table, and inputs doesn't contain any value which has {col_name} column"),
|
||||||
|
head_span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let value_column_name = column_name
|
||||||
|
.map(|x| x.item)
|
||||||
|
.unwrap_or_else(|| "value".to_string());
|
||||||
|
Ok(histogram_impl(
|
||||||
|
inputs,
|
||||||
|
&value_column_name,
|
||||||
|
calc_method,
|
||||||
|
&freq_column,
|
||||||
|
head_span,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn histogram_impl(
|
||||||
|
inputs: Vec<HashableValue>,
|
||||||
|
value_column_name: &str,
|
||||||
|
calc_method: PercentageCalcMethod,
|
||||||
|
freq_column: &str,
|
||||||
|
span: Span,
|
||||||
|
) -> PipelineData {
|
||||||
|
// here we can make sure that inputs is not empty, and every elements
|
||||||
|
// is a simple val and ok to make count.
|
||||||
|
let mut counter = HashMap::new();
|
||||||
|
let mut max_cnt = 0;
|
||||||
|
let total_cnt = inputs.len();
|
||||||
|
for i in inputs {
|
||||||
|
let new_cnt = *counter.get(&i).unwrap_or(&0) + 1;
|
||||||
|
counter.insert(i, new_cnt);
|
||||||
|
if new_cnt > max_cnt {
|
||||||
|
max_cnt = new_cnt;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut result = vec![];
|
||||||
|
let result_cols = vec![
|
||||||
|
value_column_name.to_string(),
|
||||||
|
"count".to_string(),
|
||||||
|
"quantile".to_string(),
|
||||||
|
"percentage".to_string(),
|
||||||
|
freq_column.to_string(),
|
||||||
|
];
|
||||||
|
const MAX_FREQ_COUNT: f64 = 100.0;
|
||||||
|
for (val, count) in counter.into_iter() {
|
||||||
|
let quantile = match calc_method {
|
||||||
|
PercentageCalcMethod::Normalize => (count as f64 / total_cnt as f64),
|
||||||
|
PercentageCalcMethod::Relative => (count as f64 / max_cnt as f64),
|
||||||
|
};
|
||||||
|
|
||||||
|
let percentage = format!("{:.2}%", quantile * 100_f64);
|
||||||
|
let freq = "*".repeat((MAX_FREQ_COUNT * quantile).floor() as usize);
|
||||||
|
|
||||||
|
result.push(Value::Record {
|
||||||
|
cols: result_cols.clone(),
|
||||||
|
vals: vec![
|
||||||
|
val.into_value(),
|
||||||
|
Value::Int { val: count, span },
|
||||||
|
Value::Float {
|
||||||
|
val: quantile,
|
||||||
|
span,
|
||||||
|
},
|
||||||
|
Value::String {
|
||||||
|
val: percentage,
|
||||||
|
span,
|
||||||
|
},
|
||||||
|
Value::String { val: freq, span },
|
||||||
|
],
|
||||||
|
span,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Value::List { vals: result, span }.into_pipeline_data()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_examples() {
|
||||||
|
use crate::test_examples;
|
||||||
|
|
||||||
|
test_examples(Histogram)
|
||||||
|
}
|
||||||
|
}
|
4
crates/nu-command/src/charting/mod.rs
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
mod hashable_value;
|
||||||
|
mod histogram;
|
||||||
|
|
||||||
|
pub use histogram::Histogram;
|
@ -24,13 +24,13 @@ enum Zone {
|
|||||||
Local,
|
Local,
|
||||||
East(u8),
|
East(u8),
|
||||||
West(u8),
|
West(u8),
|
||||||
Error, // we want the nullshell to cast it instead of rust
|
Error, // we want Nushell to cast it instead of Rust
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Zone {
|
impl Zone {
|
||||||
fn new(i: i64) -> Self {
|
fn new(i: i64) -> Self {
|
||||||
if i.abs() <= 12 {
|
if i.abs() <= 12 {
|
||||||
// guanranteed here
|
// guaranteed here
|
||||||
if i >= 0 {
|
if i >= 0 {
|
||||||
Self::East(i as u8) // won't go out of range
|
Self::East(i as u8) // won't go out of range
|
||||||
} else {
|
} else {
|
||||||
@ -59,29 +59,29 @@ impl Command for SubCommand {
|
|||||||
|
|
||||||
fn signature(&self) -> Signature {
|
fn signature(&self) -> Signature {
|
||||||
Signature::build("into datetime")
|
Signature::build("into datetime")
|
||||||
.switch(
|
|
||||||
"list",
|
|
||||||
"lists strftime cheatsheet",
|
|
||||||
Some('l'),
|
|
||||||
)
|
|
||||||
.named(
|
.named(
|
||||||
"timezone",
|
"timezone",
|
||||||
SyntaxShape::String,
|
SyntaxShape::String,
|
||||||
"Specify timezone if the input is timestamp, like 'UTC/u' or 'LOCAL/l'",
|
"Specify timezone if the input is a Unix timestamp. Valid options: 'UTC' ('u') or 'LOCAL' ('l')",
|
||||||
Some('z'),
|
Some('z'),
|
||||||
)
|
)
|
||||||
.named(
|
.named(
|
||||||
"offset",
|
"offset",
|
||||||
SyntaxShape::Int,
|
SyntaxShape::Int,
|
||||||
"Specify timezone by offset if the input is timestamp, like '+8', '-4', prior than timezone",
|
"Specify timezone by offset from UTC if the input is a Unix timestamp, like '+8', '-4'",
|
||||||
Some('o'),
|
Some('o'),
|
||||||
)
|
)
|
||||||
.named(
|
.named(
|
||||||
"format",
|
"format",
|
||||||
SyntaxShape::String,
|
SyntaxShape::String,
|
||||||
"Specify date and time formatting",
|
"Specify an expected format for parsing strings to datetimes. Use --list to see all possible options",
|
||||||
Some('f'),
|
Some('f'),
|
||||||
)
|
)
|
||||||
|
.switch(
|
||||||
|
"list",
|
||||||
|
"Show all possible variables for use with the --format flag",
|
||||||
|
Some('l'),
|
||||||
|
)
|
||||||
.rest(
|
.rest(
|
||||||
"rest",
|
"rest",
|
||||||
SyntaxShape::CellPath,
|
SyntaxShape::CellPath,
|
||||||
@ -112,28 +112,40 @@ impl Command for SubCommand {
|
|||||||
vec![
|
vec![
|
||||||
Example {
|
Example {
|
||||||
description: "Convert to datetime",
|
description: "Convert to datetime",
|
||||||
example: "'16.11.1984 8:00 am +0000' | into datetime",
|
example: "'27.02.2021 1:55 pm +0000' | into datetime",
|
||||||
result: None,
|
result: Some(Value::Date {
|
||||||
|
val: Utc.timestamp(1614434100, 0).into(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
Example {
|
Example {
|
||||||
description: "Convert to datetime",
|
description: "Convert to datetime",
|
||||||
example: "'2020-08-04T16:39:18+00:00' | into datetime",
|
example: "'2021-02-27T13:55:40+00:00' | into datetime",
|
||||||
result: None,
|
result: Some(Value::Date {
|
||||||
|
val: Utc.timestamp(1614434140, 0).into(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
Example {
|
Example {
|
||||||
description: "Convert to datetime using a custom format",
|
description: "Convert to datetime using a custom format",
|
||||||
example: "'20200904_163918+0000' | into datetime -f '%Y%m%d_%H%M%S%z'",
|
example: "'20210227_135540+0000' | into datetime -f '%Y%m%d_%H%M%S%z'",
|
||||||
result: None,
|
result: Some(Value::Date {
|
||||||
|
val: Utc.timestamp(1614434140, 0).into(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
Example {
|
Example {
|
||||||
description: "Convert timestamp (no larger than 8e+12) to datetime using a specified timezone",
|
description: "Convert timestamp (no larger than 8e+12) to a UTC datetime",
|
||||||
example: "'1614434140' | into datetime -z 'UTC'",
|
example: "1614434140 | into datetime",
|
||||||
result: None,
|
result: Some(Value::Date {
|
||||||
|
val: Utc.timestamp(1614434140, 0).into(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
}),
|
||||||
},
|
},
|
||||||
Example {
|
Example {
|
||||||
description:
|
description:
|
||||||
"Convert timestamp (no larger than 8e+12) to datetime using a specified timezone offset (between -12 and 12)",
|
"Convert timestamp (no larger than 8e+12) to datetime using a specified timezone offset (between -12 and 12)",
|
||||||
example: "'1614434140' | into datetime -o +9",
|
example: "1614434140 | into datetime -o +9",
|
||||||
result: None,
|
result: None,
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
@ -209,58 +221,78 @@ fn action(
|
|||||||
dateformat: &Option<DatetimeFormat>,
|
dateformat: &Option<DatetimeFormat>,
|
||||||
head: Span,
|
head: Span,
|
||||||
) -> Value {
|
) -> Value {
|
||||||
match input {
|
// Check to see if input looks like a Unix timestamp (i.e. can it be parsed to an int?)
|
||||||
Value::String { val: s, span } => {
|
let timestamp = match input {
|
||||||
let ts = s.parse::<i64>();
|
Value::Int { val, .. } => Ok(*val),
|
||||||
// if timezone if specified, first check if the input is a timestamp.
|
Value::String { val, .. } => val.parse::<i64>(),
|
||||||
if let Some(tz) = timezone {
|
other => {
|
||||||
const TIMESTAMP_BOUND: i64 = 8.2e+12 as i64;
|
return Value::Error {
|
||||||
// Since the timestamp method of chrono itself don't throw an error (it just panicked)
|
error: ShellError::UnsupportedInput(
|
||||||
// We have to manually guard it.
|
format!("Expected string or int, got {} instead", other.get_type()),
|
||||||
if let Ok(t) = ts {
|
head,
|
||||||
if t.abs() > TIMESTAMP_BOUND {
|
),
|
||||||
return Value::Error{error: ShellError::UnsupportedInput(
|
|
||||||
"Given timestamp is out of range, it should between -8e+12 and 8e+12".to_string(),
|
|
||||||
head,
|
|
||||||
)};
|
|
||||||
}
|
|
||||||
const HOUR: i32 = 3600;
|
|
||||||
let stampout = match tz.item {
|
|
||||||
Zone::Utc => Value::Date {
|
|
||||||
val: Utc.timestamp(t, 0).into(),
|
|
||||||
span: head,
|
|
||||||
},
|
|
||||||
Zone::Local => Value::Date {
|
|
||||||
val: Local.timestamp(t, 0).into(),
|
|
||||||
span: head,
|
|
||||||
},
|
|
||||||
Zone::East(i) => {
|
|
||||||
let eastoffset = FixedOffset::east((i as i32) * HOUR);
|
|
||||||
Value::Date {
|
|
||||||
val: eastoffset.timestamp(t, 0),
|
|
||||||
span: head,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Zone::West(i) => {
|
|
||||||
let westoffset = FixedOffset::west((i as i32) * HOUR);
|
|
||||||
Value::Date {
|
|
||||||
val: westoffset.timestamp(t, 0),
|
|
||||||
span: head,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Zone::Error => Value::Error {
|
|
||||||
error: ShellError::UnsupportedInput(
|
|
||||||
"Cannot convert given timezone or offset to timestamp".to_string(),
|
|
||||||
tz.span,
|
|
||||||
),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
return stampout;
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
// if it's not, continue and default to the system's local timezone.
|
}
|
||||||
let out = match dateformat {
|
};
|
||||||
Some(dt) => match DateTime::parse_from_str(s, &dt.0) {
|
|
||||||
|
if let Ok(ts) = timestamp {
|
||||||
|
const TIMESTAMP_BOUND: i64 = 8.2e+12 as i64;
|
||||||
|
const HOUR: i32 = 3600;
|
||||||
|
|
||||||
|
if ts.abs() > TIMESTAMP_BOUND {
|
||||||
|
return Value::Error {
|
||||||
|
error: ShellError::UnsupportedInput(
|
||||||
|
"Given timestamp is out of range, it should between -8e+12 and 8e+12"
|
||||||
|
.to_string(),
|
||||||
|
head,
|
||||||
|
),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return match timezone {
|
||||||
|
// default to UTC
|
||||||
|
None => Value::Date {
|
||||||
|
val: Utc.timestamp(ts, 0).into(),
|
||||||
|
span: head,
|
||||||
|
},
|
||||||
|
Some(Spanned { item, span }) => match item {
|
||||||
|
Zone::Utc => Value::Date {
|
||||||
|
val: Utc.timestamp(ts, 0).into(),
|
||||||
|
span: head,
|
||||||
|
},
|
||||||
|
Zone::Local => Value::Date {
|
||||||
|
val: Local.timestamp(ts, 0).into(),
|
||||||
|
span: head,
|
||||||
|
},
|
||||||
|
Zone::East(i) => {
|
||||||
|
let eastoffset = FixedOffset::east((*i as i32) * HOUR);
|
||||||
|
Value::Date {
|
||||||
|
val: eastoffset.timestamp(ts, 0),
|
||||||
|
span: head,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Zone::West(i) => {
|
||||||
|
let westoffset = FixedOffset::west((*i as i32) * HOUR);
|
||||||
|
Value::Date {
|
||||||
|
val: westoffset.timestamp(ts, 0),
|
||||||
|
span: head,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Zone::Error => Value::Error {
|
||||||
|
error: ShellError::UnsupportedInput(
|
||||||
|
"Cannot convert given timezone or offset to timestamp".to_string(),
|
||||||
|
*span,
|
||||||
|
),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// If input is not a timestamp, try parsing it as a string
|
||||||
|
match input {
|
||||||
|
Value::String { val, span } => {
|
||||||
|
match dateformat {
|
||||||
|
Some(dt) => match DateTime::parse_from_str(val, &dt.0) {
|
||||||
Ok(d) => Value::Date { val: d, span: head },
|
Ok(d) => Value::Date { val: d, span: head },
|
||||||
Err(reason) => {
|
Err(reason) => {
|
||||||
return Value::Error {
|
return Value::Error {
|
||||||
@ -276,23 +308,21 @@ fn action(
|
|||||||
// Tries to automatically parse the date
|
// Tries to automatically parse the date
|
||||||
// (i.e. without a format string)
|
// (i.e. without a format string)
|
||||||
// and assumes the system's local timezone if none is specified
|
// and assumes the system's local timezone if none is specified
|
||||||
None => match parse_date_from_string(s, *span) {
|
None => match parse_date_from_string(val, *span) {
|
||||||
Ok(date) => Value::Date {
|
Ok(date) => Value::Date {
|
||||||
val: date,
|
val: date,
|
||||||
span: *span,
|
span: *span,
|
||||||
},
|
},
|
||||||
Err(err) => err,
|
Err(err) => err,
|
||||||
},
|
},
|
||||||
};
|
|
||||||
|
|
||||||
out
|
|
||||||
}
|
|
||||||
other => {
|
|
||||||
let got = format!("Expected string, got {} instead", other.get_type());
|
|
||||||
Value::Error {
|
|
||||||
error: ShellError::UnsupportedInput(got, head),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
other => Value::Error {
|
||||||
|
error: ShellError::UnsupportedInput(
|
||||||
|
format!("Expected string, got {} instead", other.get_type()),
|
||||||
|
head,
|
||||||
|
),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -351,6 +381,23 @@ mod tests {
|
|||||||
assert_eq!(actual, expected)
|
assert_eq!(actual, expected)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn takes_timestamp_offset_as_int() {
|
||||||
|
let date_int = Value::test_int(1614434140);
|
||||||
|
let timezone_option = Some(Spanned {
|
||||||
|
item: Zone::East(8),
|
||||||
|
span: Span::test_data(),
|
||||||
|
});
|
||||||
|
let actual = action(&date_int, &timezone_option, &None, Span::test_data());
|
||||||
|
let expected = Value::Date {
|
||||||
|
val: DateTime::parse_from_str("2021-02-27 21:55:40 +08:00", "%Y-%m-%d %H:%M:%S %z")
|
||||||
|
.unwrap(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(actual, expected)
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn takes_timestamp() {
|
fn takes_timestamp() {
|
||||||
let date_str = Value::test_string("1614434140");
|
let date_str = Value::test_string("1614434140");
|
||||||
@ -367,6 +414,20 @@ mod tests {
|
|||||||
assert_eq!(actual, expected)
|
assert_eq!(actual, expected)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn takes_timestamp_without_timezone() {
|
||||||
|
let date_str = Value::test_string("1614434140");
|
||||||
|
let timezone_option = None;
|
||||||
|
let actual = action(&date_str, &timezone_option, &None, Span::test_data());
|
||||||
|
|
||||||
|
let expected = Value::Date {
|
||||||
|
val: Utc.timestamp(1614434140, 0).into(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(actual, expected)
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn takes_invalid_timestamp() {
|
fn takes_invalid_timestamp() {
|
||||||
let date_str = Value::test_string("10440970000000");
|
let date_str = Value::test_string("10440970000000");
|
||||||
|
@ -133,6 +133,10 @@ pub fn action(input: &Value, span: Span) -> Value {
|
|||||||
},
|
},
|
||||||
Err(error) => Value::Error { error },
|
Err(error) => Value::Error { error },
|
||||||
},
|
},
|
||||||
|
Value::Nothing { .. } => Value::Filesize {
|
||||||
|
val: 0,
|
||||||
|
span: value_span,
|
||||||
|
},
|
||||||
_ => Value::Error {
|
_ => Value::Error {
|
||||||
error: ShellError::UnsupportedInput(
|
error: ShellError::UnsupportedInput(
|
||||||
"'into filesize' for unsupported type".into(),
|
"'into filesize' for unsupported type".into(),
|
||||||
|
@ -85,6 +85,11 @@ impl Command for SubCommand {
|
|||||||
span: Span::test_data(),
|
span: Span::test_data(),
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
|
Example {
|
||||||
|
description: "Convert date to integer (Unix timestamp)",
|
||||||
|
example: "2022-02-02 | into int",
|
||||||
|
result: Some(Value::test_int(1643760000)),
|
||||||
|
},
|
||||||
Example {
|
Example {
|
||||||
description: "Convert to integer from binary",
|
description: "Convert to integer from binary",
|
||||||
example: "'1101' | into int -r 2",
|
example: "'1101' | into int -r 2",
|
||||||
@ -181,8 +186,15 @@ pub fn action(input: &Value, span: Span, radix: u32) -> Value {
|
|||||||
Value::Int { val: 0, span }
|
Value::Int { val: 0, span }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Value::Date { val, .. } => Value::Int {
|
||||||
|
val: val.timestamp(),
|
||||||
|
span,
|
||||||
|
},
|
||||||
_ => Value::Error {
|
_ => Value::Error {
|
||||||
error: ShellError::UnsupportedInput("'into int' for unsupported type".into(), span),
|
error: ShellError::UnsupportedInput(
|
||||||
|
format!("'into int' for unsupported type '{}'", input.get_type()),
|
||||||
|
span,
|
||||||
|
),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -248,7 +248,7 @@ pub fn action(
|
|||||||
span,
|
span,
|
||||||
},
|
},
|
||||||
Value::Nothing { .. } => Value::String {
|
Value::Nothing { .. } => Value::String {
|
||||||
val: "nothing".to_string(),
|
val: "".to_string(),
|
||||||
span,
|
span,
|
||||||
},
|
},
|
||||||
Value::Record {
|
Value::Record {
|
||||||
|
@ -26,14 +26,18 @@ impl Command for Alias {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["abbr", "aka", "fn", "func", "function"]
|
||||||
|
}
|
||||||
|
|
||||||
fn run(
|
fn run(
|
||||||
&self,
|
&self,
|
||||||
_engine_state: &EngineState,
|
_engine_state: &EngineState,
|
||||||
|
@ -27,8 +27,8 @@ impl Command for Def {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -27,8 +27,8 @@ impl Command for DefEnv {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -13,7 +13,7 @@ impl Command for Describe {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn usage(&self) -> &str {
|
||||||
"Describe the value(s) piped in."
|
"Describe the type and structure of the value(s) piped in."
|
||||||
}
|
}
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
fn signature(&self) -> Signature {
|
||||||
@ -55,6 +55,10 @@ impl Command for Describe {
|
|||||||
result: Some(Value::test_string("string")),
|
result: Some(Value::test_string("string")),
|
||||||
}]
|
}]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["type", "typeof", "info", "structure"]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
@ -24,6 +24,10 @@ impl Command for ErrorMake {
|
|||||||
"Create an error."
|
"Create an error."
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["err", "panic", "crash", "throw"]
|
||||||
|
}
|
||||||
|
|
||||||
fn run(
|
fn run(
|
||||||
&self,
|
&self,
|
||||||
engine_state: &EngineState,
|
engine_state: &EngineState,
|
||||||
@ -36,7 +40,7 @@ impl Command for ErrorMake {
|
|||||||
let arg: Option<Value> = call.opt(engine_state, stack, 0)?;
|
let arg: Option<Value> = call.opt(engine_state, stack, 0)?;
|
||||||
|
|
||||||
if let Some(arg) = arg {
|
if let Some(arg) = arg {
|
||||||
Ok(make_error(&arg)
|
Ok(make_error(&arg, span)
|
||||||
.map(|err| Value::Error { error: err })
|
.map(|err| Value::Error { error: err })
|
||||||
.unwrap_or_else(|| Value::Error {
|
.unwrap_or_else(|| Value::Error {
|
||||||
error: ShellError::GenericError(
|
error: ShellError::GenericError(
|
||||||
@ -51,7 +55,7 @@ impl Command for ErrorMake {
|
|||||||
} else {
|
} else {
|
||||||
input.map(
|
input.map(
|
||||||
move |value| {
|
move |value| {
|
||||||
make_error(&value)
|
make_error(&value, span)
|
||||||
.map(|err| Value::Error { error: err })
|
.map(|err| Value::Error { error: err })
|
||||||
.unwrap_or_else(|| Value::Error {
|
.unwrap_or_else(|| Value::Error {
|
||||||
error: ShellError::GenericError(
|
error: ShellError::GenericError(
|
||||||
@ -89,7 +93,7 @@ impl Command for ErrorMake {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_error(value: &Value) -> Option<ShellError> {
|
fn make_error(value: &Value, throw_span: Span) -> Option<ShellError> {
|
||||||
if let Value::Record { .. } = &value {
|
if let Value::Record { .. } = &value {
|
||||||
let msg = value.get_data_by_key("msg");
|
let msg = value.get_data_by_key("msg");
|
||||||
let label = value.get_data_by_key("label");
|
let label = value.get_data_by_key("label");
|
||||||
@ -117,13 +121,26 @@ fn make_error(value: &Value) -> Option<ShellError> {
|
|||||||
None,
|
None,
|
||||||
Vec::new(),
|
Vec::new(),
|
||||||
)),
|
)),
|
||||||
|
(
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
Some(Value::String {
|
||||||
|
val: label_text, ..
|
||||||
|
}),
|
||||||
|
) => Some(ShellError::GenericError(
|
||||||
|
message,
|
||||||
|
label_text,
|
||||||
|
Some(throw_span),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
)),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
(Some(Value::String { val: message, .. }), None) => Some(ShellError::GenericError(
|
(Some(Value::String { val: message, .. }), None) => Some(ShellError::GenericError(
|
||||||
message,
|
message,
|
||||||
"".to_string(),
|
"originates from here".to_string(),
|
||||||
None,
|
Some(throw_span),
|
||||||
None,
|
None,
|
||||||
Vec::new(),
|
Vec::new(),
|
||||||
)),
|
)),
|
||||||
|
@ -22,8 +22,8 @@ impl Command for ExportCommand {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -26,8 +26,8 @@ impl Command for ExportAlias {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -27,8 +27,8 @@ impl Command for ExportDef {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -27,8 +27,8 @@ impl Command for ExportDefEnv {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -30,8 +30,8 @@ impl Command for ExportEnv {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -22,8 +22,8 @@ impl Command for ExportExtern {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -22,8 +22,8 @@ impl Command for Extern {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -45,8 +45,8 @@ impl Command for For {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -25,9 +25,8 @@ impl Command for Hide {
|
|||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"Symbols are hidden by priority: First aliases, then custom commands, then environment variables.
|
r#"Symbols are hidden by priority: First aliases, then custom commands, then environment variables.
|
||||||
|
|
||||||
This command is a parser keyword. For details, check
|
This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
"#
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
@ -63,23 +62,23 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
|||||||
return Err(ShellError::NonUtf8(import_pattern.head.span));
|
return Err(ShellError::NonUtf8(import_pattern.head.span));
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(overlay_id) = engine_state.find_overlay(&import_pattern.head.name) {
|
if let Some(module_id) = engine_state.find_module(&import_pattern.head.name, &[]) {
|
||||||
// The first word is a module
|
// The first word is a module
|
||||||
let overlay = engine_state.get_overlay(overlay_id);
|
let module = engine_state.get_module(module_id);
|
||||||
|
|
||||||
let env_vars_to_hide = if import_pattern.members.is_empty() {
|
let env_vars_to_hide = if import_pattern.members.is_empty() {
|
||||||
overlay.env_vars_with_head(&import_pattern.head.name)
|
module.env_vars_with_head(&import_pattern.head.name)
|
||||||
} else {
|
} else {
|
||||||
match &import_pattern.members[0] {
|
match &import_pattern.members[0] {
|
||||||
ImportPatternMember::Glob { .. } => overlay.env_vars(),
|
ImportPatternMember::Glob { .. } => module.env_vars(),
|
||||||
ImportPatternMember::Name { name, span } => {
|
ImportPatternMember::Name { name, span } => {
|
||||||
let mut output = vec![];
|
let mut output = vec![];
|
||||||
|
|
||||||
if let Some((name, id)) =
|
if let Some((name, id)) =
|
||||||
overlay.env_var_with_head(name, &import_pattern.head.name)
|
module.env_var_with_head(name, &import_pattern.head.name)
|
||||||
{
|
{
|
||||||
output.push((name, id));
|
output.push((name, id));
|
||||||
} else if !(overlay.has_alias(name) || overlay.has_decl(name)) {
|
} else if !(module.has_alias(name) || module.has_decl(name)) {
|
||||||
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
||||||
String::from_utf8_lossy(name).into(),
|
String::from_utf8_lossy(name).into(),
|
||||||
*span,
|
*span,
|
||||||
@ -93,10 +92,10 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
|||||||
|
|
||||||
for (name, span) in names {
|
for (name, span) in names {
|
||||||
if let Some((name, id)) =
|
if let Some((name, id)) =
|
||||||
overlay.env_var_with_head(name, &import_pattern.head.name)
|
module.env_var_with_head(name, &import_pattern.head.name)
|
||||||
{
|
{
|
||||||
output.push((name, id));
|
output.push((name, id));
|
||||||
} else if !(overlay.has_alias(name) || overlay.has_decl(name)) {
|
} else if !(module.has_alias(name) || module.has_decl(name)) {
|
||||||
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
||||||
String::from_utf8_lossy(name).into(),
|
String::from_utf8_lossy(name).into(),
|
||||||
*span,
|
*span,
|
||||||
|
@ -1,14 +1,13 @@
|
|||||||
use nu_protocol::ast::Call;
|
use nu_protocol::ast::Call;
|
||||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
Category, Example, IntoInterruptiblePipelineData, PipelineData, ShellError, Signature, Value,
|
Category, Example, HistoryFileFormat, IntoInterruptiblePipelineData, PipelineData, ShellError,
|
||||||
|
Signature, Value,
|
||||||
|
};
|
||||||
|
use reedline::{
|
||||||
|
FileBackedHistory, History as ReedlineHistory, SearchDirection, SearchQuery,
|
||||||
|
SqliteBackedHistory,
|
||||||
};
|
};
|
||||||
|
|
||||||
const NEWLINE_ESCAPE_CODE: &str = "<\\n>";
|
|
||||||
|
|
||||||
fn decode_newlines(escaped: &str) -> String {
|
|
||||||
escaped.replace(NEWLINE_ESCAPE_CODE, "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct History;
|
pub struct History;
|
||||||
@ -36,44 +35,74 @@ impl Command for History {
|
|||||||
_input: PipelineData,
|
_input: PipelineData,
|
||||||
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
|
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
|
||||||
let head = call.head;
|
let head = call.head;
|
||||||
|
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
|
||||||
if let Some(config_path) = nu_path::config_dir() {
|
if let Some(config_path) = nu_path::config_dir() {
|
||||||
let clear = call.has_flag("clear");
|
let clear = call.has_flag("clear");
|
||||||
let ctrlc = engine_state.ctrlc.clone();
|
let ctrlc = engine_state.ctrlc.clone();
|
||||||
|
|
||||||
let mut history_path = config_path;
|
let mut history_path = config_path;
|
||||||
history_path.push("nushell");
|
history_path.push("nushell");
|
||||||
history_path.push("history.txt");
|
match engine_state.config.history_file_format {
|
||||||
|
HistoryFileFormat::Sqlite => {
|
||||||
|
history_path.push("history.sqlite3");
|
||||||
|
}
|
||||||
|
HistoryFileFormat::PlainText => {
|
||||||
|
history_path.push("history.txt");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if clear {
|
if clear {
|
||||||
let _ = std::fs::remove_file(history_path);
|
let _ = std::fs::remove_file(history_path);
|
||||||
|
// TODO: FIXME also clear the auxiliary files when using sqlite
|
||||||
Ok(PipelineData::new(head))
|
Ok(PipelineData::new(head))
|
||||||
} else {
|
} else {
|
||||||
let contents = std::fs::read_to_string(history_path);
|
let history_reader: Option<Box<dyn ReedlineHistory>> =
|
||||||
|
match engine_state.config.history_file_format {
|
||||||
|
HistoryFileFormat::Sqlite => SqliteBackedHistory::with_file(history_path)
|
||||||
|
.map(|inner| {
|
||||||
|
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||||
|
boxed
|
||||||
|
})
|
||||||
|
.ok(),
|
||||||
|
|
||||||
if let Ok(contents) = contents {
|
HistoryFileFormat::PlainText => FileBackedHistory::with_file(
|
||||||
Ok(contents
|
engine_state.config.max_history_size as usize,
|
||||||
.lines()
|
history_path,
|
||||||
.enumerate()
|
)
|
||||||
.map(move |(index, command)| Value::Record {
|
.map(|inner| {
|
||||||
cols: vec!["command".to_string(), "index".to_string()],
|
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||||
vals: vec![
|
boxed
|
||||||
Value::String {
|
|
||||||
val: decode_newlines(command),
|
|
||||||
span: head,
|
|
||||||
},
|
|
||||||
Value::Int {
|
|
||||||
val: index as i64,
|
|
||||||
span: head,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
span: head,
|
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>()
|
.ok(),
|
||||||
.into_iter()
|
};
|
||||||
.into_pipeline_data(ctrlc))
|
|
||||||
} else {
|
let data = history_reader
|
||||||
Err(ShellError::FileNotFound(head))
|
.and_then(|h| {
|
||||||
}
|
h.search(SearchQuery::everything(SearchDirection::Forward))
|
||||||
|
.ok()
|
||||||
|
})
|
||||||
|
.map(move |entries| {
|
||||||
|
entries
|
||||||
|
.into_iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(move |(idx, entry)| Value::Record {
|
||||||
|
cols: vec!["command".to_string(), "index".to_string()],
|
||||||
|
vals: vec![
|
||||||
|
Value::String {
|
||||||
|
val: entry.command_line,
|
||||||
|
span: head,
|
||||||
|
},
|
||||||
|
Value::Int {
|
||||||
|
val: idx as i64,
|
||||||
|
span: head,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
span: head,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.ok_or(ShellError::FileNotFound(head))?
|
||||||
|
.into_pipeline_data(ctrlc);
|
||||||
|
Ok(data)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
Err(ShellError::FileNotFound(head))
|
Err(ShellError::FileNotFound(head))
|
||||||
|
@ -34,8 +34,8 @@ impl Command for If {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -27,8 +27,8 @@ impl Command for Let {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -22,6 +22,7 @@ mod ignore;
|
|||||||
mod let_;
|
mod let_;
|
||||||
mod metadata;
|
mod metadata;
|
||||||
mod module;
|
mod module;
|
||||||
|
pub(crate) mod overlay;
|
||||||
mod source;
|
mod source;
|
||||||
mod tutor;
|
mod tutor;
|
||||||
mod use_;
|
mod use_;
|
||||||
@ -51,6 +52,7 @@ pub use ignore::Ignore;
|
|||||||
pub use let_::Let;
|
pub use let_::Let;
|
||||||
pub use metadata::Metadata;
|
pub use metadata::Metadata;
|
||||||
pub use module::Module;
|
pub use module::Module;
|
||||||
|
pub use overlay::*;
|
||||||
pub use source::Source;
|
pub use source::Source;
|
||||||
pub use tutor::Tutor;
|
pub use tutor::Tutor;
|
||||||
pub use use_::Use;
|
pub use use_::Use;
|
||||||
|
@ -26,8 +26,8 @@ impl Command for Module {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
148
crates/nu-command/src/core_commands/overlay/add.rs
Normal file
@ -0,0 +1,148 @@
|
|||||||
|
use nu_engine::{eval_block, CallExt};
|
||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape};
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OverlayAdd;
|
||||||
|
|
||||||
|
impl Command for OverlayAdd {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay add"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Add definitions from a module as an overlay"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("overlay add")
|
||||||
|
.required(
|
||||||
|
"name",
|
||||||
|
SyntaxShape::String,
|
||||||
|
"Module name to create overlay for",
|
||||||
|
)
|
||||||
|
// TODO:
|
||||||
|
// .switch(
|
||||||
|
// "prefix",
|
||||||
|
// "Prepend module name to the imported symbols",
|
||||||
|
// Some('p'),
|
||||||
|
// )
|
||||||
|
.category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
r#"This command is a parser keyword. For details, check:
|
||||||
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let name_arg: Spanned<String> = call.req(engine_state, stack, 0)?;
|
||||||
|
|
||||||
|
let maybe_overlay_name = if engine_state
|
||||||
|
.find_overlay(name_arg.item.as_bytes())
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
Some(name_arg.item.clone())
|
||||||
|
} else if let Some(os_str) = Path::new(&name_arg.item).file_stem() {
|
||||||
|
os_str.to_str().map(|name| name.to_string())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(overlay_name) = maybe_overlay_name {
|
||||||
|
if let Some(overlay_id) = engine_state.find_overlay(overlay_name.as_bytes()) {
|
||||||
|
let old_module_id = engine_state.get_overlay(overlay_id).origin;
|
||||||
|
|
||||||
|
stack.add_overlay(overlay_name.clone());
|
||||||
|
|
||||||
|
if let Some(new_module_id) = engine_state.find_module(overlay_name.as_bytes(), &[])
|
||||||
|
{
|
||||||
|
if !stack.has_env_overlay(&overlay_name, engine_state)
|
||||||
|
|| (old_module_id != new_module_id)
|
||||||
|
{
|
||||||
|
// Add environment variables only if:
|
||||||
|
// a) adding a new overlay
|
||||||
|
// b) refreshing an active overlay (the origin module changed)
|
||||||
|
let module = engine_state.get_module(new_module_id);
|
||||||
|
|
||||||
|
for (name, block_id) in module.env_vars() {
|
||||||
|
let name = if let Ok(s) = String::from_utf8(name.clone()) {
|
||||||
|
s
|
||||||
|
} else {
|
||||||
|
return Err(ShellError::NonUtf8(call.head));
|
||||||
|
};
|
||||||
|
|
||||||
|
let block = engine_state.get_block(block_id);
|
||||||
|
|
||||||
|
let val = eval_block(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
block,
|
||||||
|
PipelineData::new(call.head),
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
)?
|
||||||
|
.into_value(call.head);
|
||||||
|
|
||||||
|
stack.add_env_var(name, val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||||
|
name_arg.item,
|
||||||
|
name_arg.span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||||
|
name_arg.item,
|
||||||
|
name_arg.span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(PipelineData::new(call.head))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![
|
||||||
|
Example {
|
||||||
|
description: "Create an overlay from a module",
|
||||||
|
example: r#"module spam { export def foo [] { "foo" } }
|
||||||
|
overlay add spam"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description: "Create an overlay from a file",
|
||||||
|
example: r#"echo 'export env FOO { "foo" }' | save spam.nu
|
||||||
|
overlay add spam.nu"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_examples() {
|
||||||
|
use crate::test_examples;
|
||||||
|
|
||||||
|
test_examples(OverlayAdd {})
|
||||||
|
}
|
||||||
|
}
|
58
crates/nu-command/src/core_commands/overlay/command.rs
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
use nu_engine::get_full_help;
|
||||||
|
use nu_protocol::{
|
||||||
|
ast::Call,
|
||||||
|
engine::{Command, EngineState, Stack},
|
||||||
|
Category, IntoPipelineData, PipelineData, Signature, Value,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Overlay;
|
||||||
|
|
||||||
|
impl Command for Overlay {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> Signature {
|
||||||
|
Signature::build("overlay").category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Commands for manipulating overlays."
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
r#"This command is a parser keyword. For details, check:
|
||||||
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
|
||||||
|
Ok(Value::String {
|
||||||
|
val: get_full_help(&Overlay.signature(), &[], engine_state, stack),
|
||||||
|
span: call.head,
|
||||||
|
}
|
||||||
|
.into_pipeline_data())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_examples() {
|
||||||
|
use crate::test_examples;
|
||||||
|
|
||||||
|
test_examples(Overlay {})
|
||||||
|
}
|
||||||
|
}
|
85
crates/nu-command/src/core_commands/overlay/list.rs
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{
|
||||||
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Value,
|
||||||
|
};
|
||||||
|
|
||||||
|
use log::trace;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OverlayList;
|
||||||
|
|
||||||
|
impl Command for OverlayList {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay list"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"List all active overlays"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("overlay list").category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
"The overlays are listed in the order they were activated."
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let active_overlays_parser: Vec<Value> = engine_state
|
||||||
|
.active_overlay_names(&[])
|
||||||
|
.iter()
|
||||||
|
.map(|s| Value::string(String::from_utf8_lossy(s), call.head))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let active_overlays_engine: Vec<Value> = stack
|
||||||
|
.active_overlays
|
||||||
|
.iter()
|
||||||
|
.map(|s| Value::string(s, call.head))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Check if the overlays in the engine match the overlays in the parser
|
||||||
|
if (active_overlays_parser.len() != active_overlays_engine.len())
|
||||||
|
|| active_overlays_parser
|
||||||
|
.iter()
|
||||||
|
.zip(active_overlays_engine.iter())
|
||||||
|
.any(|(op, oe)| op != oe)
|
||||||
|
{
|
||||||
|
trace!("parser overlays: {:?}", active_overlays_parser);
|
||||||
|
trace!("engine overlays: {:?}", active_overlays_engine);
|
||||||
|
|
||||||
|
return Err(ShellError::NushellFailedSpannedHelp(
|
||||||
|
"Overlay mismatch".into(),
|
||||||
|
"Active overlays do not match between the engine and the parser.".into(),
|
||||||
|
call.head,
|
||||||
|
"Run Nushell with --log-level=trace to see what went wrong.".into(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Value::List {
|
||||||
|
vals: active_overlays_engine,
|
||||||
|
span: call.head,
|
||||||
|
}
|
||||||
|
.into_pipeline_data())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![Example {
|
||||||
|
description: "Get the last activated overlay",
|
||||||
|
example: r#"module spam { export def foo [] { "foo" } }
|
||||||
|
overlay add spam
|
||||||
|
overlay list | last"#,
|
||||||
|
result: Some(Value::String {
|
||||||
|
val: "spam".to_string(),
|
||||||
|
span: Span::test_data(),
|
||||||
|
}),
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
}
|
11
crates/nu-command/src/core_commands/overlay/mod.rs
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
mod add;
|
||||||
|
mod command;
|
||||||
|
mod list;
|
||||||
|
mod new;
|
||||||
|
mod remove;
|
||||||
|
|
||||||
|
pub use add::OverlayAdd;
|
||||||
|
pub use command::Overlay;
|
||||||
|
pub use list::OverlayList;
|
||||||
|
pub use new::OverlayNew;
|
||||||
|
pub use remove::OverlayRemove;
|
74
crates/nu-command/src/core_commands/overlay/new.rs
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OverlayNew;
|
||||||
|
|
||||||
|
impl Command for OverlayNew {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay new"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Create an empty overlay"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("overlay new")
|
||||||
|
.required("name", SyntaxShape::String, "Name of the overlay")
|
||||||
|
// TODO:
|
||||||
|
// .switch(
|
||||||
|
// "prefix",
|
||||||
|
// "Prepend module name to the imported symbols",
|
||||||
|
// Some('p'),
|
||||||
|
// )
|
||||||
|
.category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
r#"The command will first create an empty module, then add it as an overlay.
|
||||||
|
|
||||||
|
This command is a parser keyword. For details, check:
|
||||||
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let name_arg: Spanned<String> = call.req(engine_state, stack, 0)?;
|
||||||
|
|
||||||
|
stack.add_overlay(name_arg.item);
|
||||||
|
|
||||||
|
Ok(PipelineData::new(call.head))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![Example {
|
||||||
|
description: "Create an empty overlay",
|
||||||
|
example: r#"overlay new spam"#,
|
||||||
|
result: None,
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_examples() {
|
||||||
|
use crate::test_examples;
|
||||||
|
|
||||||
|
test_examples(OverlayNew {})
|
||||||
|
}
|
||||||
|
}
|
117
crates/nu-command/src/core_commands/overlay/remove.rs
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::ast::Call;
|
||||||
|
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||||
|
use nu_protocol::{
|
||||||
|
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Value,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct OverlayRemove;
|
||||||
|
|
||||||
|
impl Command for OverlayRemove {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"overlay remove"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Remove an active overlay"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> nu_protocol::Signature {
|
||||||
|
Signature::build("overlay remove")
|
||||||
|
.optional("name", SyntaxShape::String, "Overlay to remove")
|
||||||
|
.switch(
|
||||||
|
"keep-custom",
|
||||||
|
"Keep newly added symbols within the next activated overlay",
|
||||||
|
Some('k'),
|
||||||
|
)
|
||||||
|
.category(Category::Core)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extra_usage(&self) -> &str {
|
||||||
|
r#"This command is a parser keyword. For details, check:
|
||||||
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
|
||||||
|
let overlay_name: Spanned<String> = if let Some(name) = call.opt(engine_state, stack, 0)? {
|
||||||
|
name
|
||||||
|
} else {
|
||||||
|
Spanned {
|
||||||
|
item: stack.last_overlay_name()?,
|
||||||
|
span: call.head,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if !stack.is_overlay_active(&overlay_name.item) {
|
||||||
|
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||||
|
overlay_name.item,
|
||||||
|
overlay_name.span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
if call.has_flag("keep-custom") {
|
||||||
|
if let Some(overlay_id) = engine_state.find_overlay(overlay_name.item.as_bytes()) {
|
||||||
|
let overlay_frame = engine_state.get_overlay(overlay_id);
|
||||||
|
let origin_module = engine_state.get_module(overlay_frame.origin);
|
||||||
|
|
||||||
|
let env_vars_to_keep: Vec<(String, Value)> = stack
|
||||||
|
.get_overlay_env_vars(engine_state, &overlay_name.item)
|
||||||
|
.into_iter()
|
||||||
|
.filter(|(name, _)| !origin_module.has_env_var(name.as_bytes()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
stack.remove_overlay(&overlay_name.item);
|
||||||
|
|
||||||
|
for (name, val) in env_vars_to_keep {
|
||||||
|
stack.add_env_var(name, val);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||||
|
overlay_name.item,
|
||||||
|
overlay_name.span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
stack.remove_overlay(&overlay_name.item);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(PipelineData::new(call.head))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![
|
||||||
|
Example {
|
||||||
|
description: "Remove an overlay created from a module",
|
||||||
|
example: r#"module spam { export def foo [] { "foo" } }
|
||||||
|
overlay add spam
|
||||||
|
overlay remove spam"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description: "Remove an overlay created from a file",
|
||||||
|
example: r#"echo 'export alias f = "foo"' | save spam.nu
|
||||||
|
overlay add spam.nu
|
||||||
|
overlay remove spam"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
Example {
|
||||||
|
description: "Remove the last activated overlay",
|
||||||
|
example: r#"module spam { export env FOO { "foo" } }
|
||||||
|
overlay add spam
|
||||||
|
overlay remove"#,
|
||||||
|
result: None,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
@ -42,8 +42,8 @@ impl Command for Register {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -27,8 +27,8 @@ impl Command for Source {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
|
@ -74,6 +74,7 @@ fn tutor(
|
|||||||
|
|
||||||
let search: Option<String> = call.opt(engine_state, stack, 0).unwrap_or(None);
|
let search: Option<String> = call.opt(engine_state, stack, 0).unwrap_or(None);
|
||||||
let find: Option<String> = call.get_flag(engine_state, stack, "find")?;
|
let find: Option<String> = call.get_flag(engine_state, stack, "find")?;
|
||||||
|
let notes = "You can learn about a topic using `tutor` followed by the name of the topic.\nFor example: `tutor table` to open the table topic.\n\n";
|
||||||
|
|
||||||
let search_space = [
|
let search_space = [
|
||||||
(vec!["begin"], begin_tutor()),
|
(vec!["begin"], begin_tutor()),
|
||||||
@ -100,7 +101,6 @@ fn tutor(
|
|||||||
vec!["var", "vars", "variable", "variables"],
|
vec!["var", "vars", "variable", "variables"],
|
||||||
variable_tutor(),
|
variable_tutor(),
|
||||||
),
|
),
|
||||||
(vec!["engine-q", "e-q"], engineq_tutor()),
|
|
||||||
(vec!["block", "blocks"], block_tutor()),
|
(vec!["block", "blocks"], block_tutor()),
|
||||||
(vec!["shorthand", "shorthands"], shorthand_tutor()),
|
(vec!["shorthand", "shorthands"], shorthand_tutor()),
|
||||||
];
|
];
|
||||||
@ -113,13 +113,22 @@ fn tutor(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let message = format!("You can find '{}' in the following topics:\n{}\n\nYou can learn about a topic using `tutor` followed by the name of the topic.\nFor example: `tutor table` to open the table topic.\n\n",
|
let message = format!(
|
||||||
find,
|
"You can find '{find}' in the following topics:\n\n{}\n\n{notes}",
|
||||||
results.into_iter().map(|x| format!("- {}", x)).join("\n")
|
results.into_iter().map(|x| format!("- {}", x)).join("\n")
|
||||||
);
|
);
|
||||||
|
|
||||||
return Ok(display(&message, engine_state, stack, span));
|
return Ok(display(&message, engine_state, stack, span));
|
||||||
} else if let Some(search) = search {
|
} else if let Some(search) = search {
|
||||||
|
if search == "list" {
|
||||||
|
let results = search_space.map(|s| s.0[0].to_string());
|
||||||
|
let message = format!(
|
||||||
|
"This tutorial contains the following topics:\n\n{}\n\n{notes}",
|
||||||
|
results.map(|x| format!("- {}", x)).join("\n")
|
||||||
|
);
|
||||||
|
return Ok(display(&message, engine_state, stack, span));
|
||||||
|
}
|
||||||
|
|
||||||
for search_group in search_space {
|
for search_group in search_space {
|
||||||
if search_group.0.contains(&search.as_str()) {
|
if search_group.0.contains(&search.as_str()) {
|
||||||
return Ok(display(search_group.1, engine_state, stack, span));
|
return Ok(display(search_group.1, engine_state, stack, span));
|
||||||
@ -136,7 +145,8 @@ Welcome to the Nushell tutorial!
|
|||||||
With the `tutor` command, you'll be able to learn a lot about how Nushell
|
With the `tutor` command, you'll be able to learn a lot about how Nushell
|
||||||
works along with many fun tips and tricks to speed up everyday tasks.
|
works along with many fun tips and tricks to speed up everyday tasks.
|
||||||
|
|
||||||
To get started, you can use `tutor begin`.
|
To get started, you can use `tutor begin`, and to see all the available
|
||||||
|
tutorials just run `tutor list`.
|
||||||
|
|
||||||
"#
|
"#
|
||||||
}
|
}
|
||||||
@ -390,29 +400,6 @@ same value using:
|
|||||||
"#
|
"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn engineq_tutor() -> &'static str {
|
|
||||||
r#"
|
|
||||||
Engine-q is the upcoming engine for Nushell. Build for speed and correctness,
|
|
||||||
it also comes with a set of changes from Nushell versions prior to 0.60. To
|
|
||||||
get ready for engine-q look for some of these changes that might impact your
|
|
||||||
current scripts:
|
|
||||||
|
|
||||||
* Engine-q now uses a few new data structures, including a record syntax
|
|
||||||
that allows you to model key-value pairs similar to JSON objects.
|
|
||||||
* Environment variables can now contain more than just strings. Structured
|
|
||||||
values are converted to strings for external commands using converters.
|
|
||||||
* `if` will now use an `else` keyword before the else block.
|
|
||||||
* We're moving from "config.toml" to "config.nu". This means startup will
|
|
||||||
now be a script file.
|
|
||||||
* `config` and its subcommands are being replaced by a record that you can
|
|
||||||
update in the shell which contains all the settings under the variable
|
|
||||||
`$config`.
|
|
||||||
* bigint/bigdecimal values are now machine i64 and f64 values
|
|
||||||
* And more, you can read more about upcoming changes in the up-to-date list
|
|
||||||
at: https://github.com/nushell/engine-q/issues/522
|
|
||||||
"#
|
|
||||||
}
|
|
||||||
|
|
||||||
fn display(help: &str, engine_state: &EngineState, stack: &mut Stack, span: Span) -> PipelineData {
|
fn display(help: &str, engine_state: &EngineState, stack: &mut Stack, span: Span) -> PipelineData {
|
||||||
let help = help.split('`');
|
let help = help.split('`');
|
||||||
|
|
||||||
@ -424,7 +411,7 @@ fn display(help: &str, engine_state: &EngineState, stack: &mut Stack, span: Span
|
|||||||
code_mode = false;
|
code_mode = false;
|
||||||
|
|
||||||
//TODO: support no-color mode
|
//TODO: support no-color mode
|
||||||
if let Some(highlighter) = engine_state.find_decl(b"nu-highlight") {
|
if let Some(highlighter) = engine_state.find_decl(b"nu-highlight", &[]) {
|
||||||
let decl = engine_state.get_decl(highlighter);
|
let decl = engine_state.get_decl(highlighter);
|
||||||
|
|
||||||
if let Ok(output) = decl.run(
|
if let Ok(output) = decl.run(
|
||||||
|
@ -24,8 +24,8 @@ impl Command for Use {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn extra_usage(&self) -> &str {
|
fn extra_usage(&self) -> &str {
|
||||||
r#"This command is a parser keyword. For details, check
|
r#"This command is a parser keyword. For details, check:
|
||||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_parser_keyword(&self) -> bool {
|
fn is_parser_keyword(&self) -> bool {
|
||||||
@ -55,20 +55,20 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
|||||||
));
|
));
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(overlay_id) = import_pattern.head.id {
|
if let Some(module_id) = import_pattern.head.id {
|
||||||
let overlay = engine_state.get_overlay(overlay_id);
|
let module = engine_state.get_module(module_id);
|
||||||
|
|
||||||
let env_vars_to_use = if import_pattern.members.is_empty() {
|
let env_vars_to_use = if import_pattern.members.is_empty() {
|
||||||
overlay.env_vars_with_head(&import_pattern.head.name)
|
module.env_vars_with_head(&import_pattern.head.name)
|
||||||
} else {
|
} else {
|
||||||
match &import_pattern.members[0] {
|
match &import_pattern.members[0] {
|
||||||
ImportPatternMember::Glob { .. } => overlay.env_vars(),
|
ImportPatternMember::Glob { .. } => module.env_vars(),
|
||||||
ImportPatternMember::Name { name, span } => {
|
ImportPatternMember::Name { name, span } => {
|
||||||
let mut output = vec![];
|
let mut output = vec![];
|
||||||
|
|
||||||
if let Some(id) = overlay.get_env_var_id(name) {
|
if let Some(id) = module.get_env_var_id(name) {
|
||||||
output.push((name.clone(), id));
|
output.push((name.clone(), id));
|
||||||
} else if !overlay.has_decl(name) && !overlay.has_alias(name) {
|
} else if !module.has_decl(name) && !module.has_alias(name) {
|
||||||
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
||||||
String::from_utf8_lossy(name).into(),
|
String::from_utf8_lossy(name).into(),
|
||||||
*span,
|
*span,
|
||||||
@ -81,9 +81,9 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
|||||||
let mut output = vec![];
|
let mut output = vec![];
|
||||||
|
|
||||||
for (name, span) in names {
|
for (name, span) in names {
|
||||||
if let Some(id) = overlay.get_env_var_id(name) {
|
if let Some(id) = module.get_env_var_id(name) {
|
||||||
output.push((name.clone(), id));
|
output.push((name.clone(), id));
|
||||||
} else if !overlay.has_decl(name) && !overlay.has_alias(name) {
|
} else if !module.has_decl(name) && !module.has_alias(name) {
|
||||||
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
||||||
String::from_utf8_lossy(name).into(),
|
String::from_utf8_lossy(name).into(),
|
||||||
*span,
|
*span,
|
||||||
@ -105,8 +105,6 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
|||||||
|
|
||||||
let block = engine_state.get_block(block_id);
|
let block = engine_state.get_block(block_id);
|
||||||
|
|
||||||
// TODO: Add string conversions (e.g. int to string)
|
|
||||||
// TODO: Later expand env to take all Values
|
|
||||||
let val = eval_block(
|
let val = eval_block(
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
|
@ -63,21 +63,7 @@ pub fn version(
|
|||||||
span: call.head,
|
span: call.head,
|
||||||
});
|
});
|
||||||
|
|
||||||
cols.push("tag".to_string());
|
let commit_hash: Option<&str> = option_env!("NU_COMMIT_HASH");
|
||||||
vals.push(Value::String {
|
|
||||||
val: shadow_rs::tag(),
|
|
||||||
span: call.head,
|
|
||||||
});
|
|
||||||
|
|
||||||
let short_commit: Option<&str> = Some(shadow::SHORT_COMMIT).filter(|x| !x.is_empty());
|
|
||||||
if let Some(short_commit) = short_commit {
|
|
||||||
cols.push("short_commit".to_string());
|
|
||||||
vals.push(Value::String {
|
|
||||||
val: short_commit.to_string(),
|
|
||||||
span: call.head,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
let commit_hash: Option<&str> = Some(shadow::COMMIT_HASH).filter(|x| !x.is_empty());
|
|
||||||
if let Some(commit_hash) = commit_hash {
|
if let Some(commit_hash) = commit_hash {
|
||||||
cols.push("commit_hash".to_string());
|
cols.push("commit_hash".to_string());
|
||||||
vals.push(Value::String {
|
vals.push(Value::String {
|
||||||
@ -85,14 +71,6 @@ pub fn version(
|
|||||||
span: call.head,
|
span: call.head,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
let commit_date: Option<&str> = Some(shadow::COMMIT_DATE).filter(|x| !x.is_empty());
|
|
||||||
if let Some(commit_date) = commit_date {
|
|
||||||
cols.push("commit_date".to_string());
|
|
||||||
vals.push(Value::String {
|
|
||||||
val: commit_date.to_string(),
|
|
||||||
span: call.head,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let build_os: Option<&str> = Some(shadow::BUILD_OS).filter(|x| !x.is_empty());
|
let build_os: Option<&str> = Some(shadow::BUILD_OS).filter(|x| !x.is_empty());
|
||||||
if let Some(build_os) = build_os {
|
if let Some(build_os) = build_os {
|
||||||
@ -105,7 +83,7 @@ pub fn version(
|
|||||||
|
|
||||||
let build_target: Option<&str> = Some(shadow::BUILD_TARGET).filter(|x| !x.is_empty());
|
let build_target: Option<&str> = Some(shadow::BUILD_TARGET).filter(|x| !x.is_empty());
|
||||||
if let Some(build_target) = build_target {
|
if let Some(build_target) = build_target {
|
||||||
cols.push("build_os".to_string());
|
cols.push("build_target".to_string());
|
||||||
vals.push(Value::String {
|
vals.push(Value::String {
|
||||||
val: build_target.to_string(),
|
val: build_target.to_string(),
|
||||||
span: call.head,
|
span: call.head,
|
||||||
|
@ -8,7 +8,7 @@ use nu_protocol::{
|
|||||||
engine::{Command, EngineState, Stack},
|
engine::{Command, EngineState, Stack},
|
||||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape,
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape,
|
||||||
};
|
};
|
||||||
use sqlparser::ast::{Ident, SelectItem, SetExpr, TableAlias, TableFactor};
|
use sqlparser::ast::{Ident, SelectItem, SetExpr, Statement, TableAlias, TableFactor};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct AliasExpr;
|
pub struct AliasExpr;
|
||||||
@ -29,26 +29,15 @@ impl Command for AliasExpr {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn examples(&self) -> Vec<Example> {
|
fn examples(&self) -> Vec<Example> {
|
||||||
vec![
|
vec![Example {
|
||||||
Example {
|
description: "Creates an alias for a column selection",
|
||||||
description: "Creates an alias for a column selection",
|
example: "db col name_a | db as new_a",
|
||||||
example: "db col name_a | db as new_a",
|
result: None,
|
||||||
result: None,
|
}]
|
||||||
},
|
|
||||||
Example {
|
|
||||||
description: "Creates an alias for a table",
|
|
||||||
example: r#"db open name
|
|
||||||
| db select a
|
|
||||||
| db from table_a
|
|
||||||
| db as table_a_new
|
|
||||||
| db describe"#,
|
|
||||||
result: None,
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn search_terms(&self) -> Vec<&str> {
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
vec!["database", "column", "expression"]
|
vec!["database", "alias", "column"]
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run(
|
fn run(
|
||||||
@ -110,44 +99,56 @@ fn alias_db(
|
|||||||
new_alias: String,
|
new_alias: String,
|
||||||
call: &Call,
|
call: &Call,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
match db.query {
|
match db.statement.as_mut() {
|
||||||
None => Err(ShellError::GenericError(
|
None => Err(ShellError::GenericError(
|
||||||
"Error creating alias".into(),
|
"Error creating alias".into(),
|
||||||
"there is no query defined yet".into(),
|
"there is no statement defined yet".into(),
|
||||||
Some(call.head),
|
Some(call.head),
|
||||||
None,
|
None,
|
||||||
Vec::new(),
|
Vec::new(),
|
||||||
)),
|
)),
|
||||||
Some(ref mut query) => match &mut query.body {
|
Some(statement) => match statement {
|
||||||
SetExpr::Select(ref mut select) => {
|
Statement::Query(query) => match &mut query.body {
|
||||||
select.as_mut().from.iter_mut().for_each(|table| {
|
SetExpr::Select(select) => {
|
||||||
let new_alias = Some(TableAlias {
|
select.as_mut().from.iter_mut().for_each(|table| {
|
||||||
name: Ident {
|
let new_alias = Some(TableAlias {
|
||||||
value: new_alias.clone(),
|
name: Ident {
|
||||||
quote_style: None,
|
value: new_alias.clone(),
|
||||||
},
|
quote_style: None,
|
||||||
columns: Vec::new(),
|
},
|
||||||
|
columns: Vec::new(),
|
||||||
|
});
|
||||||
|
|
||||||
|
if let TableFactor::Table { ref mut alias, .. } = table.relation {
|
||||||
|
*alias = new_alias;
|
||||||
|
} else if let TableFactor::Derived { ref mut alias, .. } = table.relation {
|
||||||
|
*alias = new_alias;
|
||||||
|
} else if let TableFactor::TableFunction { ref mut alias, .. } =
|
||||||
|
table.relation
|
||||||
|
{
|
||||||
|
*alias = new_alias;
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
if let TableFactor::Table { ref mut alias, .. } = table.relation {
|
Ok(db.into_value(call.head).into_pipeline_data())
|
||||||
*alias = new_alias;
|
}
|
||||||
} else if let TableFactor::Derived { ref mut alias, .. } = table.relation {
|
_ => Err(ShellError::GenericError(
|
||||||
*alias = new_alias;
|
"Error creating alias".into(),
|
||||||
} else if let TableFactor::TableFunction { ref mut alias, .. } = table.relation
|
"Query has no select from defined".into(),
|
||||||
{
|
Some(call.head),
|
||||||
*alias = new_alias;
|
None,
|
||||||
}
|
Vec::new(),
|
||||||
});
|
)),
|
||||||
|
},
|
||||||
Ok(db.into_value(call.head).into_pipeline_data())
|
s => {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
))
|
||||||
}
|
}
|
||||||
_ => Err(ShellError::GenericError(
|
|
||||||
"Error creating alias".into(),
|
|
||||||
"Query has no select from defined".into(),
|
|
||||||
Some(call.head),
|
|
||||||
None,
|
|
||||||
Vec::new(),
|
|
||||||
)),
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -8,7 +8,7 @@ use nu_protocol::{
|
|||||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, SyntaxShape,
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, SyntaxShape,
|
||||||
Value,
|
Value,
|
||||||
};
|
};
|
||||||
use sqlparser::ast::{BinaryOperator, Expr, Query, Select, SetExpr};
|
use sqlparser::ast::{BinaryOperator, Expr, Query, Select, SetExpr, Statement};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct AndDb;
|
pub struct AndDb;
|
||||||
@ -78,12 +78,23 @@ impl Command for AndDb {
|
|||||||
|
|
||||||
Ok(expression.into_value(call.head).into_pipeline_data())
|
Ok(expression.into_value(call.head).into_pipeline_data())
|
||||||
} else if let Ok(mut db) = SQLiteDatabase::try_from_value(value.clone()) {
|
} else if let Ok(mut db) = SQLiteDatabase::try_from_value(value.clone()) {
|
||||||
db.query = match db.query {
|
match db.statement.as_mut() {
|
||||||
Some(query) => Some(modify_query(query, expr, call.head)?),
|
Some(statement) => match statement {
|
||||||
|
Statement::Query(query) => modify_query(query, expr, call.head)?,
|
||||||
|
s => {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
},
|
||||||
None => {
|
None => {
|
||||||
return Err(ShellError::GenericError(
|
return Err(ShellError::GenericError(
|
||||||
"Connection without query".into(),
|
"Connection without statement".into(),
|
||||||
"Missing query in the connection".into(),
|
"The connection needs a statement defined".into(),
|
||||||
Some(call.head),
|
Some(call.head),
|
||||||
None,
|
None,
|
||||||
Vec::new(),
|
Vec::new(),
|
||||||
@ -103,26 +114,24 @@ impl Command for AndDb {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn modify_query(mut query: Query, expression: Expr, span: Span) -> Result<Query, ShellError> {
|
fn modify_query(query: &mut Box<Query>, expression: Expr, span: Span) -> Result<(), ShellError> {
|
||||||
query.body = match query.body {
|
match query.body {
|
||||||
SetExpr::Select(select) => Ok(SetExpr::Select(modify_select(select, expression, span)?)),
|
SetExpr::Select(ref mut select) => modify_select(select, expression, span)?,
|
||||||
_ => Err(ShellError::GenericError(
|
_ => {
|
||||||
"Query without a select".into(),
|
return Err(ShellError::GenericError(
|
||||||
"Missing a WHERE clause before an AND clause".into(),
|
"Query without a select".into(),
|
||||||
Some(span),
|
"Missing a WHERE clause before an AND clause".into(),
|
||||||
None,
|
Some(span),
|
||||||
Vec::new(),
|
None,
|
||||||
)),
|
Vec::new(),
|
||||||
}?;
|
))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
Ok(query)
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn modify_select(
|
fn modify_select(select: &mut Box<Select>, expression: Expr, span: Span) -> Result<(), ShellError> {
|
||||||
mut select: Box<Select>,
|
|
||||||
expression: Expr,
|
|
||||||
span: Span,
|
|
||||||
) -> Result<Box<Select>, ShellError> {
|
|
||||||
let new_expression = match &select.selection {
|
let new_expression = match &select.selection {
|
||||||
Some(expr) => Ok(Expr::BinaryOp {
|
Some(expr) => Ok(Expr::BinaryOp {
|
||||||
left: Box::new(expr.clone()),
|
left: Box::new(expr.clone()),
|
||||||
@ -139,5 +148,5 @@ fn modify_select(
|
|||||||
}?;
|
}?;
|
||||||
|
|
||||||
select.as_mut().selection = Some(new_expression);
|
select.as_mut().selection = Some(new_expression);
|
||||||
Ok(select)
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -27,7 +27,7 @@ impl Command for ColExpr {
|
|||||||
fn examples(&self) -> Vec<Example> {
|
fn examples(&self) -> Vec<Example> {
|
||||||
vec![Example {
|
vec![Example {
|
||||||
description: "Creates a named column expression",
|
description: "Creates a named column expression",
|
||||||
example: "col name_1",
|
example: "db col name_1",
|
||||||
result: None,
|
result: None,
|
||||||
}]
|
}]
|
||||||
}
|
}
|
66
crates/nu-command/src/database/commands/conversions.rs
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
use crate::{database::values::definitions::ConnectionDb, SQLiteDatabase};
|
||||||
|
use nu_protocol::{ShellError, Value};
|
||||||
|
use sqlparser::ast::{ObjectName, Statement, TableAlias, TableFactor};
|
||||||
|
|
||||||
|
pub fn value_into_table_factor(
|
||||||
|
table: Value,
|
||||||
|
connection: &ConnectionDb,
|
||||||
|
alias: Option<TableAlias>,
|
||||||
|
) -> Result<TableFactor, ShellError> {
|
||||||
|
match table {
|
||||||
|
Value::String { val, .. } => {
|
||||||
|
let ident = sqlparser::ast::Ident {
|
||||||
|
value: val,
|
||||||
|
quote_style: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(TableFactor::Table {
|
||||||
|
name: ObjectName(vec![ident]),
|
||||||
|
alias,
|
||||||
|
args: Vec::new(),
|
||||||
|
with_hints: Vec::new(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Value::CustomValue { span, .. } => {
|
||||||
|
let db = SQLiteDatabase::try_from_value(table)?;
|
||||||
|
|
||||||
|
if &db.connection != connection {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Incompatible connections".into(),
|
||||||
|
"trying to join on table with different connection".into(),
|
||||||
|
Some(span),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
match db.statement {
|
||||||
|
Some(statement) => match statement {
|
||||||
|
Statement::Query(query) => Ok(TableFactor::Derived {
|
||||||
|
lateral: false,
|
||||||
|
subquery: query,
|
||||||
|
alias,
|
||||||
|
}),
|
||||||
|
s => Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(span),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
)),
|
||||||
|
},
|
||||||
|
None => Err(ShellError::GenericError(
|
||||||
|
"Error creating derived table".into(),
|
||||||
|
"there is no statement defined yet".into(),
|
||||||
|
Some(span),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => Err(ShellError::UnsupportedInput(
|
||||||
|
"String or connection".into(),
|
||||||
|
table.span()?,
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
@ -1,11 +1,13 @@
|
|||||||
use super::super::SQLiteDatabase;
|
use crate::database::values::definitions::ConnectionDb;
|
||||||
|
|
||||||
|
use super::{super::SQLiteDatabase, conversions::value_into_table_factor};
|
||||||
use nu_engine::CallExt;
|
use nu_engine::CallExt;
|
||||||
use nu_protocol::{
|
use nu_protocol::{
|
||||||
ast::Call,
|
ast::Call,
|
||||||
engine::{Command, EngineState, Stack},
|
engine::{Command, EngineState, Stack},
|
||||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape,
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
|
||||||
};
|
};
|
||||||
use sqlparser::ast::{Ident, ObjectName, Query, Select, SetExpr, TableFactor, TableWithJoins};
|
use sqlparser::ast::{Ident, Query, Select, SetExpr, Statement, TableAlias, TableWithJoins};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct FromDb;
|
pub struct FromDb;
|
||||||
@ -23,8 +25,14 @@ impl Command for FromDb {
|
|||||||
Signature::build(self.name())
|
Signature::build(self.name())
|
||||||
.required(
|
.required(
|
||||||
"select",
|
"select",
|
||||||
|
SyntaxShape::Any,
|
||||||
|
"table of derived table to select from",
|
||||||
|
)
|
||||||
|
.named(
|
||||||
|
"as",
|
||||||
SyntaxShape::String,
|
SyntaxShape::String,
|
||||||
"Name of table to select from",
|
"Alias for the selected table",
|
||||||
|
Some('a'),
|
||||||
)
|
)
|
||||||
.category(Category::Custom("database".into()))
|
.category(Category::Custom("database".into()))
|
||||||
}
|
}
|
||||||
@ -48,51 +56,94 @@ impl Command for FromDb {
|
|||||||
call: &Call,
|
call: &Call,
|
||||||
input: PipelineData,
|
input: PipelineData,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let table: String = call.req(engine_state, stack, 0)?;
|
|
||||||
|
|
||||||
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
|
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
|
||||||
db.query = match db.query {
|
db.statement = match db.statement {
|
||||||
None => Some(create_query(table)),
|
None => Some(create_statement(&db.connection, engine_state, stack, call)?),
|
||||||
Some(query) => Some(modify_query(query, table)),
|
Some(statement) => Some(modify_statement(
|
||||||
|
&db.connection,
|
||||||
|
statement,
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
call,
|
||||||
|
)?),
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(db.into_value(call.head).into_pipeline_data())
|
Ok(db.into_value(call.head).into_pipeline_data())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_query(table: String) -> Query {
|
fn create_statement(
|
||||||
Query {
|
connection: &ConnectionDb,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
) -> Result<Statement, ShellError> {
|
||||||
|
let query = Query {
|
||||||
with: None,
|
with: None,
|
||||||
body: SetExpr::Select(Box::new(create_select(table))),
|
body: SetExpr::Select(Box::new(create_select(
|
||||||
|
connection,
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
call,
|
||||||
|
)?)),
|
||||||
order_by: Vec::new(),
|
order_by: Vec::new(),
|
||||||
limit: None,
|
limit: None,
|
||||||
offset: None,
|
offset: None,
|
||||||
fetch: None,
|
fetch: None,
|
||||||
lock: None,
|
lock: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Statement::Query(Box::new(query)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn modify_statement(
|
||||||
|
connection: &ConnectionDb,
|
||||||
|
mut statement: Statement,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
) -> Result<Statement, ShellError> {
|
||||||
|
match statement {
|
||||||
|
Statement::Query(ref mut query) => {
|
||||||
|
match query.body {
|
||||||
|
SetExpr::Select(ref mut select) => {
|
||||||
|
let table = create_table(connection, engine_state, stack, call)?;
|
||||||
|
select.from.push(table);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
query.as_mut().body = SetExpr::Select(Box::new(create_select(
|
||||||
|
connection,
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
call,
|
||||||
|
)?));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(statement)
|
||||||
|
}
|
||||||
|
s => Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn modify_query(mut query: Query, table: String) -> Query {
|
fn create_select(
|
||||||
query.body = match query.body {
|
connection: &ConnectionDb,
|
||||||
SetExpr::Select(select) => SetExpr::Select(modify_select(select, table)),
|
engine_state: &EngineState,
|
||||||
_ => SetExpr::Select(Box::new(create_select(table))),
|
stack: &mut Stack,
|
||||||
};
|
call: &Call,
|
||||||
|
) -> Result<Select, ShellError> {
|
||||||
query
|
Ok(Select {
|
||||||
}
|
|
||||||
|
|
||||||
fn modify_select(mut select: Box<Select>, table: String) -> Box<Select> {
|
|
||||||
select.as_mut().from = create_from(table);
|
|
||||||
select
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create_select(table: String) -> Select {
|
|
||||||
Select {
|
|
||||||
distinct: false,
|
distinct: false,
|
||||||
top: None,
|
top: None,
|
||||||
projection: Vec::new(),
|
projection: Vec::new(),
|
||||||
into: None,
|
into: None,
|
||||||
from: create_from(table),
|
from: vec![create_table(connection, engine_state, stack, call)?],
|
||||||
lateral_views: Vec::new(),
|
lateral_views: Vec::new(),
|
||||||
selection: None,
|
selection: None,
|
||||||
group_by: Vec::new(),
|
group_by: Vec::new(),
|
||||||
@ -100,29 +151,32 @@ fn create_select(table: String) -> Select {
|
|||||||
distribute_by: Vec::new(),
|
distribute_by: Vec::new(),
|
||||||
sort_by: Vec::new(),
|
sort_by: Vec::new(),
|
||||||
having: None,
|
having: None,
|
||||||
}
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// This function needs more work
|
fn create_table(
|
||||||
// It needs to define multi tables and joins
|
connection: &ConnectionDb,
|
||||||
// I assume we will need to define expressions for the columns instead of strings
|
engine_state: &EngineState,
|
||||||
fn create_from(table: String) -> Vec<TableWithJoins> {
|
stack: &mut Stack,
|
||||||
let ident = Ident {
|
call: &Call,
|
||||||
value: table,
|
) -> Result<TableWithJoins, ShellError> {
|
||||||
quote_style: None,
|
let alias = call
|
||||||
};
|
.get_flag::<String>(engine_state, stack, "as")?
|
||||||
|
.map(|alias| TableAlias {
|
||||||
|
name: Ident {
|
||||||
|
value: alias,
|
||||||
|
quote_style: None,
|
||||||
|
},
|
||||||
|
columns: Vec::new(),
|
||||||
|
});
|
||||||
|
|
||||||
let table_factor = TableFactor::Table {
|
let select_table: Value = call.req(engine_state, stack, 0)?;
|
||||||
name: ObjectName(vec![ident]),
|
let table_factor = value_into_table_factor(select_table, connection, alias)?;
|
||||||
alias: None,
|
|
||||||
args: Vec::new(),
|
|
||||||
with_hints: Vec::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let table = TableWithJoins {
|
let table = TableWithJoins {
|
||||||
relation: table_factor,
|
relation: table_factor,
|
||||||
joins: Vec::new(),
|
joins: Vec::new(),
|
||||||
};
|
};
|
||||||
|
|
||||||
vec![table]
|
Ok(table)
|
||||||
}
|
}
|
||||||
|
85
crates/nu-command/src/database/commands/function.rs
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
use crate::database::values::dsl::ExprDb;
|
||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::{
|
||||||
|
ast::Call,
|
||||||
|
engine::{Command, EngineState, Stack},
|
||||||
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
|
||||||
|
};
|
||||||
|
use sqlparser::ast::{Expr, Function, FunctionArg, FunctionArgExpr, Ident, ObjectName};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct FunctionExpr;
|
||||||
|
|
||||||
|
impl Command for FunctionExpr {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"db fn"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> Signature {
|
||||||
|
Signature::build(self.name())
|
||||||
|
.required("name", SyntaxShape::String, "function name")
|
||||||
|
.switch("distinct", "distict values", Some('d'))
|
||||||
|
.rest("arguments", SyntaxShape::Any, "function arguments")
|
||||||
|
.category(Category::Custom("database".into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Creates function expression for a select operation"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![Example {
|
||||||
|
description: "Creates a function expression",
|
||||||
|
example: "db fn count name_1",
|
||||||
|
result: None,
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["database", "function", "expression"]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
_input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let name: String = call.req(engine_state, stack, 0)?;
|
||||||
|
let vals: Vec<Value> = call.rest(engine_state, stack, 1)?;
|
||||||
|
let value = Value::List {
|
||||||
|
vals,
|
||||||
|
span: call.head,
|
||||||
|
};
|
||||||
|
let expressions = ExprDb::extract_exprs(value)?;
|
||||||
|
|
||||||
|
let name: Vec<Ident> = name
|
||||||
|
.split('.')
|
||||||
|
.map(|part| Ident {
|
||||||
|
value: part.to_string(),
|
||||||
|
quote_style: None,
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
let name = ObjectName(name);
|
||||||
|
|
||||||
|
let args: Vec<FunctionArg> = expressions
|
||||||
|
.into_iter()
|
||||||
|
.map(|expr| {
|
||||||
|
let arg = FunctionArgExpr::Expr(expr);
|
||||||
|
|
||||||
|
FunctionArg::Unnamed(arg)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let expression: ExprDb = Expr::Function(Function {
|
||||||
|
name,
|
||||||
|
args,
|
||||||
|
over: None,
|
||||||
|
distinct: call.has_flag("distinct"),
|
||||||
|
})
|
||||||
|
.into();
|
||||||
|
|
||||||
|
Ok(expression.into_value(call.head).into_pipeline_data())
|
||||||
|
}
|
||||||
|
}
|
102
crates/nu-command/src/database/commands/group_by.rs
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
use crate::database::values::dsl::ExprDb;
|
||||||
|
|
||||||
|
use super::super::SQLiteDatabase;
|
||||||
|
use nu_engine::CallExt;
|
||||||
|
use nu_protocol::{
|
||||||
|
ast::Call,
|
||||||
|
engine::{Command, EngineState, Stack},
|
||||||
|
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
|
||||||
|
};
|
||||||
|
use sqlparser::ast::{SetExpr, Statement};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct GroupByDb;
|
||||||
|
|
||||||
|
impl Command for GroupByDb {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"db group-by"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn usage(&self) -> &str {
|
||||||
|
"Group by query"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn signature(&self) -> Signature {
|
||||||
|
Signature::build(self.name())
|
||||||
|
.rest(
|
||||||
|
"select",
|
||||||
|
SyntaxShape::Any,
|
||||||
|
"Select expression(s) on the table",
|
||||||
|
)
|
||||||
|
.category(Category::Custom("database".into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_terms(&self) -> Vec<&str> {
|
||||||
|
vec!["database", "select"]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn examples(&self) -> Vec<Example> {
|
||||||
|
vec![Example {
|
||||||
|
description: "orders query by a column",
|
||||||
|
example: r#"db open db.mysql
|
||||||
|
| db from table_a
|
||||||
|
| db select a
|
||||||
|
| db group-by a
|
||||||
|
| db describe"#,
|
||||||
|
result: None,
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
&self,
|
||||||
|
engine_state: &EngineState,
|
||||||
|
stack: &mut Stack,
|
||||||
|
call: &Call,
|
||||||
|
input: PipelineData,
|
||||||
|
) -> Result<PipelineData, ShellError> {
|
||||||
|
let vals: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
||||||
|
let value = Value::List {
|
||||||
|
vals,
|
||||||
|
span: call.head,
|
||||||
|
};
|
||||||
|
let expressions = ExprDb::extract_exprs(value)?;
|
||||||
|
|
||||||
|
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
|
||||||
|
match db.statement.as_mut() {
|
||||||
|
Some(statement) => match statement {
|
||||||
|
Statement::Query(ref mut query) => match &mut query.body {
|
||||||
|
SetExpr::Select(ref mut select) => select.group_by = expressions,
|
||||||
|
s => {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a select".into(),
|
||||||
|
format!("Expected a connection with select query. Got {}", s),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
},
|
||||||
|
s => {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Connection doesnt define a query".into(),
|
||||||
|
format!("Expected a connection with query. Got {}", s),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
},
|
||||||
|
None => {
|
||||||
|
return Err(ShellError::GenericError(
|
||||||
|
"Connection without statement".into(),
|
||||||
|
"The connection needs a statement defined".into(),
|
||||||
|
Some(call.head),
|
||||||
|
None,
|
||||||
|
Vec::new(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(db.into_value(call.head).into_pipeline_data())
|
||||||
|
}
|
||||||
|
}
|