Compare commits
270 Commits
SHA1
---
6c07bc10e2
6365ba0286
545b1dcd94
fb89f2f48c
f6ee21f76b
d69a4db2e7
d4bfbb5eaf
507f24d029
230c36f2fb
219c719e98
50146bdef3
2042f7f769
0594f9e7aa
3b8deb9ec7
727ff5f2d4
3d62528d8c
a42d419b66
9602e82029
8e98df8b28
2daf8ec72d
afcacda35f
06cf3fa5ad
9a482ce284
8018ae3286
ef322a24c5
a8db4f0b0e
98a4280c41
0e1bfae13d
6ff717c0ba
d534a89867
5bc9246f0f
1e89cc3578
06f5199570
9e5e9819d6
1f8ccd8e5e
e9d8b19d4d
7c63ce15d8
a3a9571dac
2cc5952c37
aa88449f29
06199d731b
0ba86d7eb8
6efd1bcb3f
0d06b6259f
8fdc272bcc
0ea7a38c21
1999e0dcf3
ac30b3d108
2b1e05aad0
6c56829976
2c58beec13
9c779b071b
1e94793df5
7d9a77f179
bb079608dd
5fa42eeb8c
3e09158afc
7a78171b34
633ebc7e43
f0cb2f38df
f26d3bf8d7
498672f5e5
038391519b
8004e8e2a0
e192684612
5d40fc2726
a22d70718f
24a49f1b0a
04473a5593
d1e7884d19
2b96c93b8d
fc41a0f96b
8bd68416e3
2062e33c37
c6383874e9
d90b25c633
44bcfb3403
c047fd4778
16bd7b6d0d
3cef94ba39
f818193b53
1aec4a343a
852de79212
06f40405fe
65bac77e8a
32d1939a95
53e35670ea
a92567489f
2145feff5d
0b95465ea1
ec804f4568
4717ac70fd
9969fbfbb1
5f39267a80
94a9380e8b
1d64863585
8218f72eea
c0b99b7131
75c033e4d1
d88d057bf6
b00098ccc6
7e5e9c28dd
8ffffe9bcc
8030f7e9f0
e4959d2f9f
f311da9623
14d80d54fe
23b467061b
8d8f25b210
7ee22603ac
4052a99ff5
ccfa35289b
54fc164e1c
3a35bf7d4e
a61d09222f
07ac3c3aab
061e9294b3
374757f286
ca75cd7c0a
d08c072f19
9b99b2f6ac
1cb449b2d1
6cc66c8afd
08e495ea67
b0647f780d
2dfd975940
fbdb125141
c2ea993f7e
e14e60dd2c
768ff47d28
78a1879e36
0b9c0fea9d
02a3430ef0
6623ed9061
48cf103439
1bcb87c48d
da104050e6
d306b834ca
d4371438d1
6a972312d4
ac48f5a318
e36649f74b
1a52460695
ab98ecd55b
9a8e939cbe
bb27b9f371
1ca3063ac3
7c9a78d922
49cbc30974
07a7bb14bf
74f1c5b67b
3b0151aba6
4a69819f9a
1f7d3498cd
f0b9dc9da1
96f8691c8d
07255e576d
260be40774
14c9bd44ef
92785ab92c
98ab31e15e
80d57d70cd
8dc199d817
435693a8bb
5077242892
7a7aa310aa
07893e01c1
f16401152b
3df03e2e6d
7c6f976d65
ae9c0fc138
9da2e142b2
5999506f87
1fc7abcc38
2659ea3dbd
fa27110651
b4f8798a3a
7714956276
8e5cc655e9
c78e28511d
f189369fd7
2516305fa8
f2d7454330
3cf3329e49
d2bc2dcbb2
4ec4649903
55e5106695
5f35e4ad1e
e7831d38ae
5c9fe85ec4
cd5199de31
5319544481
be3f0edc97
fb8f7b114e
187f2454c8
3492d4015d
190f379ff3
5c2bc73d7b
aeed8670f1
b38f90d4c7
9771270b38
f6b99b2d8f
ec611526ac
cd2df83ddc
3eb447030b
f2a45b3eac
e94d13da1b
c20ba95885
8eab311565
e2b510b65e
e6a70f9846
667eb27d1b
b9eb213f36
cc78446ffd
5ff2ae628b
661283c4d2
ee29a15119
2a18206771
a26272b44b
7e730e28bb
96253c69fb
ded9d1cedb
d1cc70fc4a
18c9b62b00
1295495758
e97ba9b74c
09b972f1dc
0fb6f8f93c
995d8db1fe
7e97be1dd4
b501db673a
c0ce1e9057
4d7b86f278
f2d47f97da
0de289f6b7
ae674bfaec
76079d5183
409f1480f5
e206555d9d
88ec4186ec
dd1d9b7623
1314a87cb0
cf65f77b02
c9f05f074a
7710317224
0a990ed105
a35b975d84
6e85b04923
4d31139a44
1bad40726d
cb3276fb3b
c17129a92a
5bf1c98a39
13b371ab58
2a3991cfdb
583b7b1821
581afc9023
8e2847431e
6a1378c1bb
2fe14a7a5a
7490392eb9
9844e6125b
56af7e8d5f
dc612e7ffb
1d1dbfd04c
c150e11cb4
87c684c7da
10792a29f7
257290acc2
cfefb65d55
.github/pull_request_template.md (4 changed lines)
@@ -7,5 +7,5 @@
Make sure you've run and fixed any issues with these commands:

- [ ] `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
- [ ] `cargo clippy --all --all-features -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect` to check that you're using the standard code style
- [ ] `cargo build; cargo test --all --all-features` to check that all the tests pass
- [ ] `cargo clippy --workspace --features=extra -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect` to check that you're using the standard code style
- [ ] `cargo test --workspace --features=extra` to check that all the tests pass
.github/workflows/ci.yml (129 changed lines)
@@ -1,34 +1,23 @@
on: [pull_request]
on:
pull_request:
push: # Run CI on the main branch after every merge. This is important to fill the GitHub Actions cache in a way that pull requests can see it
branches:
- main

name: Continuous integration
name: continuous-integration

jobs:
ci:
nu-fmt-clippy:
strategy:
fail-fast: false
fail-fast: true
matrix:
platform: [windows-latest, macos-latest, ubuntu-latest]
style: [all, default, minimal]
rust:
- stable
include:
- style: all
flags: '--all-features'
- style: default
flags: ''
- style: minimal
flags: '--no-default-features'
exclude:
- platform: windows-latest
style: default
- platform: windows-latest
style: minimal
- platform: macos-latest
style: default
- platform: macos-latest
style: minimal

runs-on: ${{ matrix.platform }}
env:
NUSHELL_CARGO_TARGET: ci

steps:
- uses: actions/checkout@v2
@@ -41,6 +30,10 @@ jobs:
override: true
components: rustfmt, clippy

- uses: Swatinem/rust-cache@v1
with:
key: "v2" # increment this to bust the cache if needed

- name: Rustfmt
uses: actions-rs/cargo@v1
with:
@@ -51,24 +44,58 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
args: --features=extra --workspace --exclude nu_plugin_* -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect

- name: Build Nushell
uses: actions-rs/cargo@v1
nu-tests:
env:
NUSHELL_CARGO_TARGET: ci

strategy:
fail-fast: true
matrix:
platform: [windows-latest, macos-latest, ubuntu-latest]
style: [extra, default]
rust:
- stable
include:
- style: extra
flags: "--features=extra"
- style: default
flags: ""
exclude:
- platform: windows-latest
style: default
- platform: macos-latest
style: default

runs-on: ${{ matrix.platform }}

steps:
- uses: actions/checkout@v2

- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
command: build
args: ${{ matrix.flags }}
profile: minimal
toolchain: ${{ matrix.rust }}
override: true

- uses: Swatinem/rust-cache@v1
with:
key: ${{ matrix.style }}v3 # increment this to bust the cache if needed

- name: Tests
uses: actions-rs/cargo@v1
with:
command: test
args: --all ${{ matrix.flags }}

args: --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}

python-virtualenv:
env:
NUSHELL_CARGO_TARGET: ci

strategy:
fail-fast: false
fail-fast: true
matrix:
platform: [ubuntu-latest, macos-latest, windows-latest]
rust:
@@ -87,13 +114,16 @@ jobs:
profile: minimal
toolchain: ${{ matrix.rust }}
override: true
components: rustfmt, clippy

- uses: Swatinem/rust-cache@v1
with:
key: "2" # increment this to bust the cache if needed

- name: Install Nushell
uses: actions-rs/cargo@v1
with:
command: install
args: --path=. --no-default-features
args: --path=. --profile ci --no-default-features

- name: Setup Python
uses: actions/setup-python@v2
@@ -112,3 +142,40 @@ jobs:
- name: Test Nushell in virtualenv
run: cd virtualenv && tox -e ${{ matrix.py }} -- -k nushell
shell: bash

# Build+test plugins on their own, without the rest of Nu. This helps with CI parallelization and
# also helps test that the plugins build without any feature unification shenanigans
plugins:
env:
NUSHELL_CARGO_TARGET: ci

strategy:
fail-fast: true
matrix:
platform: [windows-latest, macos-latest, ubuntu-latest]
rust:
- stable

runs-on: ${{ matrix.platform }}

steps:
- uses: actions/checkout@v2

- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.rust }}
override: true

- name: Clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: --package nu_plugin_* ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect

- name: Tests
uses: actions-rs/cargo@v1
with:
command: test
args: --profile ci --package nu_plugin_*
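The same checks can be reproduced locally by combining the `args:` values with the matrix `flags` shown above. A minimal sketch, assuming a local checkout and the `ci` profile added to Cargo.toml further down; quoting the package glob so the shell does not expand it is an assumption, not part of the workflow:

```nu
# Roughly the nu-tests job with style: extra, run locally (flags taken from the matrix above).
cargo test --workspace --profile ci --exclude 'nu_plugin_*' --features=extra
# Roughly the plugins job.
cargo test --profile ci --package 'nu_plugin_*'
```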
.github/workflows/release-pkg.nu (new executable file, 155 lines)
@@ -0,0 +1,155 @@
#!/usr/bin/env nu

# Created: 2022/05/26 19:05:20
# Description:
#   A script to do the github release task, need nushell to be installed.
# REF:
#   1. https://github.com/volks73/cargo-wix

# The main binary file to be released
let bin = 'nu'
let os = $env.OS
let target = $env.TARGET
# Repo source dir like `/home/runner/work/nushell/nushell`
let src = $env.GITHUB_WORKSPACE
let flags = $env.TARGET_RUSTFLAGS
let dist = $'($env.GITHUB_WORKSPACE)/output'
let version = (open Cargo.toml | get package.version)

# $env

$'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
if not ('Cargo.lock' | path exists) { cargo generate-lockfile }

$'Start building ($bin)...'; hr-line

# ----------------------------------------------------------------------------
# Build for Ubuntu and macOS
# ----------------------------------------------------------------------------
if $os in ['ubuntu-latest', 'macos-latest'] {
if $os == 'ubuntu-latest' {
sudo apt-get install libxcb-composite0-dev -y
}
if $target == 'aarch64-unknown-linux-gnu' {
sudo apt-get install gcc-aarch64-linux-gnu -y
let-env CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
cargo-build-nu $flags
} else if $target == 'armv7-unknown-linux-gnueabihf' {
sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
let-env CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
cargo-build-nu $flags
} else {
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
# Actually just for x86_64-unknown-linux-musl target
sudo apt install musl-tools -y
cargo-build-nu $flags
}
}

# ----------------------------------------------------------------------------
# Build for Windows without static-link-openssl feature
# ----------------------------------------------------------------------------
if $os in ['windows-latest'] {
if ($flags | str trim | empty?) {
cargo build --release --all --target $target --features=extra
} else {
cargo build --release --all --target $target --features=extra $flags
}
}

# ----------------------------------------------------------------------------
# Prepare for the release archive
# ----------------------------------------------------------------------------
let suffix = if $os == 'windows-latest' { '.exe' }
# nu, nu_plugin_* were all included
let executable = $'target/($target)/release/($bin)*($suffix)'
$'Current executable file: ($executable)'

cd $src; mkdir $dist;
rm -rf $'target/($target)/release/*.d' $'target/($target)/release/nu_pretty_hex*'
$'(char nl)All executable files:'; hr-line
ls -f $executable

$'(char nl)Copying release files...'; hr-line
cp -v README.release.txt $'($dist)/README.txt'
[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten

$'(char nl)Check binary release version detail:'; hr-line
let ver = if $os == 'windows-latest' {
(do -i { ./output/nu.exe -c 'version' }) | str collect
} else {
(do -i { ./output/nu -c 'version' }) | str collect
}
if ($ver | str trim | empty?) {
$'(ansi r)Incompatible nu binary...(ansi reset)'
} else { $ver }

# ----------------------------------------------------------------------------
# Create a release archive and send it to output for the following steps
# ----------------------------------------------------------------------------
cd $dist; $'(char nl)Creating release archive...'; hr-line
if $os in ['ubuntu-latest', 'macos-latest'] {

$'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls

let archive = $'($dist)/($bin)-($version)-($target).tar.gz'
tar czf $archive *
print $'archive: ---> ($archive)'; ls $archive
echo $'::set-output name=archive::($archive)'

} else if $os == 'windows-latest' {

let releaseStem = $'($bin)-($version)-($target)'

$'(char nl)Download less related stuffs...'; hr-line
curl https://github.com/jftuga/less-Windows/releases/download/less-v590/less.exe -o $'($dist)\less.exe'
curl https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o $'($dist)\LICENSE-for-less.txt'

# Create Windows msi release package
if (get-env _EXTRA_) == 'msi' {

let wixRelease = $'($src)/target/wix/($releaseStem).msi'
$'(char nl)Start creating Windows msi package...'
cd $src; hr-line
# Wix need the binaries be stored in target/release/
cp -r $'($dist)/*' target/release/
cargo install cargo-wix --version 0.3.2
cargo wix --no-build --nocapture --package nu --output $wixRelease
echo $'::set-output name=archive::($wixRelease)'

} else {

$'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
let archive = $'($dist)/($releaseStem).zip'
7z a $archive *
print $'archive: ---> ($archive)';
let pkg = (ls -f $archive | get name)
if not ($pkg | empty?) {
echo $'::set-output name=archive::($pkg | get 0)'
}
}
}

def 'cargo-build-nu' [ options: string ] {
if ($options | str trim | empty?) {
cargo build --release --all --target $target --features=extra,static-link-openssl
} else {
cargo build --release --all --target $target --features=extra,static-link-openssl $options
}
}

# Print a horizontal line marker
def 'hr-line' [
--blank-line(-b): bool
] {
print $'(ansi g)---------------------------------------------------------------------------->(ansi reset)'
if $blank-line { char nl }
}

# Get the specified env key's value or ''
def 'get-env' [
key: string       # The key to get it's env value
default: string = ''  # The default value for an empty env
] {
$env | get -i $key | default $default
}
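The release workflow below drives this script entirely through environment variables (OS, TARGET, TARGET_RUSTFLAGS, GITHUB_WORKSPACE, _EXTRA_). A hedged sketch of a local invocation, with target and OS values borrowed from the release matrix; running it outside CI is an assumption, not something this changeset documents:

```nu
# Hypothetical local dry run of release-pkg.nu (nu 0.63 syntax).
let-env OS = 'ubuntu-latest'
let-env TARGET = 'x86_64-unknown-linux-musl'
let-env TARGET_RUSTFLAGS = ''
let-env GITHUB_WORKSPACE = $env.PWD
let-env _EXTRA_ = 'bin'
nu .github/workflows/release-pkg.nu
```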
.github/workflows/release.yml (522 changed lines)
@@ -1,3 +1,7 @@
#
# REF:
#   1. https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstrategymatrixinclude
#
name: Create Release Draft

on:
@@ -5,439 +9,89 @@ on:
push:
tags: ["[0-9]+.[0-9]+.[0-9]+*"]

defaults:
run:
shell: bash

jobs:
linux:
name: Build Linux
runs-on: ubuntu-latest
all:
name: All

strategy:
matrix:
target:
- aarch64-apple-darwin
- x86_64-apple-darwin
- x86_64-pc-windows-msvc
- x86_64-unknown-linux-gnu
- x86_64-unknown-linux-musl
- aarch64-unknown-linux-gnu
- armv7-unknown-linux-gnueabihf
extra: ['bin']
include:
- target: aarch64-apple-darwin
os: macos-latest
target_rustflags: ''
- target: x86_64-apple-darwin
os: macos-latest
target_rustflags: ''
- target: x86_64-pc-windows-msvc
extra: 'bin'
os: windows-latest
target_rustflags: ''
- target: x86_64-pc-windows-msvc
extra: msi
os: windows-latest
target_rustflags: ''
- target: x86_64-unknown-linux-gnu
os: ubuntu-latest
target_rustflags: ''
- target: x86_64-unknown-linux-musl
os: ubuntu-latest
target_rustflags: ''
- target: aarch64-unknown-linux-gnu
os: ubuntu-latest
target_rustflags: ''
- target: armv7-unknown-linux-gnueabihf
os: ubuntu-latest
target_rustflags: ''

runs-on: ${{matrix.os}}

steps:
- name: Check out code
uses: actions/checkout@v2

- name: Install libxcb
run: sudo apt-get install libxcb-composite0-dev

- name: Set up cargo
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true

- name: Build
uses: actions-rs/cargo@v1
with:
command: build
args: --release --all --features=extra

# - name: Strip binaries (nu)
#   run: strip target/release/nu

# - name: Strip binaries (nu_plugin_inc)
#   run: strip target/release/nu_plugin_inc

# - name: Strip binaries (nu_plugin_match)
#   run: strip target/release/nu_plugin_match

# - name: Strip binaries (nu_plugin_textview)
#   run: strip target/release/nu_plugin_textview

# - name: Strip binaries (nu_plugin_binaryview)
#   run: strip target/release/nu_plugin_binaryview

# - name: Strip binaries (nu_plugin_chart_bar)
#   run: strip target/release/nu_plugin_chart_bar

# - name: Strip binaries (nu_plugin_chart_line)
#   run: strip target/release/nu_plugin_chart_line

# - name: Strip binaries (nu_plugin_from_bson)
#   run: strip target/release/nu_plugin_from_bson

# - name: Strip binaries (nu_plugin_from_sqlite)
#   run: strip target/release/nu_plugin_from_sqlite

# - name: Strip binaries (nu_plugin_from_mp4)
#   run: strip target/release/nu_plugin_from_mp4

# - name: Strip binaries (nu_plugin_query_json)
#   run: strip target/release/nu_plugin_query_json

# - name: Strip binaries (nu_plugin_s3)
#   run: strip target/release/nu_plugin_s3

# - name: Strip binaries (nu_plugin_selector)
#   run: strip target/release/nu_plugin_selector

# - name: Strip binaries (nu_plugin_start)
#   run: strip target/release/nu_plugin_start

# - name: Strip binaries (nu_plugin_to_bson)
#   run: strip target/release/nu_plugin_to_bson

# - name: Strip binaries (nu_plugin_to_sqlite)
#   run: strip target/release/nu_plugin_to_sqlite

# - name: Strip binaries (nu_plugin_tree)
#   run: strip target/release/nu_plugin_tree

# - name: Strip binaries (nu_plugin_xpath)
#   run: strip target/release/nu_plugin_xpath

- name: Create output directory
run: mkdir output

- name: Copy files to output
run: |
cp target/release/nu target/release/nu_plugin_* output/
cp README.build.txt output/README.txt
cp LICENSE output/LICENSE
rm output/*.d

# Note: If OpenSSL changes, this path will need to be updated
- name: Copy OpenSSL to output
run: cp /usr/lib/x86_64-linux-gnu/libssl.so.1.1 output/

- name: Upload artifact
uses: actions/upload-artifact@v2
with:
name: linux
path: output/*

macos:
name: Build macOS
runs-on: macos-latest
steps:
- name: Check out code
uses: actions/checkout@v2

- name: Set up cargo
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true

- name: Build
uses: actions-rs/cargo@v1
with:
command: build
args: --release --all --features=extra

# - name: Strip binaries (nu)
#   run: strip target/release/nu

# - name: Strip binaries (nu_plugin_inc)
#   run: strip target/release/nu_plugin_inc

# - name: Strip binaries (nu_plugin_match)
#   run: strip target/release/nu_plugin_match

# - name: Strip binaries (nu_plugin_textview)
#   run: strip target/release/nu_plugin_textview

# - name: Strip binaries (nu_plugin_binaryview)
#   run: strip target/release/nu_plugin_binaryview

# - name: Strip binaries (nu_plugin_chart_bar)
#   run: strip target/release/nu_plugin_chart_bar

# - name: Strip binaries (nu_plugin_chart_line)
#   run: strip target/release/nu_plugin_chart_line

# - name: Strip binaries (nu_plugin_from_bson)
#   run: strip target/release/nu_plugin_from_bson

# - name: Strip binaries (nu_plugin_from_sqlite)
#   run: strip target/release/nu_plugin_from_sqlite

# - name: Strip binaries (nu_plugin_from_mp4)
#   run: strip target/release/nu_plugin_from_mp4

# - name: Strip binaries (nu_plugin_query_json)
#   run: strip target/release/nu_plugin_query_json

# - name: Strip binaries (nu_plugin_s3)
#   run: strip target/release/nu_plugin_s3

# - name: Strip binaries (nu_plugin_selector)
#   run: strip target/release/nu_plugin_selector

# - name: Strip binaries (nu_plugin_start)
#   run: strip target/release/nu_plugin_start

# - name: Strip binaries (nu_plugin_to_bson)
#   run: strip target/release/nu_plugin_to_bson

# - name: Strip binaries (nu_plugin_to_sqlite)
#   run: strip target/release/nu_plugin_to_sqlite

# - name: Strip binaries (nu_plugin_tree)
#   run: strip target/release/nu_plugin_tree

# - name: Strip binaries (nu_plugin_xpath)
#   run: strip target/release/nu_plugin_xpath

- name: Create output directory
run: mkdir output

- name: Copy files to output
run: |
cp target/release/nu target/release/nu_plugin_* output/
cp README.build.txt output/README.txt
cp LICENSE output/LICENSE
rm output/*.d

- name: Upload artifact
uses: actions/upload-artifact@v2
with:
name: macos
path: output/*

windows:
name: Build Windows
runs-on: windows-latest
steps:
- name: Check out code
uses: actions/checkout@v2

- name: Set up cargo
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true

- name: Add cargo-wix subcommand
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-wix --version 0.3.1

- name: Build
uses: actions-rs/cargo@v1
with:
command: build
args: --release --all --features=extra

# - name: Strip binaries (nu.exe)
#   run: strip target/release/nu.exe

# - name: Strip binaries (nu_plugin_inc.exe)
#   run: strip target/release/nu_plugin_inc.exe

# - name: Strip binaries (nu_plugin_match.exe)
#   run: strip target/release/nu_plugin_match.exe

# - name: Strip binaries (nu_plugin_textview.exe)
#   run: strip target/release/nu_plugin_textview.exe

# - name: Strip binaries (nu_plugin_binaryview.exe)
#   run: strip target/release/nu_plugin_binaryview.exe

# - name: Strip binaries (nu_plugin_chart_bar.exe)
#   run: strip target/release/nu_plugin_chart_bar.exe

# - name: Strip binaries (nu_plugin_chart_line.exe)
#   run: strip target/release/nu_plugin_chart_line.exe

# - name: Strip binaries (nu_plugin_from_bson.exe)
#   run: strip target/release/nu_plugin_from_bson.exe

# - name: Strip binaries (nu_plugin_from_sqlite.exe)
#   run: strip target/release/nu_plugin_from_sqlite.exe

# - name: Strip binaries (nu_plugin_from_mp4.exe)
#   run: strip target/release/nu_plugin_from_mp4.exe

# - name: Strip binaries (nu_plugin_query_json.exe)
#   run: strip target/release/nu_plugin_query_json.exe

# - name: Strip binaries (nu_plugin_s3.exe)
#   run: strip target/release/nu_plugin_s3.exe

# - name: Strip binaries (nu_plugin_selector.exe)
#   run: strip target/release/nu_plugin_selector.exe

# - name: Strip binaries (nu_plugin_start.exe)
#   run: strip target/release/nu_plugin_start.exe

# - name: Strip binaries (nu_plugin_to_bson.exe)
#   run: strip target/release/nu_plugin_to_bson.exe

# - name: Strip binaries (nu_plugin_to_sqlite.exe)
#   run: strip target/release/nu_plugin_to_sqlite.exe

# - name: Strip binaries (nu_plugin_tree.exe)
#   run: strip target/release/nu_plugin_tree.exe

# - name: Strip binaries (nu_plugin_xpath.exe)
#   run: strip target/release/nu_plugin_xpath.exe

- name: Create output directory
run: mkdir output

- name: Download Less Binary
run: Invoke-WebRequest -Uri "https://github.com/jftuga/less-Windows/releases/download/less-v562.0/less.exe" -OutFile "target\release\less.exe"

- name: Download Less License
run: Invoke-WebRequest -Uri "https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE" -OutFile "target\release\LICENSE-for-less.txt"

- name: Copy files to output
run: |
cp target\release\nu.exe output\
cp LICENSE output\
cp target\release\LICENSE-for-less.txt output\
cp target\release\nu_plugin_*.exe output\
cp README.build.txt output\README.txt
cp target\release\less.exe output\
# Note: If the version of `less.exe` needs to be changed, update this URL
# Similarly, if `less.exe` is checked into the repo, copy from the local path here
# moved this stuff down to create wix after we download less

- name: Create msi with wix
uses: actions-rs/cargo@v1
with:
command: wix
args: --no-build --nocapture --output target\wix\nushell-windows.msi

- name: Upload installer
uses: actions/upload-artifact@v2
with:
name: windows-installer
path: target\wix\nushell-windows.msi

- name: Upload zip
uses: actions/upload-artifact@v2
with:
name: windows-zip
path: output\*

release:
name: Publish Release
runs-on: ubuntu-latest
needs:
- linux
- macos
- windows
steps:
- name: Check out code
uses: actions/checkout@v2

- name: Determine Release Info
id: info
env:
GITHUB_REF: ${{ github.ref }}
run: |
VERSION=${GITHUB_REF##*/}
MAJOR=${VERSION%%.*}
MINOR=${VERSION%.*}
MINOR=${MINOR#*.}
PATCH=${VERSION##*.}
echo "::set-output name=version::${VERSION}"
echo "::set-output name=linuxdir::nu_${MAJOR}_${MINOR}_${PATCH}_linux"
echo "::set-output name=macosdir::nu_${MAJOR}_${MINOR}_${PATCH}_macOS"
echo "::set-output name=windowsdir::nu_${MAJOR}_${MINOR}_${PATCH}_windows"
echo "::set-output name=innerdir::nushell-${VERSION}"

- name: Create Release Draft
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: ${{ steps.info.outputs.version }} Release
draft: true

- name: Create Linux Directory
run: mkdir -p ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}

- name: Download Linux Artifacts
uses: actions/download-artifact@v2
with:
name: linux
path: ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}

- name: Restore Linux File Modes
run: |
chmod 755 ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}/nu*
chmod 755 ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}/libssl*

- name: Create Linux tarball
run: tar -zcvf ${{ steps.info.outputs.linuxdir }}.tar.gz ${{ steps.info.outputs.linuxdir }}

- name: Upload Linux Artifact
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./${{ steps.info.outputs.linuxdir }}.tar.gz
asset_name: ${{ steps.info.outputs.linuxdir }}.tar.gz
asset_content_type: application/gzip

- name: Create macOS Directory
run: mkdir -p ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}

- name: Download macOS Artifacts
uses: actions/download-artifact@v2
with:
name: macos
path: ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}

- name: Restore macOS File Modes
run: chmod 755 ${{ steps.info.outputs.macosdir }}/${{ steps.info.outputs.innerdir }}/nu*

- name: Create macOS Archive
run: zip -r ${{ steps.info.outputs.macosdir }}.zip ${{ steps.info.outputs.macosdir }}

- name: Upload macOS Artifact
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./${{ steps.info.outputs.macosdir }}.zip
asset_name: ${{ steps.info.outputs.macosdir }}.zip
asset_content_type: application/zip

- name: Create Windows Directory
run: mkdir -p ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}

- name: Download Windows zip
uses: actions/download-artifact@v2
with:
name: windows-zip
path: ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}

- name: Show Windows Artifacts
run: ls -la ${{ steps.info.outputs.windowsdir }}/${{ steps.info.outputs.innerdir }}

- name: Create macOS Archive
run: zip -r ${{ steps.info.outputs.windowsdir }}.zip ${{ steps.info.outputs.windowsdir }}

- name: Upload Windows zip
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./${{ steps.info.outputs.windowsdir }}.zip
asset_name: ${{ steps.info.outputs.windowsdir }}.zip
asset_content_type: application/zip

- name: Download Windows installer
uses: actions/download-artifact@v2
with:
name: windows-installer
path: ./

- name: Upload Windows installer
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./nushell-windows.msi
asset_name: ${{ steps.info.outputs.windowsdir }}.msi
asset_content_type: application/x-msi
- uses: actions/checkout@v3.0.2

- name: Install Rust Toolchain Components
uses: actions-rs/toolchain@v1.0.6
with:
override: true
profile: minimal
toolchain: stable
target: ${{ matrix.target }}

- name: Setup Nushell
uses: hustcer/setup-nu@v1
with:
version: 0.63.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Release Nu Binary
id: nu
run: nu .github/workflows/release-pkg.nu
env:
OS: ${{ matrix.os }}
REF: ${{ github.ref }}
TARGET: ${{ matrix.target }}
_EXTRA_: ${{ matrix.extra }}
TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}

# REF: https://github.com/marketplace/actions/gh-release
- name: Publish Archive
uses: softprops/action-gh-release@v1
if: ${{ startsWith(github.ref, 'refs/tags/') }}
with:
draft: true
files: ${{ steps.nu.outputs.archive }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.gitignore (8 changed lines)
@@ -21,3 +21,11 @@ debian/nu/

# VSCode's IDE items
.vscode/*

# Helix configuration folder
.helix/*
.helix

# Coverage tools
lcov.info
tarpaulin-report.html
Cargo.lock (generated, 1433 changed lines)
Cargo.toml (60 changed lines)
@@ -10,8 +10,8 @@ license = "MIT"
name = "nu"
readme = "README.md"
repository = "https://github.com/nushell/nushell"
rust-version = "1.59"
version = "0.61.0"
rust-version = "1.60"
version = "0.63.1"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -36,28 +36,33 @@ chrono = "0.4.19"
crossterm = "0.23.0"
ctrlc = "3.2.1"
log = "0.4"
miette = "4.1.0"
miette = "4.5.0"
nu-ansi-term = "0.45.1"
nu-cli = { path="./crates/nu-cli", version = "0.61.0" }
nu-color-config = { path = "./crates/nu-color-config", version = "0.61.0" }
nu-command = { path="./crates/nu-command", version = "0.61.0" }
nu-engine = { path="./crates/nu-engine", version = "0.61.0" }
nu-json = { path="./crates/nu-json", version = "0.61.0" }
nu-parser = { path="./crates/nu-parser", version = "0.61.0" }
nu-path = { path="./crates/nu-path", version = "0.61.0" }
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.61.0" }
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.61.0" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.61.0" }
nu-system = { path = "./crates/nu-system", version = "0.61.0" }
nu-table = { path = "./crates/nu-table", version = "0.61.0" }
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.61.0" }
nu-cli = { path="./crates/nu-cli", version = "0.63.1" }
nu-color-config = { path = "./crates/nu-color-config", version = "0.63.1" }
nu-command = { path="./crates/nu-command", version = "0.63.1" }
nu-engine = { path="./crates/nu-engine", version = "0.63.1" }
nu-json = { path="./crates/nu-json", version = "0.63.1" }
nu-parser = { path="./crates/nu-parser", version = "0.63.1" }
nu-path = { path="./crates/nu-path", version = "0.63.1" }
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.63.1" }
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.63.1" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.63.1" }
nu-system = { path = "./crates/nu-system", version = "0.63.1" }
nu-table = { path = "./crates/nu-table", version = "0.63.1" }
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.63.1" }
nu-utils = { path = "./crates/nu-utils", version = "0.63.1" }
pretty_env_logger = "0.4.0"
rayon = "1.5.1"
reedline = { version = "0.4.0", features = ["bashisms"]}
reedline = { git = "https://github.com/nushell/reedline", branch = "main", features = ["bashisms"]}
is_executable = "1.0.1"

[target.'cfg(not(target_os = "windows"))'.dependencies]
# Our dependencies don't use OpenSSL on Windows
openssl = { version = "0.10.38", features = ["vendored"], optional = true }

[dev-dependencies]
nu-test-support = { path="./crates/nu-test-support", version = "0.61.0" }
nu-test-support = { path="./crates/nu-test-support", version = "0.63.1" }
tempfile = "3.2.0"
assert_cmd = "2.0.2"
pretty_assertions = "1.0.0"
@@ -67,18 +72,19 @@ rstest = "0.12.0"
itertools = "0.10.3"

[target.'cfg(windows)'.build-dependencies]
embed-resource = "1"
winres = "0.1"

[features]
plugin = ["nu-plugin", "nu-cli/plugin", "nu-parser/plugin", "nu-command/plugin", "nu-protocol/plugin", "nu-engine/plugin"]
default = ["plugin", "which-support", "zip-support", "trash-support"]
default = ["plugin", "which-support", "trash-support"]
stable = ["default"]
extra = ["default", "dataframe"]
extra = ["default", "dataframe", "database"]
wasi = []
# Enable to statically link OpenSSL; otherwise the system version will be used. Not enabled by default because it takes a while to build
static-link-openssl = ["dep:openssl"]

# Stable (Default)
which-support = ["nu-command/which-support"]
zip-support = ["nu-command/zip"]
trash-support = ["nu-command/trash-support"]

# Extra
@@ -86,6 +92,9 @@ trash-support = ["nu-command/trash-support"]
# Dataframe feature for nushell
dataframe = ["nu-command/dataframe"]

# Database commands for nushell
database = ["nu-command/database"]

[profile.release]
opt-level = "s" # Optimize for size
strip = "debuginfo"
@@ -98,6 +107,13 @@ inherits = "release"
strip = false
debug = true

# build with `cargo build --profile ci`
# to analyze performance with tooling like linux perf
[profile.ci]
inherits = "dev"
strip = false
debug = false

# Main nu binary
[[bin]]
name = "nu"
@@ -1,24 +0,0 @@
[tasks.lalrpop]
install_crate = { crate_name = "lalrpop", binary = "lalrpop", test_arg = "--help" }
command = "lalrpop"
args = ["src/parser/parser.lalrpop"]

[tasks.baseline]
command = "cargo"
args = ["build", "--bins"]

[tasks.run]
command = "cargo"
args = ["run"]
dependencies = ["baseline"]

[tasks.test]
command = "cargo"
args = ["test"]
dependencies = ["baseline"]

[tasks.check]
command = "cargo"
args = ["check"]
dependencies = ["baseline"]
@@ -1 +0,0 @@
Nu will look for the plugins in your PATH on startup. While nu will have some functionality without them, for full functionality you'll need to copy them into your path so they can be loaded.
@@ -1,7 +1,7 @@
# README

[](https://crates.io/crates/nu)
[](https://github.com/nushell/nushell/actions)
[](https://discord.gg/NtAbbGn)
[](https://changelog.com/podcast/363)
[](https://twitter.com/nu_shell)
@@ -43,7 +43,7 @@ You can also find information on more specific topics in our [cookbook](https://

Up-to-date installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.

To build Nu, you will need to use the **latest stable (1.59 or later)** version of the compiler.
To build Nu, you will need to use the **latest stable (1.60 or later)** version of the compiler.

Required dependencies:

@@ -275,6 +275,7 @@ Please submit an issue or PR to be added to this list.
- [starship](https://github.com/starship/starship)
- [oh-my-posh](https://ohmyposh.dev)
- [Couchbase Shell](https://couchbase.sh)
- [virtualenv](https://github.com/pypa/virtualenv)
### Mentions
- [The Python Launcher for Unix](https://github.com/brettcannon/python-launcher#how-do-i-get-a-table-of-python-executables-in-nushell)

@@ -285,7 +286,7 @@ See [Contributing](CONTRIBUTING.md) for details.
Thanks to all the people who already contributed!

<a href="https://github.com/nushell/nushell/graphs/contributors">
<img src="https://contributors-img.web.app/image?repo=nushell/nushell" />
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=500" />
</a>

## License
README.release.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:

> register -e json ./nu_plugin_query
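A short usage sketch of that instruction, assuming the archive contains `nu_plugin_query` and that `query json` is one of the commands it registers (adjust to whichever plugin binary actually ships):

```nu
# Register the bundled plugin, then call one of its commands on some inline JSON.
register -e json ./nu_plugin_query
'{"name": "nu"}' | query json name
```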
Removed image assets: 12 binary files (before sizes 166 KiB, 206 KiB, 167 KiB, 91 KiB, 182 KiB, 144 KiB, 146 KiB, 2.2 KiB, 1.6 KiB, 40 KiB, 68 KiB, 77 KiB).
@@ -1,49 +0,0 @@
#include <winver.h>

#define VER_FILEVERSION 0,59,1,0
#define VER_FILEVERSION_STR "0.59.1"

#define VER_PRODUCTVERSION 0,59,1,0
#define VER_PRODUCTVERSION_STR "0.59.1"

#ifdef RC_INVOKED

#ifdef DEBUG // TODO: Actually define DEBUG
#define VER_DEBUG VS_FF_DEBUG
#else
#define VER_DEBUG 0
#endif

VS_VERSION_INFO VERSIONINFO
FILEVERSION VER_FILEVERSION
PRODUCTVERSION VER_PRODUCTVERSION
FILEFLAGSMASK VS_FFI_FILEFLAGSMASK
FILEFLAGS VER_DEBUG
FILEOS VOS__WINDOWS32
FILETYPE VFT_APP
FILESUBTYPE VFT2_UNKNOWN
BEGIN
BLOCK "StringFileInfo"
BEGIN
BLOCK "040904b0"
BEGIN
VALUE "CompanyName", "nushell"
VALUE "FileDescription", "Nushell"
VALUE "FileVersion", VER_FILEVERSION_STR
VALUE "InternalName", "nu.exe"
VALUE "LegalCopyright", "Copyright (C) 2022"
VALUE "OriginalFilename", "nu.exe"
VALUE "ProductName", "Nushell"
VALUE "ProductVersion", VER_PRODUCTVERSION_STR
END
END

BLOCK "VarFileInfo"
BEGIN
VALUE "Translation", 0x409, 1200
END
END

#define IDI_ICON 0x101
IDI_ICON ICON "assets/nu_logo.ico"
#endif
build.rs (8 changed lines)
@@ -1,6 +1,12 @@
#[cfg(windows)]
fn main() {
embed_resource::compile_for("assets/nushell.rc", &["nu"])
let mut res = winres::WindowsResource::new();
res.set("ProductName", "Nushell");
res.set("FileDescription", "Nushell");
res.set("LegalCopyright", "Copyright (C) 2022");
res.set_icon("assets/nu_logo.ico");
res.compile()
.expect("Failed to run the Windows resource compiler (rc.exe)");
}

#[cfg(not(windows))]
@@ -4,21 +4,25 @@ description = "CLI-related functionality for Nushell"
edition = "2021"
license = "MIT"
name = "nu-cli"
version = "0.61.0"
version = "0.63.1"

[dev-dependencies]
nu-test-support = { path="../nu-test-support", version = "0.63.1" }
nu-command = { path = "../nu-command", version = "0.63.1" }

[dependencies]
nu-engine = { path = "../nu-engine", version = "0.61.0" }
nu-path = { path = "../nu-path", version = "0.61.0" }
nu-parser = { path = "../nu-parser", version = "0.61.0" }
nu-protocol = { path = "../nu-protocol", version = "0.61.0" }
nu-utils = { path = "../nu-utils", version = "0.61.0" }
nu-engine = { path = "../nu-engine", version = "0.63.1" }
nu-path = { path = "../nu-path", version = "0.63.1" }
nu-parser = { path = "../nu-parser", version = "0.63.1" }
nu-protocol = { path = "../nu-protocol", version = "0.63.1" }
nu-utils = { path = "../nu-utils", version = "0.63.1" }
nu-ansi-term = "0.45.1"
nu-color-config = { path = "../nu-color-config", version = "0.61.0" }

reedline = { git = "https://github.com/nushell/reedline", branch = "main", features = ["bashisms"]}
nu-color-config = { path = "../nu-color-config", version = "0.63.1" }
crossterm = "0.23.0"
miette = { version = "4.4.0", features = ["fancy"] }
miette = { version = "4.5.0", features = ["fancy"] }
thiserror = "1.0.29"
reedline = { version = "0.4.0", features = ["bashisms"]}
fuzzy-matcher = "0.3.7"

log = "0.4"
is_executable = "1.0.1"
@@ -2,11 +2,11 @@ use crate::util::report_error;
use log::info;
use miette::Result;
use nu_engine::{convert_env_values, eval_block};
use nu_parser::{parse, trim_quotes};
use nu_parser::parse;
use nu_protocol::engine::Stack;
use nu_protocol::{
engine::{EngineState, StateDelta, StateWorkingSet},
Config, PipelineData, Spanned,
PipelineData, Spanned, Value,
};
use std::path::Path;

@@ -17,21 +17,19 @@ pub fn evaluate_commands(
stack: &mut Stack,
input: PipelineData,
is_perf_true: bool,
table_mode: Option<Value>,
) -> Result<()> {
// Run a command (or commands) given to us by the user
let (block, delta) = {
if let Some(ref t_mode) = table_mode {
let mut config = engine_state.get_config().clone();
config.table_mode = t_mode.as_string()?;
engine_state.set_config(&config);
}

let mut working_set = StateWorkingSet::new(engine_state);

let (input, _) = if commands.item.starts_with('\'') || commands.item.starts_with('"') {
(
trim_quotes(commands.item.as_bytes()),
commands.span.start + 1,
)
} else {
(commands.item.as_bytes(), commands.span.start)
};

let (output, err) = parse(&mut working_set, None, input, false, &[]);
let (output, err) = parse(&mut working_set, None, commands.item.as_bytes(), false, &[]);
if let Some(err) = err {
report_error(&working_set, &err);

@@ -46,20 +44,17 @@ pub fn evaluate_commands(
report_error(&working_set, &err);
}

let config = match stack.get_config() {
Ok(config) => config,
Err(e) => {
let working_set = StateWorkingSet::new(engine_state);

report_error(&working_set, &e);
Config::default()
}
};
let mut config = engine_state.get_config().clone();
if let Some(t_mode) = table_mode {
config.table_mode = t_mode.as_string()?;
}

// Merge the delta in case env vars changed in the config
match nu_engine::env::current_dir(engine_state, stack) {
Ok(cwd) => {
if let Err(e) = engine_state.merge_delta(StateDelta::new(), Some(stack), cwd) {
if let Err(e) =
engine_state.merge_delta(StateDelta::new(engine_state), Some(stack), cwd)
{
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &e);
std::process::exit(1);
@@ -81,7 +76,7 @@ pub fn evaluate_commands(

match eval_block(engine_state, stack, &block, input, false, false) {
Ok(pipeline_data) => {
crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &config)
crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &mut config)
}
Err(err) => {
let working_set = StateWorkingSet::new(engine_state);
@@ -12,50 +12,19 @@ pub trait Completer {
span: Span,
offset: usize,
pos: usize,
) -> (Vec<Suggestion>, CompletionOptions);
options: &CompletionOptions,
) -> Vec<Suggestion>;

// Filter results using the completion options
fn filter(
&self,
prefix: Vec<u8>,
items: Vec<Suggestion>,
options: CompletionOptions,
) -> Vec<Suggestion> {
items
.into_iter()
.filter(|it| {
// Minimise clones for new functionality
match (options.case_sensitive, options.positional) {
(true, true) => it.value.as_bytes().starts_with(&prefix),
(true, false) => it
.value
.contains(std::str::from_utf8(&prefix).unwrap_or("")),
(false, positional) => {
let value = it.value.to_lowercase();
let prefix = std::str::from_utf8(&prefix).unwrap_or("").to_lowercase();
if positional {
value.starts_with(&prefix)
} else {
value.contains(&prefix)
}
}
}
})
.collect()
fn get_sort_by(&self) -> SortBy {
SortBy::Ascending
}

// Sort results using the completion options
fn sort(
&self,
items: Vec<Suggestion>,
prefix: Vec<u8>,
options: CompletionOptions,
) -> Vec<Suggestion> {
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
let mut filtered_items = items;

// Sort items
match options.sort_by {
match self.get_sort_by() {
SortBy::LevenshteinDistance => {
filtered_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.value);
@ -1,7 +1,5 @@
|
||||
use crate::completions::{
|
||||
file_completions::file_path_completion, Completer, CompletionOptions, SortBy,
|
||||
};
|
||||
use nu_parser::{trim_quotes, FlatShape};
|
||||
use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
|
||||
use nu_parser::FlatShape;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
Span,
|
||||
@ -12,7 +10,6 @@ use std::sync::Arc;
|
||||
pub struct CommandCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
flat_idx: usize,
|
||||
flat_shape: FlatShape,
|
||||
}
|
||||
|
||||
@ -21,21 +18,23 @@ impl CommandCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
_: &StateWorkingSet,
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
flat_idx: usize,
|
||||
flat_shape: FlatShape,
|
||||
) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
flattened,
|
||||
flat_idx,
|
||||
flat_shape,
|
||||
}
|
||||
}
|
||||
|
||||
fn external_command_completion(&self, prefix: &str) -> Vec<String> {
|
||||
fn external_command_completion(
|
||||
&self,
|
||||
prefix: &str,
|
||||
match_algorithm: MatchAlgorithm,
|
||||
) -> Vec<String> {
|
||||
let mut executables = vec![];
|
||||
|
||||
let paths = self.engine_state.env_vars.get("PATH");
|
||||
let paths = self.engine_state.get_env_var("PATH");
|
||||
|
||||
if let Some(paths) = paths {
|
||||
if let Ok(paths) = paths.as_list() {
|
||||
@ -53,7 +52,8 @@ impl CommandCompletion {
|
||||
) && matches!(
|
||||
item.path()
|
||||
.file_name()
|
||||
.map(|x| x.to_string_lossy().starts_with(prefix)),
|
||||
.map(|x| match_algorithm
|
||||
.matches_str(&x.to_string_lossy(), prefix)),
|
||||
Some(true)
|
||||
) && is_executable::is_executable(&item.path())
|
||||
{
|
||||
@ -76,11 +76,14 @@ impl CommandCompletion {
|
||||
span: Span,
|
||||
offset: usize,
|
||||
find_externals: bool,
|
||||
match_algorithm: MatchAlgorithm,
|
||||
) -> Vec<Suggestion> {
|
||||
let prefix = working_set.get_span_contents(span);
|
||||
let partial = working_set.get_span_contents(span);
|
||||
|
||||
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
|
||||
|
||||
let results = working_set
|
||||
.find_commands_by_prefix(prefix)
|
||||
.find_commands_by_predicate(filter_predicate)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: String::from_utf8_lossy(&x.0).to_string(),
|
||||
@ -90,40 +93,42 @@ impl CommandCompletion {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
});
|
||||
|
||||
let results_aliases =
|
||||
working_set
|
||||
.find_aliases_by_prefix(prefix)
|
||||
let results_aliases = working_set
|
||||
.find_aliases_by_predicate(filter_predicate)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: String::from_utf8_lossy(&x).to_string(),
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
});
|
||||
|
||||
let mut results = results.chain(results_aliases).collect::<Vec<_>>();
|
||||
|
||||
let partial = working_set.get_span_contents(span);
|
||||
let partial = String::from_utf8_lossy(partial).to_string();
|
||||
let results = if find_externals {
|
||||
let results_external = self
|
||||
.external_command_completion(&partial, match_algorithm)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: String::from_utf8_lossy(&x).to_string(),
|
||||
value: x,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
});
|
||||
|
||||
let mut results = results.chain(results_aliases).collect::<Vec<_>>();
|
||||
|
||||
let prefix = working_set.get_span_contents(span);
|
||||
let prefix = String::from_utf8_lossy(prefix).to_string();
|
||||
let results = if find_externals {
|
||||
let results_external =
|
||||
self.external_command_completion(&prefix)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: x,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
});
|
||||
|
||||
for external in results_external {
|
||||
if results.contains(&external) {
|
||||
results.push(Suggestion {
|
||||
@ -131,6 +136,7 @@ impl CommandCompletion {
|
||||
description: None,
|
||||
extra: None,
|
||||
span: external.span,
|
||||
append_whitespace: true,
|
||||
})
|
||||
} else {
|
||||
results.push(external)
|
||||
@ -150,11 +156,12 @@ impl Completer for CommandCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
prefix: Vec<u8>,
|
||||
_prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
) -> (Vec<Suggestion>, CompletionOptions) {
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let last = self
|
||||
.flattened
|
||||
.iter()
|
||||
@ -172,9 +179,6 @@ impl Completer for CommandCompletion {
|
||||
})
|
||||
.last();
|
||||
|
||||
// Options
|
||||
let options = CompletionOptions::new(true, true, SortBy::LevenshteinDistance);
|
||||
|
||||
// The last item here would be the earliest shape that could possibly be part of this subcommand
|
||||
let subcommands = if let Some(last) = last {
|
||||
self.complete_commands(
|
||||
@ -185,13 +189,14 @@ impl Completer for CommandCompletion {
|
||||
},
|
||||
offset,
|
||||
false,
|
||||
options.match_algorithm,
|
||||
)
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
if !subcommands.is_empty() {
|
||||
return (subcommands, options);
|
||||
return subcommands;
|
||||
}
|
||||
|
||||
let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
|
||||
@ -199,76 +204,18 @@ impl Completer for CommandCompletion {
|
||||
|| ((span.end - span.start) == 0)
|
||||
{
|
||||
// we're in a gap or at a command
|
||||
self.complete_commands(working_set, span, offset, true)
|
||||
self.complete_commands(working_set, span, offset, true, options.match_algorithm)
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
|
||||
match d.as_string() {
|
||||
Ok(s) => s,
|
||||
Err(_) => "".to_string(),
|
||||
}
|
||||
} else {
|
||||
"".to_string()
|
||||
};
|
||||
|
||||
let preceding_byte = if span.start > offset {
|
||||
working_set
|
||||
.get_span_contents(Span {
|
||||
start: span.start - 1,
|
||||
end: span.start,
|
||||
})
|
||||
.to_vec()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
// let prefix = working_set.get_span_contents(flat.0);
|
||||
let prefix = String::from_utf8_lossy(&prefix).to_string();
|
||||
let output = file_path_completion(span, &prefix, &cwd)
|
||||
subcommands
|
||||
.into_iter()
|
||||
.map(move |x| {
|
||||
if self.flat_idx == 0 {
|
||||
// We're in the command position
|
||||
if x.1.starts_with('"') && !matches!(preceding_byte.get(0), Some(b'^')) {
|
||||
let trimmed = trim_quotes(x.1.as_bytes());
|
||||
let trimmed = String::from_utf8_lossy(trimmed).to_string();
|
||||
let expanded = nu_path::canonicalize_with(trimmed, &cwd);
|
||||
|
||||
if let Ok(expanded) = expanded {
|
||||
if is_executable::is_executable(expanded) {
|
||||
(x.0, format!("^{}", x.1))
|
||||
} else {
|
||||
(x.0, x.1)
|
||||
}
|
||||
} else {
|
||||
(x.0, x.1)
|
||||
}
|
||||
} else {
|
||||
(x.0, x.1)
|
||||
}
|
||||
} else {
|
||||
(x.0, x.1)
|
||||
}
|
||||
})
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
})
|
||||
.chain(subcommands.into_iter())
|
||||
.chain(commands.into_iter())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
(output, options)
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
|
||||
// Replace base filter with no filter once all the results are already based in the current path
|
||||
fn filter(&self, _: Vec<u8>, items: Vec<Suggestion>, _: CompletionOptions) -> Vec<Suggestion> {
|
||||
items
|
||||
fn get_sort_by(&self) -> SortBy {
|
||||
SortBy::LevenshteinDistance
|
||||
}
|
||||
}
|
||||
|
@ -1,28 +1,27 @@
|
||||
use crate::completions::{
|
||||
CommandCompletion, Completer, CustomCompletion, FileCompletion, FlagCompletion,
|
||||
VariableCompletion,
|
||||
CommandCompletion, Completer, CompletionOptions, CustomCompletion, DirectoryCompletion,
|
||||
DotNuCompletion, FileCompletion, FlagCompletion, MatchAlgorithm, VariableCompletion,
|
||||
};
|
||||
use nu_parser::{flatten_expression, parse, FlatShape};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Span, Value,
|
||||
Span,
|
||||
};
|
||||
use reedline::{Completer as ReedlineCompleter, Suggestion};
|
||||
use std::str;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct NuCompleter {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
config: Option<Value>,
|
||||
}
|
||||
|
||||
impl NuCompleter {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack, config: Option<Value>) -> Self {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
config,
|
||||
}
|
||||
}
|
||||
|
||||
@ -36,15 +35,23 @@ impl NuCompleter {
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
) -> Vec<Suggestion> {
|
||||
// Fetch
|
||||
let (mut suggestions, options) =
|
||||
completer.fetch(working_set, prefix.clone(), new_span, offset, pos);
|
||||
let config = self.engine_state.get_config();
|
||||
|
||||
// Filter
|
||||
suggestions = completer.filter(prefix.clone(), suggestions, options.clone());
|
||||
let mut options = CompletionOptions {
|
||||
case_sensitive: config.case_sensitive_completions,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if config.completion_algorithm == "fuzzy" {
|
||||
options.match_algorithm = MatchAlgorithm::Fuzzy;
|
||||
}
|
||||
|
||||
// Fetch
|
||||
let mut suggestions =
|
||||
completer.fetch(working_set, prefix.clone(), new_span, offset, pos, &options);
|
||||
|
||||
// Sort
|
||||
suggestions = completer.sort(suggestions, prefix, options);
|
||||
suggestions = completer.sort(suggestions, prefix);
|
||||
|
||||
suggestions
|
||||
}
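A minimal, self-contained sketch of the flow introduced in the hunk above, with simplified stand-in types rather than the actual nu-cli ones: the caller now derives a single CompletionOptions from the user's config and passes it to every completer through fetch, instead of each completer returning its own options.

```rust
// Simplified illustration of config -> CompletionOptions (not the real nu-cli types).
#[derive(Clone, Copy, Debug, PartialEq)]
enum MatchAlgorithm {
    Prefix,
    Fuzzy,
}

struct CompletionOptions {
    case_sensitive: bool,
    match_algorithm: MatchAlgorithm,
}

fn options_from_config(case_sensitive_completions: bool, completion_algorithm: &str) -> CompletionOptions {
    CompletionOptions {
        case_sensitive: case_sensitive_completions,
        // Anything other than "fuzzy" falls back to prefix matching.
        match_algorithm: if completion_algorithm == "fuzzy" {
            MatchAlgorithm::Fuzzy
        } else {
            MatchAlgorithm::Prefix
        },
    }
}

fn main() {
    let opts = options_from_config(true, "fuzzy");
    assert!(opts.case_sensitive);
    assert_eq!(opts.match_algorithm, MatchAlgorithm::Fuzzy);
}
```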
|
||||
@ -52,6 +59,7 @@ impl NuCompleter {
|
||||
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
|
||||
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
||||
let offset = working_set.next_span_start();
|
||||
let initial_line = line.to_string();
|
||||
let mut line = line.to_string();
|
||||
line.insert(pos, 'a');
|
||||
let pos = offset + pos;
|
||||
@ -69,6 +77,10 @@ impl NuCompleter {
|
||||
|
||||
for (flat_idx, flat) in flattened.iter().enumerate() {
|
||||
if pos >= flat.0.start && pos < flat.0.end {
|
||||
// Context variables
|
||||
let most_left_var =
|
||||
most_left_variable(flat_idx, &working_set, flattened.clone());
|
||||
|
||||
// Create a new span
|
||||
let new_span = Span {
|
||||
start: flat.0.start,
|
||||
@ -79,9 +91,37 @@ impl NuCompleter {
|
||||
let mut prefix = working_set.get_span_contents(flat.0).to_vec();
|
||||
prefix.remove(pos - flat.0.start);
|
||||
|
||||
// Completions that depend on the previous expression (e.g: use, source)
|
||||
if flat_idx > 0 {
|
||||
if let Some(previous_expr) = flattened.get(flat_idx - 1) {
|
||||
// Read the content for the previous expression
|
||||
let prev_expr_str =
|
||||
working_set.get_span_contents(previous_expr.0).to_vec();
|
||||
|
||||
// Completion for .nu files
|
||||
if prev_expr_str == b"use" || prev_expr_str == b"source" {
|
||||
let mut completer =
|
||||
DotNuCompletion::new(self.engine_state.clone());
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Variables completion
|
||||
if prefix.starts_with(b"$") {
|
||||
let mut completer = VariableCompletion::new(self.engine_state.clone());
|
||||
if prefix.starts_with(b"$") || most_left_var.is_some() {
|
||||
let mut completer = VariableCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
most_left_var.unwrap_or((vec![], vec![])),
|
||||
);
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
@ -113,9 +153,8 @@ impl NuCompleter {
|
||||
let mut completer = CustomCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
self.config.clone(),
|
||||
*decl_id,
|
||||
line,
|
||||
initial_line,
|
||||
);
|
||||
|
||||
return self.process_completion(
|
||||
@ -127,8 +166,9 @@ impl NuCompleter {
|
||||
pos,
|
||||
);
|
||||
}
|
||||
FlatShape::Filepath | FlatShape::GlobPattern => {
|
||||
let mut completer = FileCompletion::new(self.engine_state.clone());
|
||||
FlatShape::Directory => {
|
||||
let mut completer =
|
||||
DirectoryCompletion::new(self.engine_state.clone());
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
@ -144,18 +184,34 @@ impl NuCompleter {
|
||||
self.engine_state.clone(),
|
||||
&working_set,
|
||||
flattened.clone(),
|
||||
flat_idx,
|
||||
// flat_idx,
|
||||
flat_shape.clone(),
|
||||
);
|
||||
|
||||
return self.process_completion(
|
||||
let out: Vec<_> = self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
prefix.clone(),
|
||||
new_span,
|
||||
offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
if out.is_empty() {
|
||||
let mut completer =
|
||||
FileCompletion::new(self.engine_state.clone());
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -172,3 +228,53 @@ impl ReedlineCompleter for NuCompleter {
|
||||
self.completion_helper(line, pos)
|
||||
}
|
||||
}
|
||||
|
||||
// reads the most left variable returning its name (e.g: $myvar)
|
||||
// and the depth (a.b.c)
|
||||
fn most_left_variable(
|
||||
idx: usize,
|
||||
working_set: &StateWorkingSet<'_>,
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
) -> Option<(Vec<u8>, Vec<Vec<u8>>)> {
|
||||
// Reverse items to read the list backwards and truncate
|
||||
// because the only items that matters are the ones before the current index
|
||||
let mut rev = flattened;
|
||||
rev.truncate(idx);
|
||||
rev = rev.into_iter().rev().collect();
|
||||
|
||||
// Store the variables and sub levels found and reverse to correct order
|
||||
let mut variables_found: Vec<Vec<u8>> = vec![];
|
||||
let mut found_var = false;
|
||||
for item in rev.clone() {
|
||||
let result = working_set.get_span_contents(item.0).to_vec();
|
||||
|
||||
match item.1 {
|
||||
FlatShape::Variable => {
|
||||
variables_found.push(result);
|
||||
found_var = true;
|
||||
|
||||
break;
|
||||
}
|
||||
FlatShape::String => {
|
||||
variables_found.push(result);
|
||||
}
|
||||
_ => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If most left var was not found
|
||||
if !found_var {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Reverse the order back
|
||||
variables_found = variables_found.into_iter().rev().collect();
|
||||
|
||||
// Extract the variable and the sublevels
|
||||
let var = variables_found.first().unwrap_or(&vec![]).to_vec();
|
||||
let sublevels: Vec<Vec<u8>> = variables_found.into_iter().skip(1).collect();
|
||||
|
||||
Some((var, sublevels))
|
||||
}
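A hedged illustration of the backward scan that most_left_variable performs, using plain strings in place of nu's Span/FlatShape types: starting just before the cursor token, member names are collected until the variable itself is found, giving the variable plus its sublevels.

```rust
// Stand-in token kinds; the real code uses nu_parser::FlatShape.
#[derive(Clone, Copy, PartialEq)]
enum Kind {
    Variable,
    Str,
    Other,
}

// Walk backwards from `idx`, collecting member names until the variable is found.
fn most_left_variable(idx: usize, tokens: &[(&str, Kind)]) -> Option<(String, Vec<String>)> {
    let mut found = Vec::new();
    for &(text, kind) in tokens[..idx].iter().rev() {
        match kind {
            Kind::Variable => {
                found.push(text.to_string());
                found.reverse();
                let var = found.remove(0);
                return Some((var, found));
            }
            Kind::Str => found.push(text.to_string()),
            Kind::Other => return None,
        }
    }
    None
}

fn main() {
    // Completing "$env.PATH.<tab>": the cursor token sits at index 2.
    let tokens = [("$env", Kind::Variable), ("PATH", Kind::Str), ("", Kind::Str)];
    assert_eq!(
        most_left_variable(2, &tokens),
        Some(("$env".to_string(), vec!["PATH".to_string()]))
    );
}
```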
|
||||
|
@ -1,25 +1,93 @@
|
||||
#[derive(Clone)]
|
||||
use std::fmt::Display;
|
||||
|
||||
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
|
||||
use nu_parser::trim_quotes_str;
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum SortBy {
|
||||
LevenshteinDistance,
|
||||
Ascending,
|
||||
None,
|
||||
}
|
||||
|
||||
/// Describes how suggestions should be matched.
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum MatchAlgorithm {
|
||||
/// Only show suggestions which begin with the given input
|
||||
///
|
||||
/// Example:
|
||||
/// "git switch" is matched by "git sw"
|
||||
Prefix,
|
||||
|
||||
/// Only show suggestions which contain the input chars at any place
|
||||
///
|
||||
/// Example:
|
||||
/// "git checkout" is matched by "gco"
|
||||
Fuzzy,
|
||||
}
|
||||
|
||||
impl MatchAlgorithm {
|
||||
/// Returns whether the `needle` search text matches the given `haystack`.
|
||||
pub fn matches_str(&self, haystack: &str, needle: &str) -> bool {
|
||||
let haystack = trim_quotes_str(haystack);
|
||||
let needle = trim_quotes_str(needle);
|
||||
match *self {
|
||||
MatchAlgorithm::Prefix => haystack.starts_with(needle),
|
||||
MatchAlgorithm::Fuzzy => {
|
||||
let matcher = SkimMatcherV2::default();
|
||||
matcher.fuzzy_match(haystack, needle).is_some()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the `needle` search text matches the given `haystack`.
|
||||
pub fn matches_u8(&self, haystack: &[u8], needle: &[u8]) -> bool {
|
||||
match *self {
|
||||
MatchAlgorithm::Prefix => haystack.starts_with(needle),
|
||||
MatchAlgorithm::Fuzzy => {
|
||||
let haystack_str = String::from_utf8_lossy(haystack);
|
||||
let needle_str = String::from_utf8_lossy(needle);
|
||||
|
||||
let matcher = SkimMatcherV2::default();
|
||||
matcher.fuzzy_match(&haystack_str, &needle_str).is_some()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<String> for MatchAlgorithm {
|
||||
type Error = InvalidMatchAlgorithm;
|
||||
|
||||
fn try_from(value: String) -> Result<Self, Self::Error> {
|
||||
match value.as_str() {
|
||||
"prefix" => Ok(Self::Prefix),
|
||||
"fuzzy" => Ok(Self::Fuzzy),
|
||||
_ => Err(InvalidMatchAlgorithm::Unknown),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum InvalidMatchAlgorithm {
|
||||
Unknown,
|
||||
}
|
||||
|
||||
impl Display for InvalidMatchAlgorithm {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match *self {
|
||||
InvalidMatchAlgorithm::Unknown => write!(f, "unknown match algorithm"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for InvalidMatchAlgorithm {}
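A small sketch of how these pieces are expected to fit together: the `completion_algorithm` config string is parsed through the TryFrom impl above, and the resulting MatchAlgorithm drives matches_str. This reuses the definitions from this file; only the helper and the fallback choice below are illustrative.

```rust
// Map a config string to an algorithm, falling back to Prefix on unknown values.
fn algorithm_from_config(value: &str) -> MatchAlgorithm {
    MatchAlgorithm::try_from(value.to_string()).unwrap_or(MatchAlgorithm::Prefix)
}

fn demo() {
    assert!(matches!(algorithm_from_config("fuzzy"), MatchAlgorithm::Fuzzy));
    assert!(matches!(algorithm_from_config("unknown"), MatchAlgorithm::Prefix));

    // Prefix matching only accepts needles anchored at the start of the haystack.
    assert!(MatchAlgorithm::Prefix.matches_str("switch", "sw"));
    assert!(!MatchAlgorithm::Prefix.matches_str("switch", "itch"));
}
```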
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CompletionOptions {
|
||||
pub case_sensitive: bool,
|
||||
pub positional: bool,
|
||||
pub sort_by: SortBy,
|
||||
}
|
||||
|
||||
impl CompletionOptions {
|
||||
pub fn new(case_sensitive: bool, positional: bool, sort_by: SortBy) -> Self {
|
||||
Self {
|
||||
case_sensitive,
|
||||
positional,
|
||||
sort_by,
|
||||
}
|
||||
}
|
||||
pub match_algorithm: MatchAlgorithm,
|
||||
}
|
||||
|
||||
impl Default for CompletionOptions {
|
||||
@ -28,6 +96,42 @@ impl Default for CompletionOptions {
|
||||
case_sensitive: true,
|
||||
positional: true,
|
||||
sort_by: SortBy::Ascending,
|
||||
match_algorithm: MatchAlgorithm::Prefix,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::MatchAlgorithm;
|
||||
|
||||
#[test]
|
||||
fn match_algorithm_prefix() {
|
||||
let algorithm = MatchAlgorithm::Prefix;
|
||||
|
||||
assert!(algorithm.matches_str("example text", ""));
|
||||
assert!(algorithm.matches_str("example text", "examp"));
|
||||
assert!(!algorithm.matches_str("example text", "text"));
|
||||
|
||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
|
||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
|
||||
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn match_algorithm_fuzzy() {
|
||||
let algorithm = MatchAlgorithm::Fuzzy;
|
||||
|
||||
assert!(algorithm.matches_str("example text", ""));
|
||||
assert!(algorithm.matches_str("example text", "examp"));
|
||||
assert!(algorithm.matches_str("example text", "ext"));
|
||||
assert!(algorithm.matches_str("example text", "mplxt"));
|
||||
assert!(!algorithm.matches_str("example text", "mpp"));
|
||||
|
||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
|
||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
|
||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
|
||||
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 3]));
|
||||
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 2]));
|
||||
}
|
||||
}
|
||||
|
@ -1,9 +1,9 @@
|
||||
use crate::completions::{Completer, CompletionOptions, SortBy};
|
||||
use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
|
||||
use nu_engine::eval_call;
|
||||
use nu_protocol::{
|
||||
ast::{Argument, Call, Expr, Expression},
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
PipelineData, Span, Type, Value, CONFIG_VARIABLE_ID,
|
||||
PipelineData, Span, Type, Value,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::sync::Arc;
|
||||
@ -11,25 +11,19 @@ use std::sync::Arc;
|
||||
pub struct CustomCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
config: Option<Value>,
|
||||
decl_id: usize,
|
||||
line: String,
|
||||
sort_by: SortBy,
|
||||
}
|
||||
|
||||
impl CustomCompletion {
|
||||
pub fn new(
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
config: Option<Value>,
|
||||
decl_id: usize,
|
||||
line: String,
|
||||
) -> Self {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
config,
|
||||
decl_id,
|
||||
line,
|
||||
sort_by: SortBy::None,
|
||||
}
|
||||
}
|
||||
|
||||
@ -40,10 +34,9 @@ impl CustomCompletion {
|
||||
offset: usize,
|
||||
) -> Vec<Suggestion> {
|
||||
list.filter_map(move |x| {
|
||||
let s = x.as_string();
|
||||
|
||||
match s {
|
||||
Ok(s) => Some(Suggestion {
|
||||
// Match for string values
|
||||
if let Ok(s) = x.as_string() {
|
||||
return Some(Suggestion {
|
||||
value: s,
|
||||
description: None,
|
||||
extra: None,
|
||||
@ -51,9 +44,48 @@ impl CustomCompletion {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
}),
|
||||
Err(_) => None,
|
||||
append_whitespace: false,
|
||||
});
|
||||
}
|
||||
|
||||
// Match for record values
|
||||
if let Ok((cols, vals)) = x.as_record() {
|
||||
let mut suggestion = Suggestion {
|
||||
value: String::from(""), // Initialize with empty string
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
};
|
||||
|
||||
// Iterate the cols looking for `value` and `description`
|
||||
cols.iter().zip(vals).for_each(|it| {
|
||||
// Match `value` column
|
||||
if it.0 == "value" {
|
||||
// Convert the value to string
|
||||
if let Ok(val_str) = it.1.as_string() {
|
||||
// Update the suggestion value
|
||||
suggestion.value = val_str;
|
||||
}
|
||||
}
|
||||
|
||||
// Match `description` column
|
||||
if it.0 == "description" {
|
||||
// Convert the value to string
|
||||
if let Ok(desc_str) = it.1.as_string() {
|
||||
// Update the suggestion value
|
||||
suggestion.description = Some(desc_str);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return Some(suggestion);
|
||||
}
|
||||
|
||||
None
|
||||
})
|
||||
.collect()
|
||||
}
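A hedged sketch of the item shape map_completions now accepts: each value coming back from a custom completer can be a bare string or a record with `value` and `description` columns. The types below are simplified stand-ins, not nu_protocol's Value or reedline's Suggestion.

```rust
struct Suggestion {
    value: String,
    description: Option<String>,
}

enum Item {
    Str(String),
    Record(Vec<(String, String)>), // (column name, stringified value)
}

fn to_suggestion(item: Item) -> Option<Suggestion> {
    match item {
        Item::Str(s) => Some(Suggestion { value: s, description: None }),
        Item::Record(cols) => {
            let mut s = Suggestion { value: String::new(), description: None };
            // Only the `value` and `description` columns are read; others are ignored.
            for (col, val) in cols {
                match col.as_str() {
                    "value" => s.value = val,
                    "description" => s.description = Some(val),
                    _ => {}
                }
            }
            Some(s)
        }
    }
}

fn main() {
    let rec = Item::Record(vec![
        ("value".into(), "--force".into()),
        ("description".into(), "overwrite existing files".into()),
    ]);
    let s = to_suggestion(rec).unwrap();
    assert_eq!(s.value, "--force");
    assert_eq!(s.description.as_deref(), Some("overwrite existing files"));
}
```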
|
||||
@ -63,28 +95,15 @@ impl Completer for CustomCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
_: &StateWorkingSet,
|
||||
_: Vec<u8>,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
) -> (Vec<Suggestion>, CompletionOptions) {
|
||||
completion_options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
// Line position
|
||||
let line_pos = pos - offset;
|
||||
|
||||
// Set up our initial config to start from
|
||||
if let Some(conf) = &self.config {
|
||||
self.stack.vars.insert(CONFIG_VARIABLE_ID, conf.clone());
|
||||
} else {
|
||||
self.stack.vars.insert(
|
||||
CONFIG_VARIABLE_ID,
|
||||
Value::Record {
|
||||
cols: vec![],
|
||||
vals: vec![],
|
||||
span: Span { start: 0, end: 0 },
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
// Call custom declaration
|
||||
let result = eval_call(
|
||||
&self.engine_state,
|
||||
@ -112,8 +131,10 @@ impl Completer for CustomCompletion {
|
||||
PipelineData::new(span),
|
||||
);
|
||||
|
||||
let mut custom_completion_options = None;
|
||||
|
||||
// Parse result
|
||||
let (suggestions, options) = match result {
|
||||
let suggestions = match result {
|
||||
Ok(pd) => {
|
||||
let value = pd.into_value(span);
|
||||
match &value {
|
||||
@ -128,14 +149,18 @@ impl Completer for CustomCompletion {
|
||||
.unwrap_or_default();
|
||||
let options = value.get_data_by_key("options");
|
||||
|
||||
let options = if let Some(Value::Record { .. }) = &options {
|
||||
if let Some(Value::Record { .. }) = &options {
|
||||
let options = options.unwrap_or_default();
|
||||
let should_sort = options
|
||||
.get_data_by_key("sort")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(false);
|
||||
|
||||
CompletionOptions {
|
||||
if should_sort {
|
||||
self.sort_by = SortBy::Ascending;
|
||||
}
|
||||
|
||||
custom_completion_options = Some(CompletionOptions {
|
||||
case_sensitive: options
|
||||
.get_data_by_key("case_sensitive")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
@ -149,23 +174,60 @@ impl Completer for CustomCompletion {
|
||||
} else {
|
||||
SortBy::None
|
||||
},
|
||||
}
|
||||
} else {
|
||||
CompletionOptions::default()
|
||||
};
|
||||
match_algorithm: match options
|
||||
.get_data_by_key("completion_algorithm")
|
||||
{
|
||||
Some(option) => option
|
||||
.as_string()
|
||||
.ok()
|
||||
.and_then(|option| option.try_into().ok())
|
||||
.unwrap_or(MatchAlgorithm::Prefix),
|
||||
None => completion_options.match_algorithm,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
(completions, options)
|
||||
completions
|
||||
}
|
||||
Value::List { vals, .. } => {
|
||||
let completions = self.map_completions(vals.iter(), span, offset);
|
||||
(completions, CompletionOptions::default())
|
||||
}
|
||||
_ => (vec![], CompletionOptions::default()),
|
||||
Value::List { vals, .. } => self.map_completions(vals.iter(), span, offset),
|
||||
_ => vec![],
|
||||
}
|
||||
}
|
||||
_ => (vec![], CompletionOptions::default()),
|
||||
_ => vec![],
|
||||
};
|
||||
|
||||
(suggestions, options)
|
||||
if let Some(custom_completion_options) = custom_completion_options {
|
||||
filter(&prefix, suggestions, &custom_completion_options)
|
||||
} else {
|
||||
filter(&prefix, suggestions, completion_options)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_sort_by(&self) -> SortBy {
|
||||
self.sort_by
|
||||
}
|
||||
}
|
||||
|
||||
fn filter(prefix: &[u8], items: Vec<Suggestion>, options: &CompletionOptions) -> Vec<Suggestion> {
|
||||
items
|
||||
.into_iter()
|
||||
.filter(|it| match options.match_algorithm {
|
||||
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
|
||||
(true, true) => it.value.as_bytes().starts_with(prefix),
|
||||
(true, false) => it.value.contains(std::str::from_utf8(prefix).unwrap_or("")),
|
||||
(false, positional) => {
|
||||
let value = it.value.to_lowercase();
|
||||
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_lowercase();
|
||||
if positional {
|
||||
value.starts_with(&prefix)
|
||||
} else {
|
||||
value.contains(&prefix)
|
||||
}
|
||||
}
|
||||
},
|
||||
MatchAlgorithm::Fuzzy => options
|
||||
.match_algorithm
|
||||
.matches_u8(it.value.as_bytes(), prefix),
|
||||
})
|
||||
.collect()
|
||||
}
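For reference, a condensed sketch of what the prefix branch of the filter above is expected to do when case sensitivity is off (illustrative only, with the positional flag deciding between anchored and substring matches):

```rust
fn keeps(value: &str, prefix: &str, positional: bool) -> bool {
    let value = value.to_lowercase();
    let prefix = prefix.to_lowercase();
    if positional {
        value.starts_with(&prefix) // anchored at the start
    } else {
        value.contains(&prefix) // anywhere in the suggestion
    }
}

fn main() {
    assert!(keeps("git status", "GIT", true));
    assert!(!keeps("legit", "GIT", true));
    assert!(keeps("legit", "GIT", false));
}
```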
|
||||
|
crates/nu-cli/src/completions/directory_completions.rs (new file, 160 lines)
@ -0,0 +1,160 @@
|
||||
use crate::completions::{matches, Completer, CompletionOptions};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
levenshtein_distance, Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::{partial_from, prepend_base_dir};
|
||||
|
||||
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DirectoryCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
}
|
||||
|
||||
impl DirectoryCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>) -> Self {
|
||||
Self { engine_state }
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for DirectoryCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
_: &StateWorkingSet,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let cwd = if let Some(d) = self.engine_state.get_env_var("PWD") {
|
||||
match d.as_string() {
|
||||
Ok(s) => s,
|
||||
Err(_) => "".to_string(),
|
||||
}
|
||||
} else {
|
||||
"".to_string()
|
||||
};
|
||||
let partial = String::from_utf8_lossy(&prefix).to_string();
|
||||
|
||||
// Filter only the folders
|
||||
let output: Vec<_> = directory_completion(span, &partial, &cwd, options)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
// Sort results prioritizing the non hidden folders
|
||||
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
|
||||
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
|
||||
|
||||
// Sort items
|
||||
let mut sorted_items = items;
|
||||
sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
|
||||
sorted_items.sort_by(|a, b| {
|
||||
let a_distance = levenshtein_distance(&prefix_str, &a.value);
|
||||
let b_distance = levenshtein_distance(&prefix_str, &b.value);
|
||||
a_distance.cmp(&b_distance)
|
||||
});
|
||||
|
||||
// Separate the results between hidden and non hidden
|
||||
let mut hidden: Vec<Suggestion> = vec![];
|
||||
let mut non_hidden: Vec<Suggestion> = vec![];
|
||||
|
||||
for item in sorted_items.into_iter() {
|
||||
let item_path = Path::new(&item.value);
|
||||
|
||||
if let Some(value) = item_path.file_name() {
|
||||
if let Some(value) = value.to_str() {
|
||||
if value.starts_with('.') {
|
||||
hidden.push(item);
|
||||
} else {
|
||||
non_hidden.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Append the hidden folders to the non hidden vec to avoid creating a new vec
|
||||
non_hidden.append(&mut hidden);
|
||||
|
||||
non_hidden
|
||||
}
|
||||
}
|
||||
|
||||
pub fn directory_completion(
|
||||
span: nu_protocol::Span,
|
||||
partial: &str,
|
||||
cwd: &str,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<(nu_protocol::Span, String)> {
|
||||
let original_input = partial;
|
||||
|
||||
let (base_dir_name, partial) = partial_from(partial);
|
||||
|
||||
let base_dir = nu_path::expand_path_with(&base_dir_name, cwd);
|
||||
|
||||
// This check is here as base_dir.read_dir() with base_dir == "" will open the current dir
|
||||
// which we don't want in this case (if we did, base_dir would already be ".")
|
||||
if base_dir == Path::new("") {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
if let Ok(result) = base_dir.read_dir() {
|
||||
return result
|
||||
.filter_map(|entry| {
|
||||
entry.ok().and_then(|entry| {
|
||||
if let Ok(metadata) = fs::metadata(entry.path()) {
|
||||
if metadata.is_dir() {
|
||||
let mut file_name = entry.file_name().to_string_lossy().into_owned();
|
||||
if matches(&partial, &file_name, options) {
|
||||
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
|
||||
format!("{}{}", base_dir_name, file_name)
|
||||
} else {
|
||||
file_name.to_string()
|
||||
};
|
||||
|
||||
if entry.path().is_dir() {
|
||||
path.push(SEP);
|
||||
file_name.push(SEP);
|
||||
}
|
||||
|
||||
// Fix files or folders with quotes
|
||||
if path.contains('\'') || path.contains('"') || path.contains(' ') {
|
||||
path = format!("`{}`", path);
|
||||
}
|
||||
|
||||
Some((span, path))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
|
||||
Vec::new()
|
||||
}
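The quoting rule used inside directory_completion above, pulled out as a standalone helper for illustration: any completed path containing a quote or a space is wrapped in backticks so the shell keeps it as a single token.

```rust
fn quote_if_needed(path: String) -> String {
    if path.contains('\'') || path.contains('"') || path.contains(' ') {
        format!("`{}`", path)
    } else {
        path
    }
}

fn main() {
    assert_eq!(quote_if_needed("My Documents/".into()), "`My Documents/`");
    assert_eq!(quote_if_needed("src/".into()), "src/");
}
```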
|
crates/nu-cli/src/completions/dotnu_completions.rs (new file, 124 lines)
@ -0,0 +1,124 @@
|
||||
use crate::completions::{
|
||||
file_path_completion, partial_from, Completer, CompletionOptions, SortBy,
|
||||
};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::sync::Arc;
|
||||
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DotNuCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
}
|
||||
|
||||
impl DotNuCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>) -> Self {
|
||||
Self { engine_state }
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for DotNuCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
_: &StateWorkingSet,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
|
||||
let mut search_dirs: Vec<String> = vec![];
|
||||
let (base_dir, mut partial) = partial_from(&prefix_str);
|
||||
let mut is_current_folder = false;
|
||||
|
||||
// Fetch the lib dirs
|
||||
let lib_dirs: Vec<String> =
|
||||
if let Some(lib_dirs) = self.engine_state.get_env_var("NU_LIB_DIRS") {
|
||||
lib_dirs
|
||||
.as_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| {
|
||||
it.iter().map(|x| {
|
||||
x.as_path()
|
||||
.expect("internal error: failed to convert lib path")
|
||||
})
|
||||
})
|
||||
.map(|it| {
|
||||
it.into_os_string()
|
||||
.into_string()
|
||||
.expect("internal error: failed to convert OS path")
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
// Check if the base_dir is a folder
|
||||
if base_dir != format!(".{}", SEP) {
|
||||
// Add the base dir into the directories to be searched
|
||||
search_dirs.push(base_dir.clone());
|
||||
|
||||
// Reset the partial adding the basic dir back
|
||||
// in order to make the span replace work properly
|
||||
let mut base_dir_partial = base_dir;
|
||||
base_dir_partial.push_str(&partial);
|
||||
|
||||
partial = base_dir_partial;
|
||||
} else {
|
||||
// Fetch the current folder
|
||||
let current_folder = if let Some(d) = self.engine_state.get_env_var("PWD") {
|
||||
match d.as_string() {
|
||||
Ok(s) => s,
|
||||
Err(_) => "".to_string(),
|
||||
}
|
||||
} else {
|
||||
"".to_string()
|
||||
};
|
||||
is_current_folder = true;
|
||||
|
||||
// Add the current folder and the lib dirs into the
|
||||
// directories to be searched
|
||||
search_dirs.push(current_folder);
|
||||
search_dirs.extend(lib_dirs);
|
||||
}
|
||||
|
||||
// Fetch the files filtering the ones that ends with .nu
|
||||
// and transform them into suggestions
|
||||
let output: Vec<Suggestion> = search_dirs
|
||||
.into_iter()
|
||||
.flat_map(|it| {
|
||||
file_path_completion(span, &partial, &it, options)
|
||||
.into_iter()
|
||||
.filter(|it| {
|
||||
// Different base dir, so we list the .nu files or folders
|
||||
if !is_current_folder {
|
||||
it.1.ends_with(".nu") || it.1.ends_with(SEP)
|
||||
} else {
|
||||
// Lib dirs, so we filter only the .nu files
|
||||
it.1.ends_with(".nu")
|
||||
}
|
||||
})
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
fn get_sort_by(&self) -> SortBy {
|
||||
SortBy::LevenshteinDistance
|
||||
}
|
||||
}
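A condensed sketch of the filtering rule above (illustrative, not the exact nu-cli code): when searching the current folder and NU_LIB_DIRS only .nu files are offered, while an explicit sub-path also offers directories so the user can keep drilling down.

```rust
const SEP: char = std::path::MAIN_SEPARATOR;

fn keep(entry: &str, is_current_folder: bool) -> bool {
    if !is_current_folder {
        // Explicit base dir: show .nu files and nested directories.
        entry.ends_with(".nu") || entry.ends_with(SEP)
    } else {
        // Current folder / lib dirs: only .nu files.
        entry.ends_with(".nu")
    }
}

fn main() {
    assert!(keep("utils.nu", true));
    assert!(!keep("notes.txt", true));
    assert!(keep(&format!("scripts{}", SEP), false));
}
```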
|
@ -28,8 +28,9 @@ impl Completer for FileCompletion {
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
) -> (Vec<Suggestion>, CompletionOptions) {
|
||||
let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let cwd = if let Some(d) = self.engine_state.get_env_var("PWD") {
|
||||
match d.as_string() {
|
||||
Ok(s) => s,
|
||||
Err(_) => "".to_string(),
|
||||
@ -38,7 +39,7 @@ impl Completer for FileCompletion {
|
||||
"".to_string()
|
||||
};
|
||||
let prefix = String::from_utf8_lossy(&prefix).to_string();
|
||||
let output: Vec<_> = file_path_completion(span, &prefix, &cwd)
|
||||
let output: Vec<_> = file_path_completion(span, &prefix, &cwd, options)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
@ -48,26 +49,20 @@ impl Completer for FileCompletion {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Options
|
||||
let options = CompletionOptions::default();
|
||||
|
||||
(output, options)
|
||||
output
|
||||
}
|
||||
|
||||
// Sort results prioritizing the non hidden folders
|
||||
fn sort(
|
||||
&self,
|
||||
items: Vec<Suggestion>,
|
||||
prefix: Vec<u8>,
|
||||
_: CompletionOptions, // Ignore the given options, once it's a custom sorting
|
||||
) -> Vec<Suggestion> {
|
||||
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
|
||||
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
|
||||
|
||||
// Sort items
|
||||
let mut sorted_items = items;
|
||||
sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
|
||||
sorted_items.sort_by(|a, b| {
|
||||
let a_distance = levenshtein_distance(&prefix_str, &a.value);
|
||||
let b_distance = levenshtein_distance(&prefix_str, &b.value);
|
||||
@ -97,30 +92,30 @@ impl Completer for FileCompletion {
|
||||
|
||||
non_hidden
|
||||
}
|
||||
}
|
||||
|
||||
// Replace base filter with no filter once all the results are already based in the current path
|
||||
fn filter(&self, _: Vec<u8>, items: Vec<Suggestion>, _: CompletionOptions) -> Vec<Suggestion> {
|
||||
items
|
||||
}
|
||||
pub fn partial_from(input: &str) -> (String, String) {
|
||||
let partial = input.replace('`', "");
|
||||
|
||||
// If partial is only a word we want to search in the current dir
|
||||
let (base, rest) = partial.rsplit_once(is_separator).unwrap_or((".", &partial));
|
||||
// On windows, this standardizes paths to use \
|
||||
let mut base = base.replace(is_separator, &SEP.to_string());
|
||||
|
||||
// rsplit_once removes the separator
|
||||
base.push(SEP);
|
||||
|
||||
(base.to_string(), rest.to_string())
|
||||
}
|
||||
|
||||
pub fn file_path_completion(
|
||||
span: nu_protocol::Span,
|
||||
partial: &str,
|
||||
cwd: &str,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<(nu_protocol::Span, String)> {
|
||||
let partial = partial.replace('\'', "");
|
||||
|
||||
let (base_dir_name, partial) = {
|
||||
// If partial is only a word we want to search in the current dir
|
||||
let (base, rest) = partial.rsplit_once(is_separator).unwrap_or((".", &partial));
|
||||
// On windows, this standardizes paths to use \
|
||||
let mut base = base.replace(is_separator, &SEP.to_string());
|
||||
|
||||
// rsplit_once removes the separator
|
||||
base.push(SEP);
|
||||
(base, rest)
|
||||
};
|
||||
let original_input = partial;
|
||||
let (base_dir_name, partial) = partial_from(partial);
|
||||
|
||||
let base_dir = nu_path::expand_path_with(&base_dir_name, cwd);
|
||||
// This check is here as base_dir.read_dir() with base_dir == "" will open the current dir
|
||||
@ -134,15 +129,21 @@ pub fn file_path_completion(
|
||||
.filter_map(|entry| {
|
||||
entry.ok().and_then(|entry| {
|
||||
let mut file_name = entry.file_name().to_string_lossy().into_owned();
|
||||
if matches(partial, &file_name) {
|
||||
let mut path = format!("{}{}", base_dir_name, file_name);
|
||||
if matches(&partial, &file_name, options) {
|
||||
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
|
||||
format!("{}{}", base_dir_name, file_name)
|
||||
} else {
|
||||
file_name.to_string()
|
||||
};
|
||||
|
||||
if entry.path().is_dir() {
|
||||
path.push(SEP);
|
||||
file_name.push(SEP);
|
||||
}
|
||||
|
||||
if path.contains(' ') {
|
||||
path = format!("\'{}\'", path);
|
||||
// Fix files or folders with quotes
|
||||
if path.contains('\'') || path.contains('"') || path.contains(' ') {
|
||||
path = format!("`{}`", path);
|
||||
}
|
||||
|
||||
Some((span, path))
|
||||
@ -157,7 +158,33 @@ pub fn file_path_completion(
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
pub fn matches(partial: &str, from: &str) -> bool {
|
||||
from.to_ascii_lowercase()
|
||||
.starts_with(&partial.to_ascii_lowercase())
|
||||
pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
|
||||
// Check for case sensitive
|
||||
if !options.case_sensitive {
|
||||
return options
|
||||
.match_algorithm
|
||||
.matches_str(&from.to_ascii_lowercase(), &partial.to_ascii_lowercase());
|
||||
}
|
||||
|
||||
options.match_algorithm.matches_str(from, partial)
|
||||
}
|
||||
|
||||
/// Returns whether the base_dir should be prepended to the file path
|
||||
pub fn prepend_base_dir(input: &str, base_dir: &str) -> bool {
|
||||
if base_dir == format!(".{}", SEP) {
|
||||
// if the current base_dir path is the local folder we only add a "./" prefix if the user
|
||||
// input already includes a local folder prefix.
|
||||
let manually_entered = {
|
||||
let mut chars = input.chars();
|
||||
let first_char = chars.next();
|
||||
let second_char = chars.next();
|
||||
|
||||
first_char == Some('.') && second_char.map(is_separator).unwrap_or(false)
|
||||
};
|
||||
|
||||
manually_entered
|
||||
} else {
|
||||
// always prepend the base dir if it is a subfolder
|
||||
true
|
||||
}
|
||||
}
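A worked example of how partial_from and prepend_base_dir above are expected to cooperate, assuming Unix-style separators for the literal strings; it only calls the two public helpers defined in this file.

```rust
fn demo() {
    // Bare word: search the current dir, but don't force a "./" prefix onto results.
    assert_eq!(partial_from("ma"), ("./".to_string(), "ma".to_string()));
    assert!(!prepend_base_dir("ma", "./"));

    // The user already typed "./", so the prefix is kept.
    assert!(prepend_base_dir("./ma", "./"));

    // A real sub-directory is always prepended.
    assert_eq!(partial_from("src/ma"), ("src/".to_string(), "ma".to_string()));
    assert!(prepend_base_dir("src/ma", "src/"));
}
```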
|
||||
|
@ -26,7 +26,8 @@ impl Completer for FlagCompletion {
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
) -> (Vec<Suggestion>, CompletionOptions) {
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
// Check if it's a flag
|
||||
if let Expr::Call(call) = &self.expression.expr {
|
||||
let decl = working_set.get_decl(call.decl_id);
|
||||
@ -40,7 +41,8 @@ impl Completer for FlagCompletion {
|
||||
let mut named = vec![0; short.len_utf8()];
|
||||
short.encode_utf8(&mut named);
|
||||
named.insert(0, b'-');
|
||||
if named.starts_with(&prefix) {
|
||||
|
||||
if options.match_algorithm.matches_u8(&named, &prefix) {
|
||||
output.push(Suggestion {
|
||||
value: String::from_utf8_lossy(&named).to_string(),
|
||||
description: Some(flag_desc.to_string()),
|
||||
@ -49,6 +51,7 @@ impl Completer for FlagCompletion {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -60,7 +63,8 @@ impl Completer for FlagCompletion {
|
||||
let mut named = named.long.as_bytes().to_vec();
|
||||
named.insert(0, b'-');
|
||||
named.insert(0, b'-');
|
||||
if named.starts_with(&prefix) {
|
||||
|
||||
if options.match_algorithm.matches_u8(&named, &prefix) {
|
||||
output.push(Suggestion {
|
||||
value: String::from_utf8_lossy(&named).to_string(),
|
||||
description: Some(flag_desc.to_string()),
|
||||
@ -69,13 +73,14 @@ impl Completer for FlagCompletion {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return (output, CompletionOptions::default());
|
||||
return output;
|
||||
}
|
||||
|
||||
(vec![], CompletionOptions::default())
|
||||
vec![]
|
||||
}
|
||||
}
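A small sketch of how the flag byte strings above are built before the MatchAlgorithm is applied to them: short flags become "-x", long flags "--name" (simplified, outside the Signature types).

```rust
fn short_flag_bytes(short: char) -> Vec<u8> {
    let mut named = vec![0; short.len_utf8()];
    short.encode_utf8(&mut named);
    named.insert(0, b'-');
    named
}

fn long_flag_bytes(long: &str) -> Vec<u8> {
    let mut named = long.as_bytes().to_vec();
    named.insert(0, b'-');
    named.insert(0, b'-');
    named
}

fn main() {
    assert_eq!(short_flag_bytes('f'), b"-f".to_vec());
    assert_eq!(long_flag_bytes("force"), b"--force".to_vec());
    // A prefix match against what the user has typed so far:
    assert!(long_flag_bytes("force").starts_with(b"--fo"));
}
```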
|
||||
|
@ -3,6 +3,8 @@ mod command_completions;
|
||||
mod completer;
|
||||
mod completion_options;
|
||||
mod custom_completions;
|
||||
mod directory_completions;
|
||||
mod dotnu_completions;
|
||||
mod file_completions;
|
||||
mod flag_completions;
|
||||
mod variable_completions;
|
||||
@ -10,8 +12,12 @@ mod variable_completions;
|
||||
pub use base::Completer;
|
||||
pub use command_completions::CommandCompletion;
|
||||
pub use completer::NuCompleter;
|
||||
pub use completion_options::{CompletionOptions, SortBy};
|
||||
pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy};
|
||||
pub use custom_completions::CustomCompletion;
|
||||
pub use file_completions::{file_path_completion, FileCompletion};
|
||||
pub use directory_completions::DirectoryCompletion;
|
||||
pub use dotnu_completions::DotNuCompletion;
|
||||
pub use file_completions::{
|
||||
file_path_completion, matches, partial_from, prepend_base_dir, FileCompletion,
|
||||
};
|
||||
pub use flag_completions::FlagCompletion;
|
||||
pub use variable_completions::VariableCompletion;
|
||||
|
@ -1,19 +1,32 @@
|
||||
use crate::completions::{Completer, CompletionOptions};
|
||||
use nu_engine::eval_variable;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
Span,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Span, Value,
|
||||
};
|
||||
|
||||
use reedline::Suggestion;
|
||||
use std::str;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct VariableCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
engine_state: Arc<EngineState>, // TODO: Is engine state necessary? It's already a part of working set in fetch()
|
||||
stack: Stack,
|
||||
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
||||
}
|
||||
|
||||
impl VariableCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>) -> Self {
|
||||
Self { engine_state }
|
||||
pub fn new(
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
var_context: (Vec<u8>, Vec<Vec<u8>>),
|
||||
) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
var_context,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -25,58 +38,255 @@ impl Completer for VariableCompletion {
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
) -> (Vec<Suggestion>, CompletionOptions) {
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let mut output = vec![];
|
||||
|
||||
let builtins = ["$nu", "$in", "$config", "$env", "$nothing"];
|
||||
let var_str = std::str::from_utf8(&self.var_context.0)
|
||||
.unwrap_or("")
|
||||
.to_lowercase();
|
||||
let var_id = working_set.find_variable(&self.var_context.0);
|
||||
let current_span = reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
};
|
||||
let sublevels_count = self.var_context.1.len();
|
||||
|
||||
// Completions for the given variable
|
||||
if !var_str.is_empty() {
|
||||
// Completion for $env.<tab>
|
||||
if var_str.as_str() == "$env" {
|
||||
let env_vars = self.stack.get_env_vars(&self.engine_state);
|
||||
|
||||
// Return nested values
|
||||
if sublevels_count > 0 {
|
||||
// Extract the target var ($env.<target-var>)
|
||||
let target_var = self.var_context.1[0].clone();
|
||||
let target_var_str =
|
||||
str::from_utf8(&target_var).unwrap_or_default().to_string();
|
||||
|
||||
// Everything after the target var is the nested level ($env.<target-var>.<nested_levels>...)
|
||||
let nested_levels: Vec<Vec<u8>> =
|
||||
self.var_context.1.clone().into_iter().skip(1).collect();
|
||||
|
||||
if let Some(val) = env_vars.get(&target_var_str) {
|
||||
for suggestion in
|
||||
nested_suggestions(val.clone(), nested_levels, current_span)
|
||||
{
|
||||
if options
|
||||
.match_algorithm
|
||||
.matches_u8(suggestion.value.as_bytes(), &prefix)
|
||||
{
|
||||
output.push(suggestion);
|
||||
}
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
} else {
|
||||
// No nesting provided, return all env vars
|
||||
for env_var in env_vars {
|
||||
if options
|
||||
.match_algorithm
|
||||
.matches_u8(env_var.0.as_bytes(), &prefix)
|
||||
{
|
||||
output.push(Suggestion {
|
||||
value: env_var.0,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: current_span,
|
||||
append_whitespace: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
}
|
||||
|
||||
// Completions for $nu.<tab>
|
||||
if var_str.as_str() == "$nu" {
|
||||
// Eval nu var
|
||||
if let Ok(nuval) = eval_variable(
|
||||
&self.engine_state,
|
||||
&self.stack,
|
||||
nu_protocol::NU_VARIABLE_ID,
|
||||
nu_protocol::Span {
|
||||
start: current_span.start,
|
||||
end: current_span.end,
|
||||
},
|
||||
) {
|
||||
for suggestion in
|
||||
nested_suggestions(nuval, self.var_context.1.clone(), current_span)
|
||||
{
|
||||
if options
|
||||
.match_algorithm
|
||||
.matches_u8(suggestion.value.as_bytes(), &prefix)
|
||||
{
|
||||
output.push(suggestion);
|
||||
}
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
}
|
||||
|
||||
// Completion other variable types
|
||||
if let Some(var_id) = var_id {
|
||||
// Extract the variable value from the stack
|
||||
let var = self.stack.get_var(
|
||||
var_id,
|
||||
Span {
|
||||
start: span.start,
|
||||
end: span.end,
|
||||
},
|
||||
);
|
||||
|
||||
// If the value exists and it's of type Record
|
||||
if let Ok(value) = var {
|
||||
for suggestion in
|
||||
nested_suggestions(value, self.var_context.1.clone(), current_span)
|
||||
{
|
||||
if options
|
||||
.match_algorithm
|
||||
.matches_u8(suggestion.value.as_bytes(), &prefix)
|
||||
{
|
||||
output.push(suggestion);
|
||||
}
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Variable completion (e.g: $en<tab> to complete $env)
|
||||
for builtin in builtins {
|
||||
if builtin.as_bytes().starts_with(&prefix) {
|
||||
if options
|
||||
.match_algorithm
|
||||
.matches_u8(builtin.as_bytes(), &prefix)
|
||||
{
|
||||
output.push(Suggestion {
|
||||
value: builtin.to_string(),
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
span: current_span,
|
||||
append_whitespace: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for scope in &working_set.delta.scope {
|
||||
for v in &scope.vars {
|
||||
if v.0.starts_with(&prefix) {
|
||||
output.push(Suggestion {
|
||||
value: String::from_utf8_lossy(v.0).to_string(),
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
});
|
||||
// TODO: The following can be refactored (see find_commands_by_predicate() used in
|
||||
// command_completions).
|
||||
let mut removed_overlays = vec![];
|
||||
// Working set scope vars
|
||||
for scope_frame in working_set.delta.scope.iter().rev() {
|
||||
for overlay_frame in scope_frame
|
||||
.active_overlays(&mut removed_overlays)
|
||||
.iter()
|
||||
.rev()
|
||||
{
|
||||
for v in &overlay_frame.vars {
|
||||
if options.match_algorithm.matches_u8(v.0, &prefix) {
|
||||
output.push(Suggestion {
|
||||
value: String::from_utf8_lossy(v.0).to_string(),
|
||||
description: None,
|
||||
extra: None,
|
||||
span: current_span,
|
||||
append_whitespace: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for scope in &self.engine_state.scope {
|
||||
for v in &scope.vars {
|
||||
if v.0.starts_with(&prefix) {
|
||||
|
||||
// Permanent state vars
|
||||
// for scope in &self.engine_state.scope {
|
||||
for overlay_frame in self
|
||||
.engine_state
|
||||
.active_overlays(&removed_overlays)
|
||||
.iter()
|
||||
.rev()
|
||||
{
|
||||
for v in &overlay_frame.vars {
|
||||
if options.match_algorithm.matches_u8(v.0, &prefix) {
|
||||
output.push(Suggestion {
|
||||
value: String::from_utf8_lossy(v.0).to_string(),
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
span: current_span,
|
||||
append_whitespace: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output.dedup();
|
||||
output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?
|
||||
|
||||
(output, CompletionOptions::default())
|
||||
output
|
||||
}
|
||||
}
|
||||
|
||||
// Find recursively the values for sublevels
|
||||
// if no sublevels are set it returns the current value
|
||||
fn nested_suggestions(
|
||||
val: Value,
|
||||
sublevels: Vec<Vec<u8>>,
|
||||
current_span: reedline::Span,
|
||||
) -> Vec<Suggestion> {
|
||||
let mut output: Vec<Suggestion> = vec![];
|
||||
let value = recursive_value(val, sublevels);
|
||||
|
||||
match value {
|
||||
Value::Record {
|
||||
cols,
|
||||
vals: _,
|
||||
span: _,
|
||||
} => {
|
||||
// Add all the columns as completion
|
||||
for item in cols {
|
||||
output.push(Suggestion {
|
||||
value: item,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: current_span,
|
||||
append_whitespace: false,
|
||||
});
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
_ => output,
|
||||
}
|
||||
}
|
||||
|
||||
// Extracts the recursive value (e.g: $var.a.b.c)
|
||||
fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
|
||||
// Go to next sublevel
|
||||
if let Some(next_sublevel) = sublevels.clone().into_iter().next() {
|
||||
match val {
|
||||
Value::Record {
|
||||
cols,
|
||||
vals,
|
||||
span: _,
|
||||
} => {
|
||||
for item in cols.into_iter().zip(vals.into_iter()) {
|
||||
// Check if index matches with sublevel
|
||||
if item.0.as_bytes().to_vec() == next_sublevel {
|
||||
// If matches try to fetch recursively the next
|
||||
return recursive_value(item.1, sublevels.into_iter().skip(1).collect());
|
||||
}
|
||||
}
|
||||
|
||||
// Current sublevel value not found
|
||||
return Value::Nothing {
|
||||
span: Span { start: 0, end: 0 },
|
||||
};
|
||||
}
|
||||
_ => return val,
|
||||
}
|
||||
}
|
||||
|
||||
val
|
||||
}
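An illustration of the drill-down that recursive_value performs for something like `$var.user.name`, using a plain nested map as a hedged stand-in for nu_protocol::Value.

```rust
use std::collections::BTreeMap;

enum Val {
    Str(String),
    Record(BTreeMap<String, Val>),
}

// Follow each sublevel into nested records; None if the path doesn't exist.
fn drill<'a>(val: &'a Val, sublevels: &[&str]) -> Option<&'a Val> {
    match (val, sublevels) {
        (v, []) => Some(v),
        (Val::Record(cols), [head, rest @ ..]) => drill(cols.get(*head)?, rest),
        _ => None,
    }
}

fn main() {
    let mut user = BTreeMap::new();
    user.insert("name".to_string(), Val::Str("nushell".to_string()));
    let mut root = BTreeMap::new();
    root.insert("user".to_string(), Val::Record(user));
    let root = Val::Record(root);

    // $var.user.name -> "nushell"
    match drill(&root, &["user", "name"]) {
        Some(Val::Str(s)) => assert_eq!(s, "nushell"),
        _ => panic!("expected a string"),
    }
}
```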
|
||||
|
@ -69,7 +69,9 @@ pub fn eval_config_contents(
|
||||
// Merge the delta in case env vars changed in the config
|
||||
match nu_engine::env::current_dir(engine_state, stack) {
|
||||
Ok(cwd) => {
|
||||
if let Err(e) = engine_state.merge_delta(StateDelta::new(), Some(stack), cwd) {
|
||||
if let Err(e) =
|
||||
engine_state.merge_delta(StateDelta::new(engine_state), Some(stack), cwd)
|
||||
{
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &e);
|
||||
}
|
||||
|
@ -9,7 +9,7 @@ use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Config, PipelineData, Span, Value,
|
||||
};
|
||||
use std::io::Write;
|
||||
use nu_utils::stdout_write_all_and_flush;
|
||||
|
||||
/// Main function used when a file path is found as argument for nu
|
||||
pub fn evaluate_file(
|
||||
@ -61,17 +61,20 @@ pub fn evaluate_file(
|
||||
}
|
||||
|
||||
pub fn print_table_or_error(
|
||||
engine_state: &EngineState,
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
mut pipeline_data: PipelineData,
|
||||
config: &Config,
|
||||
config: &mut Config,
|
||||
) {
|
||||
let exit_code = match &mut pipeline_data {
|
||||
PipelineData::ExternalStream { exit_code, .. } => exit_code.take(),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
match engine_state.find_decl("table".as_bytes()) {
|
||||
// Change the engine_state config to use the passed in configuration
|
||||
engine_state.set_config(config);
|
||||
|
||||
match engine_state.find_decl("table".as_bytes(), &[]) {
|
||||
Some(decl_id) => {
|
||||
let table = engine_state.get_decl(decl_id).run(
|
||||
engine_state,
|
||||
@ -83,8 +86,6 @@ pub fn print_table_or_error(
|
||||
match table {
|
||||
Ok(table) => {
|
||||
for item in table {
|
||||
let stdout = std::io::stdout();
|
||||
|
||||
if let Value::Error { error } = item {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
@ -96,10 +97,7 @@ pub fn print_table_or_error(
|
||||
let mut out = item.into_string("\n", config);
|
||||
out.push('\n');
|
||||
|
||||
match stdout.lock().write_all(out.as_bytes()) {
|
||||
Ok(_) => (),
|
||||
Err(err) => eprintln!("{}", err),
|
||||
};
|
||||
let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{}", err));
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
@ -113,8 +111,6 @@ pub fn print_table_or_error(
|
||||
}
|
||||
None => {
|
||||
for item in pipeline_data {
|
||||
let stdout = std::io::stdout();
|
||||
|
||||
if let Value::Error { error } = item {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
@ -126,10 +122,7 @@ pub fn print_table_or_error(
|
||||
let mut out = item.into_string("\n", config);
|
||||
out.push('\n');
|
||||
|
||||
match stdout.lock().write_all(out.as_bytes()) {
|
||||
Ok(_) => (),
|
||||
Err(err) => eprintln!("{}", err),
|
||||
};
|
||||
let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{}", err));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -1,7 +1,6 @@
|
||||
mod commands;
|
||||
mod completions;
|
||||
mod config_files;
|
||||
mod errors;
|
||||
mod eval_file;
|
||||
mod menus;
|
||||
mod nu_highlight;
|
||||
@ -15,9 +14,8 @@ mod util;
|
||||
mod validation;
|
||||
|
||||
pub use commands::evaluate_commands;
|
||||
pub use completions::NuCompleter;
|
||||
pub use completions::{FileCompletion, NuCompleter};
|
||||
pub use config_files::eval_config_contents;
|
||||
pub use errors::CliError;
|
||||
pub use eval_file::evaluate_file;
|
||||
pub use menus::{DescriptionMenu, NuHelpCompleter};
|
||||
pub use nu_highlight::NuHighlight;
|
||||
@ -25,7 +23,6 @@ pub use print::Print;
|
||||
pub use prompt::NushellPrompt;
|
||||
pub use repl::evaluate_repl;
|
||||
pub use syntax_highlight::NuHighlighter;
|
||||
pub use util::print_pipeline_data;
|
||||
pub use util::{eval_source, gather_parent_env_vars, get_init_cwd, report_error};
|
||||
pub use validation::NuValidator;
|
||||
|
||||
|
@ -272,7 +272,7 @@ impl DescriptionMenu {
|
||||
}
|
||||
|
||||
fn no_records_msg(&self, use_ansi_coloring: bool) -> String {
|
||||
let msg = "TYPE TO START SEACH";
|
||||
let msg = "TYPE TO START SEARCH";
|
||||
if use_ansi_coloring {
|
||||
format!(
|
||||
"{}{}{}",
|
||||
@ -586,7 +586,7 @@ impl Menu for DescriptionMenu {
|
||||
} else {
|
||||
self.example_index = Some(self.examples.len().saturating_sub(1));
|
||||
}
|
||||
} else {
|
||||
} else if !self.examples.is_empty() {
|
||||
self.example_index = Some(0);
|
||||
}
|
||||
}
|
||||
@ -598,7 +598,7 @@ impl Menu for DescriptionMenu {
|
||||
} else {
|
||||
self.example_index = Some(0);
|
||||
}
|
||||
} else {
|
||||
} else if !self.examples.is_empty() {
|
||||
self.example_index = Some(0);
|
||||
}
|
||||
}
|
||||
|
@ -92,6 +92,7 @@ impl NuHelpCompleter {
|
||||
start: pos,
|
||||
end: pos + line.len(),
|
||||
},
|
||||
append_whitespace: false,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
|
@ -155,6 +155,7 @@ fn convert_to_suggestions(
|
||||
description,
|
||||
extra,
|
||||
span,
|
||||
append_whitespace: false,
|
||||
}]
|
||||
}
|
||||
Value::List { vals, .. } => vals
|
||||
@ -169,6 +170,7 @@ fn convert_to_suggestions(
|
||||
start: 0,
|
||||
end: line.len(),
|
||||
},
|
||||
append_whitespace: false,
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
@ -22,7 +22,7 @@ impl Command for NuHighlight {
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
_stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
@ -30,7 +30,7 @@ impl Command for NuHighlight {
|
||||
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let engine_state = engine_state.clone();
|
||||
let config = stack.get_config()?;
|
||||
let config = engine_state.get_config().clone();
|
||||
|
||||
let highlighter = crate::NuHighlighter {
|
||||
engine_state,
|
||||
|
@ -16,6 +16,11 @@ impl Command for Print {
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("print")
|
||||
.rest("rest", SyntaxShape::Any, "the values to print")
|
||||
.switch(
|
||||
"no_newline",
|
||||
"print without inserting a newline for the line ending",
|
||||
Some('n'),
|
||||
)
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
||||
@ -31,10 +36,12 @@ impl Command for Print {
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
||||
let no_newline = call.has_flag("no_newline");
|
||||
let head = call.head;
|
||||
|
||||
for arg in args {
|
||||
crate::util::print_pipeline_data(arg.into_pipeline_data(), engine_state, stack)?;
|
||||
arg.into_pipeline_data()
|
||||
.print(engine_state, stack, no_newline)?;
|
||||
}
|
||||
|
||||
Ok(PipelineData::new(head))
|
||||
|
@ -24,7 +24,7 @@ const DEFAULT_COMPLETION_MENU: &str = r#"
|
||||
type: {
|
||||
layout: columnar
|
||||
columns: 4
|
||||
col_width: 20
|
||||
col_width: 20
|
||||
col_padding: 2
|
||||
}
|
||||
style: {
|
||||
@ -58,7 +58,7 @@ const DEFAULT_HELP_MENU: &str = r#"
|
||||
type: {
|
||||
layout: description
|
||||
columns: 4
|
||||
col_width: 20
|
||||
col_width: 20
|
||||
col_padding: 2
|
||||
selection_rows: 4
|
||||
description_rows: 10
|
||||
@ -501,14 +501,16 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
||||
ReedlineEvent::MenuPrevious,
|
||||
);
|
||||
|
||||
// History menu keybinding
|
||||
keybindings.add_binding(
|
||||
KeyModifiers::CONTROL,
|
||||
KeyCode::Char('r'),
|
||||
ReedlineEvent::Menu("history_menu".to_string()),
|
||||
);
|
||||
|
||||
keybindings.add_binding(
|
||||
KeyModifiers::CONTROL,
|
||||
KeyCode::Char('x'),
|
||||
ReedlineEvent::UntilFound(vec![
|
||||
ReedlineEvent::Menu("history_menu".to_string()),
|
||||
ReedlineEvent::MenuPageNext,
|
||||
]),
|
||||
ReedlineEvent::MenuPageNext,
|
||||
);
|
||||
|
||||
keybindings.add_binding(
|
||||
@ -522,8 +524,8 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
||||
|
||||
// Help menu keybinding
|
||||
keybindings.add_binding(
|
||||
KeyModifiers::CONTROL,
|
||||
KeyCode::Char('q'),
|
||||
KeyModifiers::NONE,
|
||||
KeyCode::F(1),
|
||||
ReedlineEvent::Menu("help_menu".to_string()),
|
||||
);
|
||||
}
|
||||
@ -814,6 +816,7 @@ fn event_from_record(
|
||||
"none" => ReedlineEvent::None,
|
||||
"actionhandler" => ReedlineEvent::ActionHandler,
|
||||
"clearscreen" => ReedlineEvent::ClearScreen,
|
||||
"clearscrollback" => ReedlineEvent::ClearScrollback,
|
||||
"historyhintcomplete" => ReedlineEvent::HistoryHintComplete,
|
||||
"historyhintwordcomplete" => ReedlineEvent::HistoryHintWordComplete,
|
||||
"ctrld" => ReedlineEvent::CtrlD,
|
||||
@ -836,6 +839,7 @@ fn event_from_record(
|
||||
"menuprevious" => ReedlineEvent::MenuPrevious,
|
||||
"menupagenext" => ReedlineEvent::MenuPageNext,
|
||||
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
|
||||
"openeditor" => ReedlineEvent::OpenEditor,
|
||||
"menu" => {
|
||||
let menu = extract_value("name", cols, vals, span)?;
|
||||
ReedlineEvent::Menu(menu.into_string("", config))
|
||||
@ -881,6 +885,7 @@ fn edit_from_record(
|
||||
let value = extract_value("value", cols, vals, span)?;
|
||||
EditCommand::InsertString(value.into_string("", config))
|
||||
}
|
||||
"insertnewline" => EditCommand::InsertNewline,
|
||||
"backspace" => EditCommand::Backspace,
|
||||
"delete" => EditCommand::Delete,
|
||||
"backspaceword" => EditCommand::BackspaceWord,
|
||||
|
@ -1,26 +1,28 @@
|
||||
use crate::reedline_config::add_menus;
|
||||
use crate::{completions::NuCompleter, NuHighlighter, NuValidator, NushellPrompt};
|
||||
use crate::{prompt_update, reedline_config};
|
||||
use crate::{
|
||||
reedline_config::KeybindingsMode,
|
||||
completions::NuCompleter,
|
||||
prompt_update,
|
||||
reedline_config::{add_menus, create_keybindings, KeybindingsMode},
|
||||
util::{eval_source, report_error},
|
||||
NuHighlighter, NuValidator, NushellPrompt,
|
||||
};
|
||||
use log::info;
|
||||
use log::trace;
|
||||
use log::{info, trace};
|
||||
use miette::{IntoDiagnostic, Result};
|
||||
use nu_color_config::get_color_config;
|
||||
use nu_engine::convert_env_values;
|
||||
use nu_engine::{convert_env_values, eval_block};
|
||||
use nu_parser::lex;
|
||||
use nu_protocol::engine::Stack;
|
||||
use nu_protocol::PipelineData;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
Config, ShellError, Span, Value, CONFIG_VARIABLE_ID,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
BlockId, PipelineData, PositionalArg, ShellError, Span, Value,
|
||||
};
|
||||
use reedline::{DefaultHinter, Emacs, Vi};
|
||||
use std::io::{self, Write};
|
||||
use std::path::PathBuf;
|
||||
use std::{sync::atomic::Ordering, time::Instant};
|
||||
|
||||
const PRE_EXECUTE_MARKER: &str = "\x1b]133;A\x1b\\";
|
||||
const PRE_PROMPT_MARKER: &str = "\x1b]133;C\x1b\\";
|
||||
const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
|
||||
|
||||
pub fn evaluate_repl(
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
@ -76,15 +78,7 @@ pub fn evaluate_repl(
|
||||
|
||||
// Get the config once for the history `max_history_size`
|
||||
// Updating that will not be possible in one session
|
||||
let mut config = match stack.get_config() {
|
||||
Ok(config) => config,
|
||||
Err(e) => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
report_error(&working_set, &e);
|
||||
Config::default()
|
||||
}
|
||||
};
|
||||
let mut config = engine_state.get_config();
|
||||
|
||||
if is_perf_true {
|
||||
info!("setup reedline {}:{}:{}", file!(), line!(), column!());
|
||||
@ -114,26 +108,18 @@ pub fn evaluate_repl(
|
||||
);
|
||||
}
|
||||
|
||||
config = match stack.get_config() {
|
||||
Ok(config) => config,
|
||||
Err(e) => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
//Reset the ctrl-c handler
|
||||
if let Some(ctrlc) = &mut engine_state.ctrlc {
|
||||
ctrlc.store(false, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
report_error(&working_set, &e);
|
||||
Config::default()
|
||||
}
|
||||
};
|
||||
config = engine_state.get_config();
|
||||
|
||||
if is_perf_true {
|
||||
info!("setup colors {}:{}:{}", file!(), line!(), column!());
|
||||
}
|
||||
|
||||
let color_hm = get_color_config(&config);
|
||||
|
||||
//Reset the ctrl-c handler
|
||||
if let Some(ctrlc) = &mut engine_state.ctrlc {
|
||||
ctrlc.store(false, Ordering::SeqCst);
|
||||
}
|
||||
let color_hm = get_color_config(config);
|
||||
|
||||
if is_perf_true {
|
||||
info!("update reedline {}:{}:{}", file!(), line!(), column!());
|
||||
@ -151,7 +137,6 @@ pub fn evaluate_repl(
|
||||
.with_completer(Box::new(NuCompleter::new(
|
||||
engine_reference.clone(),
|
||||
stack.clone(),
|
||||
stack.vars.get(&CONFIG_VARIABLE_ID).cloned(),
|
||||
)))
|
||||
.with_quick_completions(config.quick_completions)
|
||||
.with_partial_completions(config.partial_completions)
|
||||
@ -165,7 +150,7 @@ pub fn evaluate_repl(
|
||||
line_editor.disable_hints()
|
||||
};
|
||||
|
||||
line_editor = match add_menus(line_editor, engine_reference, stack, &config) {
|
||||
line_editor = match add_menus(line_editor, engine_reference, stack, config) {
|
||||
Ok(line_editor) => line_editor,
|
||||
Err(e) => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
@ -174,6 +159,27 @@ pub fn evaluate_repl(
|
||||
}
|
||||
};
|
||||
|
||||
let buffer_editor = if !config.buffer_editor.is_empty() {
|
||||
Some(config.buffer_editor.clone())
|
||||
} else {
|
||||
stack
|
||||
.get_env_var(engine_state, "EDITOR")
|
||||
.map(|v| v.as_string().unwrap_or_default())
|
||||
.filter(|v| !v.is_empty())
|
||||
.or_else(|| {
|
||||
stack
|
||||
.get_env_var(engine_state, "VISUAL")
|
||||
.map(|v| v.as_string().unwrap_or_default())
|
||||
.filter(|v| !v.is_empty())
|
||||
})
|
||||
};
|
||||
|
||||
line_editor = if let Some(buffer_editor) = buffer_editor {
|
||||
line_editor.with_buffer_editor(buffer_editor, "nu".into())
|
||||
} else {
|
||||
line_editor
|
||||
};
|
||||
|
||||
if config.sync_history_on_enter {
|
||||
if is_perf_true {
|
||||
info!("sync history {}:{}:{}", file!(), line!(), column!());
|
||||
@ -186,7 +192,7 @@ pub fn evaluate_repl(
|
||||
}
|
||||
|
||||
// Changing the line editor based on the found keybindings
|
||||
line_editor = match reedline_config::create_keybindings(&config) {
|
||||
line_editor = match create_keybindings(config) {
|
||||
Ok(keybindings) => match keybindings {
|
||||
KeybindingsMode::Emacs(keybindings) => {
|
||||
let edit_mode = Box::new(Emacs::new(keybindings));
|
||||
@ -211,13 +217,67 @@ pub fn evaluate_repl(
|
||||
info!("prompt_update {}:{}:{}", file!(), line!(), column!());
|
||||
}
|
||||
|
||||
let prompt = prompt_update::update_prompt(
|
||||
&config,
|
||||
engine_state,
|
||||
stack,
|
||||
&mut nu_prompt,
|
||||
is_perf_true,
|
||||
);
|
||||
// Right before we start our prompt and take input from the user,
|
||||
// fire the "pre_prompt" hook
|
||||
if let Some(hook) = &config.hooks.pre_prompt {
|
||||
if let Err(err) = run_hook(engine_state, stack, vec![], hook) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &err);
|
||||
}
|
||||
}
|
||||
|
||||
// Next, check all the environment variables they ask for
|
||||
// fire the "env_change" hook
|
||||
if let Some(hook) = config.hooks.env_change.clone() {
|
||||
match hook {
|
||||
Value::Record {
|
||||
cols, vals: blocks, ..
|
||||
} => {
|
||||
for (idx, env_var) in cols.iter().enumerate() {
|
||||
let before = engine_state
|
||||
.previous_env_vars
|
||||
.get(env_var)
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
let after = stack.get_env_var(engine_state, env_var).unwrap_or_default();
|
||||
if before != after {
|
||||
if let Err(err) = run_hook(
|
||||
engine_state,
|
||||
stack,
|
||||
vec![before, after.clone()],
|
||||
&blocks[idx],
|
||||
) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &err);
|
||||
}
|
||||
|
||||
engine_state
|
||||
.previous_env_vars
|
||||
.insert(env_var.to_string(), after);
|
||||
}
|
||||
}
|
||||
}
|
||||
x => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(
|
||||
&working_set,
|
||||
&ShellError::TypeMismatch(
|
||||
"record for 'env_change' hook".to_string(),
|
||||
x.span().unwrap_or_else(|_| Span::new(0, 0)),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
config = engine_state.get_config();
|
||||
|
||||
if config.shell_integration {
|
||||
run_ansi_sequence(PRE_EXECUTE_MARKER)?;
|
||||
}
|
||||
|
||||
let prompt =
|
||||
prompt_update::update_prompt(config, engine_state, stack, &mut nu_prompt, is_perf_true);
|
||||
|
||||
entry_num += 1;
|
||||
|
||||
@ -231,8 +291,39 @@ pub fn evaluate_repl(
|
||||
}
|
||||
|
||||
let input = line_editor.read_line(prompt);
|
||||
|
||||
match input {
|
||||
Ok(Signal::Success(s)) => {
|
||||
// Right before we start running the code the user gave us,
|
||||
// fire the "pre_execution" hook
|
||||
if let Some(hook) = &config.hooks.pre_execution {
|
||||
if let Err(err) = run_hook(engine_state, stack, vec![], hook) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &err);
|
||||
}
|
||||
}
|
||||
|
||||
if config.shell_integration {
|
||||
run_ansi_sequence(RESET_APPLICATION_MODE)?;
|
||||
run_ansi_sequence(PRE_PROMPT_MARKER)?;
|
||||
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
|
||||
let path = cwd.as_string()?;
|
||||
// Try to abbreviate string for windows title
|
||||
let maybe_abbrev_path = if let Some(p) = nu_path::home_dir() {
|
||||
path.replace(&p.as_path().display().to_string(), "~")
|
||||
} else {
|
||||
path
|
||||
};
|
||||
|
||||
// Set window title too
|
||||
// https://tldp.org/HOWTO/Xterm-Title-3.html
|
||||
// ESC]0;stringBEL -- Set icon name and window title to string
|
||||
// ESC]1;stringBEL -- Set icon name to string
|
||||
// ESC]2;stringBEL -- Set window title to string
|
||||
run_ansi_sequence(&format!("\x1b]2;{}\x07", maybe_abbrev_path))?;
|
||||
}
|
||||
}
|
||||
|
||||
let start_time = Instant::now();
|
||||
let tokens = lex(s.as_bytes(), 0, &[], &[], false);
|
||||
// Check if this is a single call to a directory, if so auto-cd
|
||||
@ -255,10 +346,9 @@ pub fn evaluate_repl(
|
||||
|
||||
report_error(
|
||||
&working_set,
|
||||
&ShellError::DirectoryNotFound(tokens.0[0].span),
|
||||
&ShellError::DirectoryNotFound(tokens.0[0].span, None),
|
||||
);
|
||||
}
|
||||
|
||||
let path = nu_path::canonicalize_with(path, &cwd)
|
||||
.expect("internal error: cannot canonicalize known path");
|
||||
(path.to_string_lossy().to_string(), tokens.0[0].span)
|
||||
@ -304,21 +394,22 @@ pub fn evaluate_repl(
|
||||
&format!("entry #{}", entry_num),
|
||||
PipelineData::new(Span::new(0, 0)),
|
||||
);
|
||||
|
||||
stack.add_env_var(
|
||||
"CMD_DURATION_MS".into(),
|
||||
Value::String {
|
||||
val: format!("{}", start_time.elapsed().as_millis()),
|
||||
span: Span { start: 0, end: 0 },
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
stack.add_env_var(
|
||||
"CMD_DURATION_MS".into(),
|
||||
Value::String {
|
||||
val: format!("{}", start_time.elapsed().as_millis()),
|
||||
span: Span { start: 0, end: 0 },
|
||||
},
|
||||
);
|
||||
|
||||
// FIXME: permanent state changes like this hopefully in time can be removed
|
||||
// and be replaced by just passing the cwd in where needed
|
||||
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
|
||||
let path = cwd.as_string()?;
|
||||
let _ = std::env::set_current_dir(path);
|
||||
engine_state.env_vars.insert("PWD".into(), cwd);
|
||||
engine_state.add_env_var("PWD".into(), cwd);
|
||||
}
|
||||
}
|
||||
Ok(Signal::CtrlC) => {
|
||||
@ -329,9 +420,6 @@ pub fn evaluate_repl(
|
||||
println!();
|
||||
break;
|
||||
}
|
||||
Ok(Signal::CtrlL) => {
|
||||
line_editor.clear_screen().into_diagnostic()?;
|
||||
}
|
||||
Err(err) => {
|
||||
let message = err.to_string();
|
||||
if !message.contains("duration") {
|
||||
@ -343,3 +431,87 @@ pub fn evaluate_repl(
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_ansi_sequence(seq: &str) -> Result<(), ShellError> {
|
||||
match io::stdout().write_all(seq.as_bytes()) {
|
||||
Ok(it) => it,
|
||||
Err(err) => {
|
||||
return Err(ShellError::GenericError(
|
||||
"Error writing ansi sequence".into(),
|
||||
err.to_string(),
|
||||
Some(Span { start: 0, end: 0 }),
|
||||
None,
|
||||
Vec::new(),
|
||||
));
|
||||
}
|
||||
};
|
||||
io::stdout().flush().map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error flushing stdio".into(),
|
||||
e.to_string(),
|
||||
Some(Span { start: 0, end: 0 }),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn run_hook(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
arguments: Vec<Value>,
|
||||
value: &Value,
|
||||
) -> Result<(), ShellError> {
|
||||
match value {
|
||||
Value::List { vals, .. } => {
|
||||
for val in vals {
|
||||
run_hook(engine_state, stack, arguments.clone(), val)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Value::Block {
|
||||
val: block_id,
|
||||
span,
|
||||
..
|
||||
} => run_hook_block(engine_state, stack, *block_id, arguments, *span),
|
||||
x => match x.span() {
|
||||
Ok(span) => Err(ShellError::MissingConfigValue(
|
||||
"block for hook in config".into(),
|
||||
span,
|
||||
)),
|
||||
_ => Err(ShellError::MissingConfigValue(
|
||||
"block for hook in config".into(),
|
||||
Span { start: 0, end: 0 },
|
||||
)),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_hook_block(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
block_id: BlockId,
|
||||
arguments: Vec<Value>,
|
||||
span: Span,
|
||||
) -> Result<(), ShellError> {
|
||||
let block = engine_state.get_block(block_id);
|
||||
let input = PipelineData::new(span);
|
||||
|
||||
let mut callee_stack = stack.gather_captures(&block.captures);
|
||||
|
||||
for (idx, PositionalArg { var_id, .. }) in
|
||||
block.signature.required_positional.iter().enumerate()
|
||||
{
|
||||
if let Some(var_id) = var_id {
|
||||
callee_stack.add_var(*var_id, arguments[idx].clone())
|
||||
}
|
||||
}
|
||||
|
||||
match eval_block(engine_state, &mut callee_stack, block, input, false, false) {
|
||||
Ok(pipeline_data) => match pipeline_data.into_value(span) {
|
||||
Value::Error { error } => Err(error),
|
||||
_ => Ok(()),
|
||||
},
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
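A small illustrative sketch (not part of the diff) of the env_change contract used above, assuming only run_hook as defined in this file and the imports already present here: the hook record's columns are environment variable names, its values are blocks, and a changed variable's block receives the old and new values as its leading positional arguments.

// Hypothetical helper; mirrors the dispatch loop in evaluate_repl and relies
// on this file's existing imports (EngineState, Stack, Value, ShellError).
fn fire_env_change_hook(
    engine_state: &EngineState,
    stack: &mut Stack,
    hook_cols: &[String],
    hook_vals: &[Value],
    changed_var: &str,
    before: Value,
    after: Value,
) -> Result<(), ShellError> {
    if let Some(idx) = hook_cols.iter().position(|c| c == changed_var) {
        // The block gets [before, after], exactly like the loop above.
        run_hook(engine_state, stack, vec![before, after], &hook_vals[idx])?;
    }
    Ok(())
}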
||||
|
@ -178,6 +178,11 @@ impl Highlighter for NuHighlighter {
|
||||
get_shape_color(shape.1.to_string(), &self.config),
|
||||
next_token,
|
||||
)),
|
||||
FlatShape::Directory => output.push((
|
||||
// nushell Directory
|
||||
get_shape_color(shape.1.to_string(), &self.config),
|
||||
next_token,
|
||||
)),
|
||||
FlatShape::GlobPattern => output.push((
|
||||
// nushell GlobPattern
|
||||
get_shape_color(shape.1.to_string(), &self.config),
|
||||
|
@ -1,129 +1,59 @@
|
||||
use crate::CliError;
|
||||
use log::trace;
|
||||
use nu_engine::eval_block;
|
||||
use nu_parser::{lex, parse, trim_quotes, Token, TokenContents};
|
||||
use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
|
||||
use nu_protocol::engine::StateWorkingSet;
|
||||
use nu_protocol::CliError;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{EngineState, Stack},
|
||||
PipelineData, ShellError, Span, Value,
|
||||
};
|
||||
#[cfg(windows)]
|
||||
use nu_utils::enable_vt_processing;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub fn print_pipeline_data(
|
||||
input: PipelineData,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
) -> Result<(), ShellError> {
|
||||
// If the table function is in the declarations, then we can use it
|
||||
// to create the table value that will be printed in the terminal
|
||||
|
||||
let config = stack.get_config().unwrap_or_default();
|
||||
|
||||
let stdout = std::io::stdout();
|
||||
|
||||
if let PipelineData::ExternalStream {
|
||||
stdout: stream,
|
||||
exit_code,
|
||||
..
|
||||
} = input
|
||||
{
|
||||
if let Some(stream) = stream {
|
||||
for s in stream {
|
||||
let _ = stdout.lock().write_all(s?.as_binary()?);
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure everything has finished
|
||||
if let Some(exit_code) = exit_code {
|
||||
let _: Vec<_> = exit_code.into_iter().collect();
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
match engine_state.find_decl("table".as_bytes()) {
|
||||
Some(decl_id) => {
|
||||
let table = engine_state.get_decl(decl_id).run(
|
||||
engine_state,
|
||||
stack,
|
||||
&Call::new(Span::new(0, 0)),
|
||||
input,
|
||||
)?;
|
||||
|
||||
for item in table {
|
||||
let stdout = std::io::stdout();
|
||||
|
||||
if let Value::Error { error } = item {
|
||||
return Err(error);
|
||||
}
|
||||
|
||||
let mut out = item.into_string("\n", &config);
|
||||
out.push('\n');
|
||||
|
||||
match stdout.lock().write_all(out.as_bytes()) {
|
||||
Ok(_) => (),
|
||||
Err(err) => eprintln!("{}", err),
|
||||
};
|
||||
}
|
||||
}
|
||||
None => {
|
||||
for item in input {
|
||||
let stdout = std::io::stdout();
|
||||
|
||||
if let Value::Error { error } = item {
|
||||
return Err(error);
|
||||
}
|
||||
|
||||
let mut out = item.into_string("\n", &config);
|
||||
out.push('\n');
|
||||
|
||||
match stdout.lock().write_all(out.as_bytes()) {
|
||||
Ok(_) => (),
|
||||
Err(err) => eprintln!("{}", err),
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// This will collect environment variables from std::env and add them to a stack.
|
||||
//
|
||||
// In order to ensure the values have spans, it first creates a dummy file, writes the collected
|
||||
// env vars into it (in a "NAME"="value" format, quite similar to the output of the Unix 'env'
|
||||
// tool), then uses the file to get the spans. The file stays in memory, no filesystem IO is done.
|
||||
pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
|
||||
gather_env_vars(std::env::vars(), engine_state);
|
||||
}
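For the escaping change in put_env_to_fake_file below, a minimal sketch of what one line of the in-memory env "file" looks like after this change, assuming nu_parser::escape_quote_string (imported above in this diff) wraps its argument in double quotes and escapes embedded quotes:

use nu_parser::escape_quote_string;

// Illustration only: each captured variable becomes one `"NAME"="value"` line,
// so lexing the buffer later yields a real Span for both the name and the value.
fn fake_env_line(name: &str, val: &str) -> String {
    let mut line = String::new();
    line.push_str(&escape_quote_string(name));
    line.push('=');
    line.push_str(&escape_quote_string(val));
    line.push('\n');
    line // e.g. "PWD"="/home/user/project"
}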
|
||||
|
||||
fn gather_env_vars(vars: impl Iterator<Item = (String, String)>, engine_state: &mut EngineState) {
|
||||
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(
|
||||
&working_set,
|
||||
&ShellError::LabeledError(
|
||||
&ShellError::GenericError(
|
||||
format!("Environment variable was not captured: {}", env_str),
|
||||
msg.into(),
|
||||
"".to_string(),
|
||||
None,
|
||||
Some(msg.into()),
|
||||
Vec::new(),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
fn put_env_to_fake_file(name: &str, val: &str, fake_env_file: &mut String) {
|
||||
fake_env_file.push('`');
|
||||
fake_env_file.push_str(name);
|
||||
fake_env_file.push('`');
|
||||
fake_env_file.push_str(&escape_quote_string(name));
|
||||
fake_env_file.push('=');
|
||||
fake_env_file.push('`');
|
||||
fake_env_file.push_str(val);
|
||||
fake_env_file.push('`');
|
||||
fake_env_file.push_str(&escape_quote_string(val));
|
||||
fake_env_file.push('\n');
|
||||
}
|
||||
|
||||
let mut fake_env_file = String::new();
|
||||
let mut has_pwd = false;
|
||||
|
||||
// Make sure we always have PWD
|
||||
if std::env::var("PWD").is_err() {
|
||||
// Write all the env vars into a fake file
|
||||
for (name, val) in vars {
|
||||
if name == "PWD" {
|
||||
has_pwd = true;
|
||||
}
|
||||
put_env_to_fake_file(&name, &val, &mut fake_env_file);
|
||||
}
|
||||
|
||||
if !has_pwd {
|
||||
match std::env::current_dir() {
|
||||
Ok(cwd) => {
|
||||
put_env_to_fake_file("PWD", &cwd.to_string_lossy(), &mut fake_env_file);
|
||||
@ -133,20 +63,18 @@ pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(
|
||||
&working_set,
|
||||
&ShellError::LabeledError(
|
||||
&ShellError::GenericError(
|
||||
"Current directory not found".to_string(),
|
||||
format!("Retrieving current directory failed: {:?}", e),
|
||||
"".to_string(),
|
||||
None,
|
||||
Some(format!("Retrieving current directory failed: {:?}", e)),
|
||||
Vec::new(),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write all the env vars into a fake file
|
||||
for (name, val) in std::env::vars() {
|
||||
put_env_to_fake_file(&name, &val, &mut fake_env_file);
|
||||
}
|
||||
|
||||
// Lex the fake file, assign spans to all environment variables and add them
|
||||
// to stack
|
||||
let span_offset = engine_state.next_span_start();
|
||||
@ -184,8 +112,19 @@ pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let bytes = trim_quotes(bytes);
|
||||
String::from_utf8_lossy(bytes).to_string()
|
||||
let (bytes, parse_error) = unescape_unquote_string(bytes, *span);
|
||||
|
||||
if parse_error.is_some() {
|
||||
report_capture_error(
|
||||
engine_state,
|
||||
&String::from_utf8_lossy(contents),
|
||||
"Got unparsable name.",
|
||||
);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
bytes
|
||||
} else {
|
||||
report_capture_error(
|
||||
engine_state,
|
||||
@ -213,10 +152,20 @@ pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let bytes = trim_quotes(bytes);
|
||||
let (bytes, parse_error) = unescape_unquote_string(bytes, *span);
|
||||
|
||||
if parse_error.is_some() {
|
||||
report_capture_error(
|
||||
engine_state,
|
||||
&String::from_utf8_lossy(contents),
|
||||
"Got unparsable value.",
|
||||
);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
Value::String {
|
||||
val: String::from_utf8_lossy(bytes).to_string(),
|
||||
val: bytes,
|
||||
span: *span,
|
||||
}
|
||||
} else {
|
||||
@ -230,7 +179,7 @@ pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
|
||||
};
|
||||
|
||||
// stack.add_env_var(name, value);
|
||||
engine_state.env_vars.insert(name, value);
|
||||
engine_state.add_env_var(name, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -262,8 +211,8 @@ pub fn eval_source(
|
||||
(output, working_set.render())
|
||||
};
|
||||
|
||||
let cwd = match nu_engine::env::current_dir_str(engine_state, stack) {
|
||||
Ok(p) => PathBuf::from(p),
|
||||
let cwd = match nu_engine::env::current_dir(engine_state, stack) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &e);
|
||||
@ -271,10 +220,7 @@ pub fn eval_source(
|
||||
}
|
||||
};
|
||||
|
||||
if let Err(err) = engine_state.merge_delta(delta, Some(stack), &cwd) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &err);
|
||||
}
|
||||
let _ = engine_state.merge_delta(delta, Some(stack), &cwd);
|
||||
|
||||
match eval_block(engine_state, stack, &block, input, false, false) {
|
||||
Ok(mut pipeline_data) => {
|
||||
@ -288,7 +234,7 @@ pub fn eval_source(
|
||||
set_last_exit_code(stack, 0);
|
||||
}
|
||||
|
||||
if let Err(err) = print_pipeline_data(pipeline_data, engine_state, stack) {
|
||||
if let Err(err) = pipeline_data.print(engine_state, stack, false) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
report_error(&working_set, &err);
|
||||
@ -350,3 +296,38 @@ pub fn get_init_cwd() -> PathBuf {
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_gather_env_vars() {
|
||||
let mut engine_state = EngineState::new();
|
||||
let symbols = r##" !"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"##;
|
||||
|
||||
gather_env_vars(
|
||||
[
|
||||
("FOO".into(), "foo".into()),
|
||||
("SYMBOLS".into(), symbols.into()),
|
||||
(symbols.into(), "symbols".into()),
|
||||
]
|
||||
.into_iter(),
|
||||
&mut engine_state,
|
||||
);
|
||||
|
||||
let env = engine_state.render_env_vars();
|
||||
|
||||
assert!(
|
||||
matches!(env.get(&"FOO".to_string()), Some(&Value::String { val, .. }) if val == "foo")
|
||||
);
|
||||
assert!(
|
||||
matches!(env.get(&"SYMBOLS".to_string()), Some(&Value::String { val, .. }) if val == symbols)
|
||||
);
|
||||
assert!(
|
||||
matches!(env.get(&symbols.to_string()), Some(&Value::String { val, .. }) if val == "symbols")
|
||||
);
|
||||
assert!(env.get(&"PWD".to_string()).is_some());
|
||||
assert_eq!(env.len(), 4);
|
||||
}
|
||||
}
|
||||
|
29
crates/nu-cli/tests/custom_completions.rs
Normal file
@ -0,0 +1,29 @@
|
||||
pub mod support;
|
||||
|
||||
use nu_cli::NuCompleter;
|
||||
use reedline::Completer;
|
||||
use support::{match_suggestions, new_engine};
|
||||
|
||||
#[test]
|
||||
fn variables_completions() {
|
||||
// Create a new engine
|
||||
let (dir, _, mut engine, mut stack) = new_engine();
|
||||
|
||||
// Add record value as example
|
||||
let record = r#"def animals [] { ["cat", "dog", "eel" ] }
|
||||
def my-command [animal: string@animals] { print $animal }"#;
|
||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
|
||||
// Test completions for the custom command's argument
|
||||
let suggestions = completer.complete("my-command ".into(), 11);
|
||||
|
||||
assert_eq!(3, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec!["cat".into(), "dog".into(), "eel".into()];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
}
|
28
crates/nu-cli/tests/dotnu_completions.rs
Normal file
@ -0,0 +1,28 @@
|
||||
pub mod support;
|
||||
|
||||
use nu_cli::NuCompleter;
|
||||
use reedline::Completer;
|
||||
use support::new_engine;
|
||||
|
||||
#[test]
|
||||
fn dotnu_completions() {
|
||||
// Create a new engine
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
|
||||
// Test source completion
|
||||
let completion_str = "source ".to_string();
|
||||
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||
|
||||
assert_eq!(1, suggestions.len());
|
||||
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
|
||||
|
||||
// Test use completion
|
||||
let completion_str = "use ".to_string();
|
||||
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||
|
||||
assert_eq!(1, suggestions.len());
|
||||
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
|
||||
}
|
42
crates/nu-cli/tests/file_completions.rs
Normal file
@ -0,0 +1,42 @@
|
||||
pub mod support;
|
||||
|
||||
use nu_cli::NuCompleter;
|
||||
use reedline::Completer;
|
||||
use support::{file, folder, match_suggestions, new_engine};
|
||||
|
||||
#[test]
|
||||
fn file_completions() {
|
||||
// Create a new engine
|
||||
let (dir, dir_str, engine, stack) = new_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
|
||||
// Test completions for the current folder
|
||||
let target_dir = format!("cp {}", dir_str);
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![
|
||||
file(dir.join("nushell")),
|
||||
folder(dir.join("test_a")),
|
||||
folder(dir.join("test_b")),
|
||||
folder(dir.join("another")),
|
||||
file(dir.join("custom_completion.nu")),
|
||||
file(dir.join(".hidden_file")),
|
||||
folder(dir.join(".hidden_folder")),
|
||||
];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
|
||||
// Test completions for the completions/another folder
|
||||
let target_dir = format!("cd {}", folder(dir.join("another")));
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
}
|
36
crates/nu-cli/tests/flag_completions.rs
Normal file
@ -0,0 +1,36 @@
|
||||
pub mod support;
|
||||
|
||||
use nu_cli::NuCompleter;
|
||||
use reedline::Completer;
|
||||
use support::{match_suggestions, new_engine};
|
||||
|
||||
#[test]
|
||||
fn flag_completions() {
|
||||
// Create a new engine
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
// Test completions for the 'ls' flags
|
||||
let suggestions = completer.complete("ls -".into(), 4);
|
||||
|
||||
assert_eq!(12, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec![
|
||||
"--all".into(),
|
||||
"--du".into(),
|
||||
"--full-paths".into(),
|
||||
"--help".into(),
|
||||
"--long".into(),
|
||||
"--short-names".into(),
|
||||
"-a".into(),
|
||||
"-d".into(),
|
||||
"-f".into(),
|
||||
"-h".into(),
|
||||
"-l".into(),
|
||||
"-s".into(),
|
||||
];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
}
|
29
crates/nu-cli/tests/folder_completions.rs
Normal file
@ -0,0 +1,29 @@
|
||||
pub mod support;
|
||||
|
||||
use nu_cli::NuCompleter;
|
||||
use reedline::Completer;
|
||||
use support::{folder, match_suggestions, new_engine};
|
||||
|
||||
#[test]
|
||||
fn folder_completions() {
|
||||
// Create a new engine
|
||||
let (dir, dir_str, engine, stack) = new_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
|
||||
// Test completions for the current folder
|
||||
let target_dir = format!("cd {}", dir_str);
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![
|
||||
folder(dir.join("test_a")),
|
||||
folder(dir.join("test_b")),
|
||||
folder(dir.join("another")),
|
||||
folder(dir.join(".hidden_folder")),
|
||||
];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
}
|
117
crates/nu-cli/tests/support/completions_helpers.rs
Normal file
@ -0,0 +1,117 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use nu_command::create_default_context;
|
||||
use nu_engine::eval_block;
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateDelta, StateWorkingSet},
|
||||
PipelineData, ShellError, Span, Value,
|
||||
};
|
||||
use nu_test_support::fs;
|
||||
use reedline::Suggestion;
|
||||
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||
|
||||
// creates a new engine with its current path set to the completions fixtures folder
|
||||
pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("completions");
|
||||
let mut dir_str = dir
|
||||
.clone()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.unwrap_or_default();
|
||||
dir_str.push(SEP);
|
||||
|
||||
// Create a new engine with default context
|
||||
let mut engine_state = create_default_context(&dir);
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
||||
|
||||
// New delta state
|
||||
let delta = StateDelta::new(&engine_state);
|
||||
|
||||
// Add pwd as env var
|
||||
stack.add_env_var(
|
||||
"PWD".to_string(),
|
||||
Value::String {
|
||||
val: dir_str.clone(),
|
||||
span: nu_protocol::Span {
|
||||
start: 0,
|
||||
end: dir_str.len(),
|
||||
},
|
||||
},
|
||||
);
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::String {
|
||||
val: "NUSHELL".to_string(),
|
||||
span: nu_protocol::Span {
|
||||
start: 0,
|
||||
end: dir_str.len(),
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// Merge delta
|
||||
let merge_result = engine_state.merge_delta(delta, Some(&mut stack), &dir);
|
||||
assert!(merge_result.is_ok());
|
||||
|
||||
(dir, dir_str, engine_state, stack)
|
||||
}
|
||||
|
||||
// match a list of suggestions with the expected values
|
||||
pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
|
||||
expected.iter().zip(suggestions).for_each(|it| {
|
||||
assert_eq!(it.0, &it.1.value);
|
||||
});
|
||||
}
|
||||
|
||||
// append the separator to the converted path
|
||||
pub fn folder(path: PathBuf) -> String {
|
||||
let mut converted_path = file(path);
|
||||
converted_path.push(SEP);
|
||||
|
||||
converted_path
|
||||
}
|
||||
|
||||
// convert a given path to string
|
||||
pub fn file(path: PathBuf) -> String {
|
||||
path.into_os_string().into_string().unwrap_or_default()
|
||||
}
|
||||
|
||||
// merge_input runs the given input in the engine
|
||||
// and merges the state
|
||||
pub fn merge_input(
|
||||
input: &[u8],
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
dir: PathBuf,
|
||||
) -> Result<(), ShellError> {
|
||||
let (block, delta) = {
|
||||
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||
|
||||
let (block, err) = parse(&mut working_set, None, input, false, &[]);
|
||||
|
||||
assert!(err.is_none());
|
||||
|
||||
(block, working_set.render())
|
||||
};
|
||||
assert!(eval_block(
|
||||
&engine_state,
|
||||
stack,
|
||||
&block,
|
||||
PipelineData::Value(
|
||||
Value::Nothing {
|
||||
span: Span { start: 0, end: 0 },
|
||||
},
|
||||
None
|
||||
),
|
||||
false,
|
||||
false
|
||||
)
|
||||
.is_ok());
|
||||
|
||||
// Merge delta
|
||||
engine_state.merge_delta(delta, Some(stack), &dir)
|
||||
}
|
3
crates/nu-cli/tests/support/mod.rs
Normal file
@ -0,0 +1,3 @@
|
||||
pub mod completions_helpers;
|
||||
|
||||
pub use completions_helpers::{file, folder, match_suggestions, merge_input, new_engine};
|
87
crates/nu-cli/tests/variables_completions.rs
Normal file
@ -0,0 +1,87 @@
|
||||
pub mod support;
|
||||
|
||||
use nu_cli::NuCompleter;
|
||||
use reedline::Completer;
|
||||
use support::{match_suggestions, new_engine};
|
||||
|
||||
#[test]
|
||||
fn variables_completions() {
|
||||
// Create a new engine
|
||||
let (dir, _, mut engine, mut stack) = new_engine();
|
||||
|
||||
// Add record value as example
|
||||
let record = "let actor = { name: 'Tom Hardy', age: 44 }";
|
||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
|
||||
// Test completions for $nu
|
||||
let suggestions = completer.complete("$nu.".into(), 4);
|
||||
|
||||
assert_eq!(8, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec![
|
||||
"config-path".into(),
|
||||
"env-path".into(),
|
||||
"history-path".into(),
|
||||
"home-path".into(),
|
||||
"os-info".into(),
|
||||
"pid".into(),
|
||||
"scope".into(),
|
||||
"temp-path".into(),
|
||||
];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
|
||||
// Test completions for $nu.h (filter)
|
||||
let suggestions = completer.complete("$nu.h".into(), 5);
|
||||
|
||||
assert_eq!(2, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec!["history-path".into(), "home-path".into()];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
|
||||
// Test completions for custom var
|
||||
let suggestions = completer.complete("$actor.".into(), 7);
|
||||
|
||||
assert_eq!(2, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec!["age".into(), "name".into()];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
|
||||
// Test completions for custom var (filtering)
|
||||
let suggestions = completer.complete("$actor.n".into(), 7);
|
||||
|
||||
assert_eq!(1, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec!["name".into()];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
|
||||
// Test completions for $env
|
||||
let suggestions = completer.complete("$env.".into(), 5);
|
||||
|
||||
assert_eq!(2, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec!["PWD".into(), "TEST".into()];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
|
||||
// Test completions for $env (filtering)
|
||||
let suggestions = completer.complete("$env.T".into(), 5);
|
||||
|
||||
assert_eq!(1, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec!["TEST".into()];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
}
|
@ -4,11 +4,11 @@ description = "Color configuration code used by Nushell"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
name = "nu-color-config"
|
||||
version = "0.61.0"
|
||||
version = "0.63.1"
|
||||
|
||||
[dependencies]
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.61.0" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.63.1" }
|
||||
nu-ansi-term = "0.45.1"
|
||||
nu-json = { path = "../nu-json", version = "0.61.0" }
|
||||
nu-table = { path = "../nu-table", version = "0.61.0" }
|
||||
nu-json = { path = "../nu-json", version = "0.63.1" }
|
||||
nu-table = { path = "../nu-table", version = "0.63.1" }
|
||||
serde = { version="1.0.123", features=["derive"] }
|
||||
|
@ -161,6 +161,13 @@ pub fn lookup_ansi_color_style(s: &str) -> Style {
|
||||
"dgrbl" | "dark_gray_blink" => Color::DarkGray.blink(),
|
||||
"dgrst" | "dark_gray_strike" => Color::DarkGray.strikethrough(),
|
||||
|
||||
"def" | "default" => Color::Default.normal(),
|
||||
"defb" | "default_bold" => Color::Default.bold(),
|
||||
"defu" | "default_underline" => Color::Default.underline(),
|
||||
"defi" | "default_italic" => Color::Default.italic(),
|
||||
"defd" | "default_dimmed" => Color::Default.dimmed(),
|
||||
"defr" | "default_reverse" => Color::Default.reverse(),
|
||||
|
||||
_ => Color::White.normal(),
|
||||
}
|
||||
}
|
||||
|
@ -29,6 +29,7 @@ pub fn get_shape_color(shape: String, conf: &Config) -> Style {
|
||||
"shape_record" => Style::new().fg(Color::Cyan).bold(),
|
||||
"shape_block" => Style::new().fg(Color::Blue).bold(),
|
||||
"shape_filepath" => Style::new().fg(Color::Cyan),
|
||||
"shape_directory" => Style::new().fg(Color::Cyan),
|
||||
"shape_globpattern" => Style::new().fg(Color::Cyan).bold(),
|
||||
"shape_variable" => Style::new().fg(Color::Purple),
|
||||
"shape_flag" => Style::new().fg(Color::Blue).bold(),
|
||||
|
@ -4,34 +4,35 @@ description = "Nushell's built-in commands"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
name = "nu-command"
|
||||
version = "0.61.0"
|
||||
version = "0.63.1"
|
||||
build = "build.rs"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
nu-color-config = { path = "../nu-color-config", version = "0.61.0" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.61.0" }
|
||||
nu-glob = { path = "../nu-glob", version = "0.61.0" }
|
||||
nu-json = { path = "../nu-json", version = "0.61.0" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.61.0" }
|
||||
nu-path = { path = "../nu-path", version = "0.61.0" }
|
||||
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.61.0" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.61.0" }
|
||||
nu-system = { path = "../nu-system", version = "0.61.0" }
|
||||
nu-table = { path = "../nu-table", version = "0.61.0" }
|
||||
nu-term-grid = { path = "../nu-term-grid", version = "0.61.0" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.61.0" }
|
||||
nu-utils = { path = "../nu-utils", version = "0.61.0" }
|
||||
nu-color-config = { path = "../nu-color-config", version = "0.63.1" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.63.1" }
|
||||
nu-glob = { path = "../nu-glob", version = "0.63.1" }
|
||||
nu-json = { path = "../nu-json", version = "0.63.1" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.63.1" }
|
||||
nu-path = { path = "../nu-path", version = "0.63.1" }
|
||||
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.63.1" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.63.1" }
|
||||
nu-system = { path = "../nu-system", version = "0.63.1" }
|
||||
nu-table = { path = "../nu-table", version = "0.63.1" }
|
||||
nu-term-grid = { path = "../nu-term-grid", version = "0.63.1" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.63.1" }
|
||||
nu-utils = { path = "../nu-utils", version = "0.63.1" }
|
||||
nu-ansi-term = "0.45.1"
|
||||
|
||||
# Potential dependencies for extras
|
||||
alphanumeric-sort = "1.4.4"
|
||||
base64 = "0.13.0"
|
||||
bytesize = "1.1.0"
|
||||
calamine = "0.18.0"
|
||||
chrono = { version = "0.4.19", features = ["serde"] }
|
||||
chrono-humanize = "0.2.1"
|
||||
chrono-tz = "0.6.0"
|
||||
chrono-tz = "0.6.1"
|
||||
crossterm = "0.23.0"
|
||||
csv = "1.1.6"
|
||||
dialoguer = "0.9.0"
|
||||
@ -53,8 +54,10 @@ lscolors = { version = "0.9.0", features = ["crossterm"]}
|
||||
md5 = { package = "md-5", version = "0.10.0" }
|
||||
meval = "0.2.0"
|
||||
mime = "0.3.16"
|
||||
notify = "4.0.17"
|
||||
num = { version = "0.4.0", optional = true }
|
||||
pathdiff = "0.2.1"
|
||||
powierza-coefficient = "1.0"
|
||||
quick-xml = "0.22"
|
||||
rand = "0.8"
|
||||
rayon = "1.5.1"
|
||||
@ -67,7 +70,8 @@ serde_ini = "0.2.0"
|
||||
serde_urlencoded = "0.7.0"
|
||||
serde_yaml = "0.8.16"
|
||||
sha2 = "0.10.0"
|
||||
shadow-rs = "0.11.0"
|
||||
# Disable default features b/c the default features build Git (very slow to compile)
|
||||
shadow-rs = { version = "0.11.0", default-features = false }
|
||||
strip-ansi-escapes = "0.1.1"
|
||||
sysinfo = "0.23.5"
|
||||
terminal_size = "0.1.17"
|
||||
@ -78,26 +82,29 @@ unicode-segmentation = "1.8.0"
|
||||
url = "2.2.1"
|
||||
uuid = { version = "0.8.2", features = ["v4"] }
|
||||
which = { version = "4.2.2", optional = true }
|
||||
reedline = { version = "0.4.0", features = ["bashisms"]}
|
||||
reedline = { git = "https://github.com/nushell/reedline", branch = "main", features = ["bashisms"]}
|
||||
wax = { version = "0.4.0", features = ["diagnostics"] }
|
||||
zip = { version="0.5.9", optional = true }
|
||||
rusqlite = { version = "0.27.0", features = ["bundled"], optional = true }
|
||||
sqlparser = { version = "0.16.0", features = ["serde"], optional = true }
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
umask = "1.0.0"
|
||||
umask = "2.0.0"
|
||||
users = "0.11.0"
|
||||
|
||||
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies.trash]
|
||||
version = "2.0.2"
|
||||
version = "2.1.3"
|
||||
optional = true
|
||||
|
||||
[dependencies.polars]
|
||||
version = "0.20.0"
|
||||
version = "0.21.1"
|
||||
# path = "../../../../polars/polars"
|
||||
optional = true
|
||||
features = [
|
||||
"default", "parquet", "json", "serde", "object",
|
||||
"checked_arithmetic", "strings", "cum_agg", "is_in",
|
||||
"default", "to_dummies", "parquet", "json", "serde", "serde-lazy",
|
||||
"object", "checked_arithmetic", "strings", "cum_agg", "is_in",
|
||||
"rolling_window", "strings", "rows", "random",
|
||||
"dtype-datetime"
|
||||
"dtype-datetime", "dtype-struct", "lazy", "cross_join",
|
||||
"dynamic_groupby"
|
||||
]
|
||||
|
||||
[features]
|
||||
@ -105,9 +112,10 @@ trash-support = ["trash"]
|
||||
which-support = ["which"]
|
||||
plugin = ["nu-parser/plugin"]
|
||||
dataframe = ["polars", "num"]
|
||||
database = ["sqlparser", "rusqlite"]
|
||||
|
||||
[build-dependencies]
|
||||
shadow-rs = "0.11.0"
|
||||
shadow-rs = { version = "0.11.0", default-features = false }
|
||||
|
||||
[dev-dependencies]
|
||||
hamcrest2 = "0.3.0"
|
||||
|
1
crates/nu-command/assets/228_themes.json
Normal file
@ -1,3 +1,18 @@
|
||||
use std::process::Command;
|
||||
|
||||
fn main() -> shadow_rs::SdResult<()> {
|
||||
// Look up the current Git commit ourselves instead of relying on shadow_rs,
|
||||
// because shadow_rs does it in a really slow-to-compile way (it builds libgit2)
|
||||
let hash = get_git_hash().expect("failed to get latest git commit hash");
|
||||
println!("cargo:rustc-env=NU_COMMIT_HASH={}", hash);
|
||||
|
||||
shadow_rs::new()
|
||||
}
|
||||
|
||||
fn get_git_hash() -> Result<String, std::io::Error> {
|
||||
let out = Command::new("git").args(["rev-parse", "HEAD"]).output()?;
|
||||
Ok(String::from_utf8(out.stdout)
|
||||
.expect("could not convert stdout to string")
|
||||
.trim()
|
||||
.to_string())
|
||||
}
|
||||
|
317
crates/nu-command/src/charting/hashable_value.rs
Normal file
@ -0,0 +1,317 @@
|
||||
use chrono::{DateTime, FixedOffset};
|
||||
use nu_protocol::{ShellError, Span, Value};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
/// A subset of [Value](crate::Value), which is hashable.
|
||||
/// And it means that we can put the value into something like [HashMap](std::collections::HashMap) or [HashSet](std::collections::HashSet)
|
||||
/// for further usage like value statistics.
|
||||
///
|
||||
/// For now the main way to create a [HashableValue] is using [from_value](HashableValue::from_value)
|
||||
///
|
||||
/// Please note that although each variant contains a `span` field, the span is ignored during hashing and comparison.
|
||||
/// This means that the following will hold:
|
||||
/// ```text
|
||||
/// assert_eq!(HashableValue::Bool {val: true, span: Span{start: 0, end: 1}}, HashableValue::Bool {val: true, span: Span{start: 90, end: 1000}})
|
||||
/// ```
|
||||
#[derive(Eq, Debug)]
|
||||
pub enum HashableValue {
|
||||
Bool {
|
||||
val: bool,
|
||||
span: Span,
|
||||
},
|
||||
Int {
|
||||
val: i64,
|
||||
span: Span,
|
||||
},
|
||||
Float {
|
||||
val: [u8; 8], // because f64 is not hashable, we save it as [u8;8] array to make it hashable.
|
||||
span: Span,
|
||||
},
|
||||
Filesize {
|
||||
val: i64,
|
||||
span: Span,
|
||||
},
|
||||
Duration {
|
||||
val: i64,
|
||||
span: Span,
|
||||
},
|
||||
Date {
|
||||
val: DateTime<FixedOffset>,
|
||||
span: Span,
|
||||
},
|
||||
String {
|
||||
val: String,
|
||||
span: Span,
|
||||
},
|
||||
Binary {
|
||||
val: Vec<u8>,
|
||||
span: Span,
|
||||
},
|
||||
}
|
||||
|
||||
impl Default for HashableValue {
|
||||
fn default() -> Self {
|
||||
HashableValue::Bool {
|
||||
val: false,
|
||||
span: Span { start: 0, end: 0 },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HashableValue {
|
||||
/// Try to convert from `value` to self
|
||||
///
|
||||
/// A `span` is required because an error value may not carry a `span` field of its own.
|
||||
///
|
||||
/// If the given value is not hashable (mainly because it is structured data), an error is returned.
|
||||
pub fn from_value(value: Value, span: Span) -> Result<Self, ShellError> {
|
||||
match value {
|
||||
Value::Bool { val, span } => Ok(HashableValue::Bool { val, span }),
|
||||
Value::Int { val, span } => Ok(HashableValue::Int { val, span }),
|
||||
Value::Filesize { val, span } => Ok(HashableValue::Filesize { val, span }),
|
||||
Value::Duration { val, span } => Ok(HashableValue::Duration { val, span }),
|
||||
Value::Date { val, span } => Ok(HashableValue::Date { val, span }),
|
||||
Value::Float { val, span } => Ok(HashableValue::Float {
|
||||
val: val.to_ne_bytes(),
|
||||
span,
|
||||
}),
|
||||
Value::String { val, span } => Ok(HashableValue::String { val, span }),
|
||||
Value::Binary { val, span } => Ok(HashableValue::Binary { val, span }),
|
||||
|
||||
_ => {
|
||||
let input_span = value.span().unwrap_or(span);
|
||||
Err(ShellError::UnsupportedInput(
|
||||
format!("input value {value:?} is not hashable"),
|
||||
input_span,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert from self to nu's core data type `Value`.
|
||||
pub fn into_value(self) -> Value {
|
||||
match self {
|
||||
HashableValue::Bool { val, span } => Value::Bool { val, span },
|
||||
HashableValue::Int { val, span } => Value::Int { val, span },
|
||||
HashableValue::Filesize { val, span } => Value::Filesize { val, span },
|
||||
HashableValue::Duration { val, span } => Value::Duration { val, span },
|
||||
HashableValue::Date { val, span } => Value::Date { val, span },
|
||||
HashableValue::Float { val, span } => Value::Float {
|
||||
val: f64::from_ne_bytes(val),
|
||||
span,
|
||||
},
|
||||
HashableValue::String { val, span } => Value::String { val, span },
|
||||
HashableValue::Binary { val, span } => Value::Binary { val, span },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for HashableValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
match self {
|
||||
HashableValue::Bool { val, .. } => val.hash(state),
|
||||
HashableValue::Int { val, .. } => val.hash(state),
|
||||
HashableValue::Filesize { val, .. } => val.hash(state),
|
||||
HashableValue::Duration { val, .. } => val.hash(state),
|
||||
HashableValue::Date { val, .. } => val.hash(state),
|
||||
HashableValue::Float { val, .. } => val.hash(state),
|
||||
HashableValue::String { val, .. } => val.hash(state),
|
||||
HashableValue::Binary { val, .. } => val.hash(state),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for HashableValue {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(HashableValue::Bool { val: lhs, .. }, HashableValue::Bool { val: rhs, .. }) => {
|
||||
lhs == rhs
|
||||
}
|
||||
(HashableValue::Int { val: lhs, .. }, HashableValue::Int { val: rhs, .. }) => {
|
||||
lhs == rhs
|
||||
}
|
||||
(
|
||||
HashableValue::Filesize { val: lhs, .. },
|
||||
HashableValue::Filesize { val: rhs, .. },
|
||||
) => lhs == rhs,
|
||||
(
|
||||
HashableValue::Duration { val: lhs, .. },
|
||||
HashableValue::Duration { val: rhs, .. },
|
||||
) => lhs == rhs,
|
||||
(HashableValue::Date { val: lhs, .. }, HashableValue::Date { val: rhs, .. }) => {
|
||||
lhs == rhs
|
||||
}
|
||||
(HashableValue::Float { val: lhs, .. }, HashableValue::Float { val: rhs, .. }) => {
|
||||
lhs == rhs
|
||||
}
|
||||
(HashableValue::String { val: lhs, .. }, HashableValue::String { val: rhs, .. }) => {
|
||||
lhs == rhs
|
||||
}
|
||||
(HashableValue::Binary { val: lhs, .. }, HashableValue::Binary { val: rhs, .. }) => {
|
||||
lhs == rhs
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use nu_protocol::ast::{CellPath, PathMember};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
|
||||
#[test]
|
||||
fn from_value() {
|
||||
let span = Span::test_data();
|
||||
let values = vec![
|
||||
(
|
||||
Value::Bool { val: true, span },
|
||||
HashableValue::Bool { val: true, span },
|
||||
),
|
||||
(
|
||||
Value::Int { val: 1, span },
|
||||
HashableValue::Int { val: 1, span },
|
||||
),
|
||||
(
|
||||
Value::Filesize { val: 1, span },
|
||||
HashableValue::Filesize { val: 1, span },
|
||||
),
|
||||
(
|
||||
Value::Duration { val: 1, span },
|
||||
HashableValue::Duration { val: 1, span },
|
||||
),
|
||||
(
|
||||
Value::Date {
|
||||
val: DateTime::<FixedOffset>::parse_from_rfc2822(
|
||||
"Wed, 18 Feb 2015 23:16:09 GMT",
|
||||
)
|
||||
.unwrap(),
|
||||
span,
|
||||
},
|
||||
HashableValue::Date {
|
||||
val: DateTime::<FixedOffset>::parse_from_rfc2822(
|
||||
"Wed, 18 Feb 2015 23:16:09 GMT",
|
||||
)
|
||||
.unwrap(),
|
||||
span,
|
||||
},
|
||||
),
|
||||
(
|
||||
Value::String {
|
||||
val: "1".to_string(),
|
||||
span,
|
||||
},
|
||||
HashableValue::String {
|
||||
val: "1".to_string(),
|
||||
span,
|
||||
},
|
||||
),
|
||||
(
|
||||
Value::Binary { val: vec![1], span },
|
||||
HashableValue::Binary { val: vec![1], span },
|
||||
),
|
||||
];
|
||||
for (val, expect_hashable_val) in values.into_iter() {
|
||||
assert_eq!(
|
||||
HashableValue::from_value(val, Span { start: 0, end: 0 }).unwrap(),
|
||||
expect_hashable_val
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_unhashable_value() {
|
||||
let span = Span::test_data();
|
||||
let values = [
|
||||
Value::List {
|
||||
vals: vec![Value::Bool { val: true, span }],
|
||||
span,
|
||||
},
|
||||
Value::Block {
|
||||
val: 0,
|
||||
captures: HashMap::new(),
|
||||
span,
|
||||
},
|
||||
Value::Nothing { span },
|
||||
Value::Error {
|
||||
error: ShellError::DidYouMean("what?".to_string(), span),
|
||||
},
|
||||
Value::CellPath {
|
||||
val: CellPath {
|
||||
members: vec![PathMember::Int { val: 0, span }],
|
||||
},
|
||||
span,
|
||||
},
|
||||
];
|
||||
for v in values {
|
||||
assert!(HashableValue::from_value(v, Span { start: 0, end: 0 }).is_err())
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_to_tobe_same() {
|
||||
let span = Span::test_data();
|
||||
let values = vec![
|
||||
Value::Bool { val: true, span },
|
||||
Value::Int { val: 1, span },
|
||||
Value::Filesize { val: 1, span },
|
||||
Value::Duration { val: 1, span },
|
||||
Value::String {
|
||||
val: "1".to_string(),
|
||||
span,
|
||||
},
|
||||
Value::Binary { val: vec![1], span },
|
||||
];
|
||||
for val in values.into_iter() {
|
||||
let expected_val = val.clone();
|
||||
assert_eq!(
|
||||
HashableValue::from_value(val, Span { start: 0, end: 0 })
|
||||
.unwrap()
|
||||
.into_value(),
|
||||
expected_val
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hashable_value_eq_without_concern_span() {
|
||||
assert_eq!(
|
||||
HashableValue::Bool {
|
||||
val: true,
|
||||
span: Span { start: 0, end: 1 }
|
||||
},
|
||||
HashableValue::Bool {
|
||||
val: true,
|
||||
span: Span {
|
||||
start: 90,
|
||||
end: 1000
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn put_to_hashset() {
|
||||
let span = Span::test_data();
|
||||
let mut set = HashSet::new();
|
||||
set.insert(HashableValue::Bool { val: true, span });
|
||||
assert!(set.contains(&HashableValue::Bool { val: true, span }));
|
||||
|
||||
// hashable value doesn't care about span.
|
||||
let diff_span = Span { start: 1, end: 2 };
|
||||
set.insert(HashableValue::Bool {
|
||||
val: true,
|
||||
span: diff_span,
|
||||
});
|
||||
assert!(set.contains(&HashableValue::Bool { val: true, span }));
|
||||
assert!(set.contains(&HashableValue::Bool {
|
||||
val: true,
|
||||
span: diff_span
|
||||
}));
|
||||
assert_eq!(set.len(), 1);
|
||||
|
||||
set.insert(HashableValue::Int { val: 2, span });
|
||||
assert_eq!(set.len(), 2);
|
||||
}
|
||||
}
|
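Before the histogram command that follows, a minimal sketch (illustrative only, assuming the HashableValue type and its from_value constructor defined above are in scope) of the kind of frequency counting this type enables:

use std::collections::HashMap;

use nu_protocol::{ShellError, Span, Value};

// Count how often each hashable value occurs in a list; structured values
// (records, lists, blocks, ...) are rejected by from_value and surface as errors.
fn count_occurrences(
    values: Vec<Value>,
    span: Span,
) -> Result<HashMap<HashableValue, u64>, ShellError> {
    let mut counts: HashMap<HashableValue, u64> = HashMap::new();
    for v in values {
        let key = HashableValue::from_value(v, span)?;
        *counts.entry(key).or_insert(0) += 1;
    }
    Ok(counts)
}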
256
crates/nu-command/src/charting/histogram.rs
Normal file
@ -0,0 +1,256 @@
|
||||
use super::hashable_value::HashableValue;
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape,
|
||||
Value,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
use std::iter;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Histogram;
|
||||
|
||||
enum PercentageCalcMethod {
|
||||
Normalize,
|
||||
Relative,
|
||||
}
|
||||
|
||||
impl Command for Histogram {
|
||||
fn name(&self) -> &str {
|
||||
"histogram"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("histogram")
|
||||
.optional("column-name", SyntaxShape::String, "column name to calculate frequency for; not needed if the input is just a list")
|
||||
.optional("frequency-column-name", SyntaxShape::String, "name of the histogram's frequency column; defaults to 'frequency'")
|
||||
.named("percentage-type", SyntaxShape::String, "method used to calculate the percentage, either 'normalize' or 'relative'; defaults to 'normalize'", Some('t'))
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with a histogram based on the column name passed in."
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Get a histogram for the types of files",
|
||||
example: "ls | histogram type",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Get a histogram for the types of files, with frequency column named freq",
|
||||
example: "ls | histogram type freq",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Get a histogram for a list of numbers",
|
||||
example: "echo [1 2 3 1 1 1 2 2 1 1] | histogram",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Get a histogram for a list of numbers, where the percentage is based on the maximum value",
|
||||
example: "echo [1 2 3 1 1 1 2 2 1 1] | histogram --percentage-type relative",
|
||||
result: None,
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
// input check.
|
||||
let column_name: Option<Spanned<String>> = call.opt(engine_state, stack, 0)?;
|
||||
let frequency_name_arg = call.opt::<Spanned<String>>(engine_state, stack, 1)?;
|
||||
let frequency_column_name = match frequency_name_arg {
|
||||
Some(inner) => {
|
||||
let span = inner.span;
|
||||
if ["value", "count", "quantile", "percentage"].contains(&inner.item.as_str()) {
|
||||
return Err(ShellError::UnsupportedInput(
|
||||
"frequency-column-name can't be 'value', 'count', 'quantile' or 'percentage'"
|
||||
.to_string(),
|
||||
span,
|
||||
));
|
||||
}
|
||||
inner.item
|
||||
}
|
||||
None => "frequency".to_string(),
|
||||
};
|
||||
|
||||
let calc_method: Option<Spanned<String>> =
|
||||
call.get_flag(engine_state, stack, "percentage-type")?;
|
||||
let calc_method = match calc_method {
|
||||
None => PercentageCalcMethod::Normalize,
|
||||
Some(inner) => match inner.item.as_str() {
|
||||
"normalize" => PercentageCalcMethod::Normalize,
|
||||
"relative" => PercentageCalcMethod::Relative,
|
||||
_ => {
|
||||
return Err(ShellError::UnsupportedInput(
|
||||
"calc method can only be 'normalize' or 'relative'".to_string(),
|
||||
inner.span,
|
||||
))
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
let span = call.head;
|
||||
let data_as_value = input.into_value(span);
|
||||
// If `input` is not a list, return an error here.
|
||||
match data_as_value.as_list() {
|
||||
Ok(list_value) => run_histogram(
|
||||
list_value.to_vec(),
|
||||
column_name,
|
||||
frequency_column_name,
|
||||
calc_method,
|
||||
span,
|
||||
),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn run_histogram(
|
||||
values: Vec<Value>,
|
||||
column_name: Option<Spanned<String>>,
|
||||
freq_column: String,
|
||||
calc_method: PercentageCalcMethod,
|
||||
head_span: Span,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let mut inputs = vec![];
|
||||
// convert from inputs to hashable values.
|
||||
match column_name {
|
||||
None => {
|
||||
// Invalid input scenario to handle:
// the input is expected to be a list of hashable values; if any value is not hashable, return an error.
|
||||
for v in values {
|
||||
let current_span = v.span().unwrap_or(head_span);
|
||||
inputs.push(HashableValue::from_value(v, head_span).map_err(|_| {
|
||||
ShellError::UnsupportedInput(
|
||||
"--column-name is not provided, can only support a list of simple value."
|
||||
.to_string(),
|
||||
current_span,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
}
|
||||
Some(ref col) => {
|
||||
// Invalid input scenarios to handle:
// * an item in `input` is not a record: skip it.
// * a record doesn't contain the specified column: skip it.
// * no record contains the specified column: return an error indicating that at least one row must contain it.
// * a record contains a value which can't be hashed: skip it.
|
||||
let col_name = &col.item;
|
||||
for v in values {
|
||||
match v {
|
||||
// parse record, and fill valid value to actual input.
|
||||
Value::Record { cols, vals, .. } => {
|
||||
for (c, v) in iter::zip(cols, vals) {
|
||||
if &c == col_name {
|
||||
if let Ok(v) = HashableValue::from_value(v, head_span) {
|
||||
inputs.push(v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => continue,
|
||||
}
|
||||
}
|
||||
|
||||
if inputs.is_empty() {
|
||||
return Err(ShellError::UnsupportedInput(
|
||||
format!("expect input is table, and inputs doesn't contain any value which has {col_name} column"),
|
||||
head_span,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let value_column_name = column_name
|
||||
.map(|x| x.item)
|
||||
.unwrap_or_else(|| "value".to_string());
|
||||
Ok(histogram_impl(
|
||||
inputs,
|
||||
&value_column_name,
|
||||
calc_method,
|
||||
&freq_column,
|
||||
head_span,
|
||||
))
|
||||
}
|
||||
|
||||
fn histogram_impl(
|
||||
inputs: Vec<HashableValue>,
|
||||
value_column_name: &str,
|
||||
calc_method: PercentageCalcMethod,
|
||||
freq_column: &str,
|
||||
span: Span,
|
||||
) -> PipelineData {
|
||||
// At this point `inputs` is guaranteed to be non-empty, and every element
// is a simple value that can be counted.
|
||||
let mut counter = HashMap::new();
|
||||
let mut max_cnt = 0;
|
||||
let total_cnt = inputs.len();
|
||||
for i in inputs {
|
||||
let new_cnt = *counter.get(&i).unwrap_or(&0) + 1;
|
||||
counter.insert(i, new_cnt);
|
||||
if new_cnt > max_cnt {
|
||||
max_cnt = new_cnt;
|
||||
}
|
||||
}
|
||||
|
||||
let mut result = vec![];
|
||||
let result_cols = vec![
|
||||
value_column_name.to_string(),
|
||||
"count".to_string(),
|
||||
"quantile".to_string(),
|
||||
"percentage".to_string(),
|
||||
freq_column.to_string(),
|
||||
];
|
||||
const MAX_FREQ_COUNT: f64 = 100.0;
|
||||
for (val, count) in counter.into_iter() {
|
||||
let quantile = match calc_method {
|
||||
PercentageCalcMethod::Normalize => (count as f64 / total_cnt as f64),
|
||||
PercentageCalcMethod::Relative => (count as f64 / max_cnt as f64),
|
||||
};
|
||||
|
||||
let percentage = format!("{:.2}%", quantile * 100_f64);
|
||||
let freq = "*".repeat((MAX_FREQ_COUNT * quantile).floor() as usize);
|
||||
|
||||
result.push(Value::Record {
|
||||
cols: result_cols.clone(),
|
||||
vals: vec![
|
||||
val.into_value(),
|
||||
Value::Int { val: count, span },
|
||||
Value::Float {
|
||||
val: quantile,
|
||||
span,
|
||||
},
|
||||
Value::String {
|
||||
val: percentage,
|
||||
span,
|
||||
},
|
||||
Value::String { val: freq, span },
|
||||
],
|
||||
span,
|
||||
});
|
||||
}
|
||||
Value::List { vals: result, span }.into_pipeline_data()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(Histogram)
|
||||
}
|
||||
}
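As a side note (not part of the diff itself): a minimal standalone Rust sketch of the two --percentage-type modes implemented above, run on the sample input from the command's examples. The helper name `percentages` and the `main` driver are illustrative only.

use std::collections::HashMap;

// Count occurrences of each value and compute its ratio under both
// --percentage-type modes: (value, normalize, relative).
fn percentages(values: &[i64]) -> Vec<(i64, f64, f64)> {
    let mut counter: HashMap<i64, usize> = HashMap::new();
    for v in values {
        *counter.entry(*v).or_insert(0) += 1;
    }
    let total = values.len() as f64;
    let max = *counter.values().max().unwrap_or(&1) as f64;
    counter
        .into_iter()
        .map(|(v, c)| (v, c as f64 / total, c as f64 / max))
        .collect()
}

fn main() {
    // Same data as `echo [1 2 3 1 1 1 2 2 1 1] | histogram`:
    // 1 occurs 6 times -> normalize 60.00%, relative 100.00%
    // 2 occurs 3 times -> normalize 30.00%, relative 50.00%
    // 3 occurs 1 time  -> normalize 10.00%, relative 16.67%
    for (v, norm, rel) in percentages(&[1, 2, 3, 1, 1, 1, 2, 2, 1, 1]) {
        println!("{v}: normalize {:.2}%, relative {:.2}%", norm * 100.0, rel * 100.0);
    }
}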
|
4
crates/nu-command/src/charting/mod.rs
Normal file
@ -0,0 +1,4 @@
|
||||
mod hashable_value;
|
||||
mod histogram;
|
||||
|
||||
pub use histogram::Histogram;
|
@ -21,6 +21,10 @@ impl Command for Fmt {
|
||||
Signature::build("fmt").category(Category::Conversions)
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["display", "render", "format"]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Get a record containing multiple formats for the number 42",
|
||||
|
@ -28,6 +28,10 @@ impl Command for SubCommand {
|
||||
"Convert value to a binary primitive"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "binary", "bytes", "bin"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
|
@ -27,6 +27,10 @@ impl Command for SubCommand {
|
||||
"Convert value to boolean"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "boolean", "true", "false", "1", "0"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
@ -136,11 +140,14 @@ fn string_to_boolean(s: &str, span: Span) -> Result<bool, ShellError> {
|
||||
let val = o.parse::<f64>();
|
||||
match val {
|
||||
Ok(f) => Ok(f.abs() >= f64::EPSILON),
|
||||
Err(_) => Err(ShellError::CantConvertWithHelp(
|
||||
Err(_) => Err(ShellError::CantConvert(
|
||||
"boolean".to_string(),
|
||||
"string".to_string(),
|
||||
span,
|
||||
r#"the strings "true" and "false" can be converted into a bool"#.to_string(),
|
||||
Some(
|
||||
r#"the strings "true" and "false" can be converted into a bool"#
|
||||
.to_string(),
|
||||
),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
@ -24,13 +24,13 @@ enum Zone {
|
||||
Local,
|
||||
East(u8),
|
||||
West(u8),
|
||||
Error, // we want the nullshell to cast it instead of rust
|
||||
Error, // we want Nushell to cast it instead of Rust
|
||||
}
|
||||
|
||||
impl Zone {
|
||||
fn new(i: i64) -> Self {
|
||||
if i.abs() <= 12 {
|
||||
// guanranteed here
|
||||
// guaranteed here
|
||||
if i >= 0 {
|
||||
Self::East(i as u8) // won't go out of range
|
||||
} else {
|
||||
@ -59,29 +59,29 @@ impl Command for SubCommand {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("into datetime")
|
||||
.switch(
|
||||
"list",
|
||||
"lists strftime cheatsheet",
|
||||
Some('l'),
|
||||
)
|
||||
.named(
|
||||
"timezone",
|
||||
SyntaxShape::String,
|
||||
"Specify timezone if the input is timestamp, like 'UTC/u' or 'LOCAL/l'",
|
||||
"Specify timezone if the input is a Unix timestamp. Valid options: 'UTC' ('u') or 'LOCAL' ('l')",
|
||||
Some('z'),
|
||||
)
|
||||
.named(
|
||||
"offset",
|
||||
SyntaxShape::Int,
|
||||
"Specify timezone by offset if the input is timestamp, like '+8', '-4', prior than timezone",
|
||||
"Specify timezone by offset from UTC if the input is a Unix timestamp, like '+8', '-4'",
|
||||
Some('o'),
|
||||
)
|
||||
.named(
|
||||
"format",
|
||||
SyntaxShape::String,
|
||||
"Specify date and time formatting",
|
||||
"Specify an expected format for parsing strings to datetimes. Use --list to see all possible options",
|
||||
Some('f'),
|
||||
)
|
||||
.switch(
|
||||
"list",
|
||||
"Show all possible variables for use with the --format flag",
|
||||
Some('l'),
|
||||
)
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
@ -104,32 +104,48 @@ impl Command for SubCommand {
|
||||
"Convert text into a datetime"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "date", "time", "timezone", "UTC"]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Convert to datetime",
|
||||
example: "'16.11.1984 8:00 am +0000' | into datetime",
|
||||
result: None,
|
||||
example: "'27.02.2021 1:55 pm +0000' | into datetime",
|
||||
result: Some(Value::Date {
|
||||
val: Utc.timestamp(1614434100, 0).into(),
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Convert to datetime",
|
||||
example: "'2020-08-04T16:39:18+00:00' | into datetime",
|
||||
result: None,
|
||||
example: "'2021-02-27T13:55:40+00:00' | into datetime",
|
||||
result: Some(Value::Date {
|
||||
val: Utc.timestamp(1614434140, 0).into(),
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Convert to datetime using a custom format",
|
||||
example: "'20200904_163918+0000' | into datetime -f '%Y%m%d_%H%M%S%z'",
|
||||
result: None,
|
||||
example: "'20210227_135540+0000' | into datetime -f '%Y%m%d_%H%M%S%z'",
|
||||
result: Some(Value::Date {
|
||||
val: Utc.timestamp(1614434140, 0).into(),
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Convert timestamp (no larger than 8e+12) to datetime using a specified timezone",
|
||||
example: "'1614434140' | into datetime -z 'UTC'",
|
||||
result: None,
|
||||
description: "Convert timestamp (no larger than 8e+12) to a UTC datetime",
|
||||
example: "1614434140 | into datetime",
|
||||
result: Some(Value::Date {
|
||||
val: Utc.timestamp(1614434140, 0).into(),
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Convert timestamp (no larger than 8e+12) to datetime using a specified timezone offset (between -12 and 12)",
|
||||
example: "'1614434140' | into datetime -o +9",
|
||||
example: "1614434140 | into datetime -o +9",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
@ -205,66 +221,86 @@ fn action(
|
||||
dateformat: &Option<DatetimeFormat>,
|
||||
head: Span,
|
||||
) -> Value {
|
||||
match input {
|
||||
Value::String { val: s, span } => {
|
||||
let ts = s.parse::<i64>();
|
||||
// if timezone if specified, first check if the input is a timestamp.
|
||||
if let Some(tz) = timezone {
|
||||
const TIMESTAMP_BOUND: i64 = 8.2e+12 as i64;
|
||||
// Since the timestamp method of chrono itself don't throw an error (it just panicked)
|
||||
// We have to manually guard it.
|
||||
if let Ok(t) = ts {
|
||||
if t.abs() > TIMESTAMP_BOUND {
|
||||
return Value::Error{error: ShellError::UnsupportedInput(
|
||||
"Given timestamp is out of range, it should between -8e+12 and 8e+12".to_string(),
|
||||
head,
|
||||
)};
|
||||
}
|
||||
const HOUR: i32 = 3600;
|
||||
let stampout = match tz.item {
|
||||
Zone::Utc => Value::Date {
|
||||
val: Utc.timestamp(t, 0).into(),
|
||||
span: head,
|
||||
},
|
||||
Zone::Local => Value::Date {
|
||||
val: Local.timestamp(t, 0).into(),
|
||||
span: head,
|
||||
},
|
||||
Zone::East(i) => {
|
||||
let eastoffset = FixedOffset::east((i as i32) * HOUR);
|
||||
Value::Date {
|
||||
val: eastoffset.timestamp(t, 0),
|
||||
span: head,
|
||||
}
|
||||
}
|
||||
Zone::West(i) => {
|
||||
let westoffset = FixedOffset::west((i as i32) * HOUR);
|
||||
Value::Date {
|
||||
val: westoffset.timestamp(t, 0),
|
||||
span: head,
|
||||
}
|
||||
}
|
||||
Zone::Error => Value::Error {
|
||||
error: ShellError::UnsupportedInput(
|
||||
"Cannot convert given timezone or offset to timestamp".to_string(),
|
||||
tz.span,
|
||||
),
|
||||
},
|
||||
};
|
||||
return stampout;
|
||||
}
|
||||
// Check to see if input looks like a Unix timestamp (i.e. can it be parsed to an int?)
|
||||
let timestamp = match input {
|
||||
Value::Int { val, .. } => Ok(*val),
|
||||
Value::String { val, .. } => val.parse::<i64>(),
|
||||
other => {
|
||||
return Value::Error {
|
||||
error: ShellError::UnsupportedInput(
|
||||
format!("Expected string or int, got {} instead", other.get_type()),
|
||||
head,
|
||||
),
|
||||
};
|
||||
// if it's not, continue and default to the system's local timezone.
|
||||
let out = match dateformat {
|
||||
Some(dt) => match DateTime::parse_from_str(s, &dt.0) {
|
||||
}
|
||||
};
|
||||
|
||||
if let Ok(ts) = timestamp {
|
||||
const TIMESTAMP_BOUND: i64 = 8.2e+12 as i64;
|
||||
const HOUR: i32 = 3600;
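// Timezone offsets are given in whole hours east or west of UTC; e.g. `-o +9`
// becomes FixedOffset::east(9 * 3600), i.e. UTC+09:00.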
|
||||
|
||||
if ts.abs() > TIMESTAMP_BOUND {
|
||||
return Value::Error {
|
||||
error: ShellError::UnsupportedInput(
|
||||
"Given timestamp is out of range, it should between -8e+12 and 8e+12"
|
||||
.to_string(),
|
||||
head,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
return match timezone {
|
||||
// default to UTC
|
||||
None => Value::Date {
|
||||
val: Utc.timestamp(ts, 0).into(),
|
||||
span: head,
|
||||
},
|
||||
Some(Spanned { item, span }) => match item {
|
||||
Zone::Utc => Value::Date {
|
||||
val: Utc.timestamp(ts, 0).into(),
|
||||
span: head,
|
||||
},
|
||||
Zone::Local => Value::Date {
|
||||
val: Local.timestamp(ts, 0).into(),
|
||||
span: head,
|
||||
},
|
||||
Zone::East(i) => {
|
||||
let eastoffset = FixedOffset::east((*i as i32) * HOUR);
|
||||
Value::Date {
|
||||
val: eastoffset.timestamp(ts, 0),
|
||||
span: head,
|
||||
}
|
||||
}
|
||||
Zone::West(i) => {
|
||||
let westoffset = FixedOffset::west((*i as i32) * HOUR);
|
||||
Value::Date {
|
||||
val: westoffset.timestamp(ts, 0),
|
||||
span: head,
|
||||
}
|
||||
}
|
||||
Zone::Error => Value::Error {
|
||||
error: ShellError::UnsupportedInput(
|
||||
"Cannot convert given timezone or offset to timestamp".to_string(),
|
||||
*span,
|
||||
),
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// If input is not a timestamp, try parsing it as a string
|
||||
match input {
|
||||
Value::String { val, span } => {
|
||||
match dateformat {
|
||||
Some(dt) => match DateTime::parse_from_str(val, &dt.0) {
|
||||
Ok(d) => Value::Date { val: d, span: head },
|
||||
Err(reason) => {
|
||||
return Value::Error {
|
||||
error: ShellError::CantConvertWithHelp(
|
||||
error: ShellError::CantConvert(
|
||||
format!("could not parse as datetime using format '{}'", dt.0),
|
||||
reason.to_string(),
|
||||
head,
|
||||
"you can use `into datetime` without a format string to enable flexible parsing".to_string()
|
||||
Some("you can use `into datetime` without a format string to enable flexible parsing".to_string())
|
||||
),
|
||||
}
|
||||
}
|
||||
@ -272,23 +308,21 @@ fn action(
|
||||
// Tries to automatically parse the date
|
||||
// (i.e. without a format string)
|
||||
// and assumes the system's local timezone if none is specified
|
||||
None => match parse_date_from_string(s, *span) {
|
||||
None => match parse_date_from_string(val, *span) {
|
||||
Ok(date) => Value::Date {
|
||||
val: date,
|
||||
span: *span,
|
||||
},
|
||||
Err(err) => err,
|
||||
},
|
||||
};
|
||||
|
||||
out
|
||||
}
|
||||
other => {
|
||||
let got = format!("Expected string, got {} instead", other.get_type());
|
||||
Value::Error {
|
||||
error: ShellError::UnsupportedInput(got, head),
|
||||
}
|
||||
}
|
||||
other => Value::Error {
|
||||
error: ShellError::UnsupportedInput(
|
||||
format!("Expected string, got {} instead", other.get_type()),
|
||||
head,
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -347,6 +381,23 @@ mod tests {
|
||||
assert_eq!(actual, expected)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn takes_timestamp_offset_as_int() {
|
||||
let date_int = Value::test_int(1614434140);
|
||||
let timezone_option = Some(Spanned {
|
||||
item: Zone::East(8),
|
||||
span: Span::test_data(),
|
||||
});
|
||||
let actual = action(&date_int, &timezone_option, &None, Span::test_data());
|
||||
let expected = Value::Date {
|
||||
val: DateTime::parse_from_str("2021-02-27 21:55:40 +08:00", "%Y-%m-%d %H:%M:%S %z")
|
||||
.unwrap(),
|
||||
span: Span::test_data(),
|
||||
};
|
||||
|
||||
assert_eq!(actual, expected)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn takes_timestamp() {
|
||||
let date_str = Value::test_string("1614434140");
|
||||
@ -363,6 +414,20 @@ mod tests {
|
||||
assert_eq!(actual, expected)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn takes_timestamp_without_timezone() {
|
||||
let date_str = Value::test_string("1614434140");
|
||||
let timezone_option = None;
|
||||
let actual = action(&date_str, &timezone_option, &None, Span::test_data());
|
||||
|
||||
let expected = Value::Date {
|
||||
val: Utc.timestamp(1614434140, 0).into(),
|
||||
span: Span::test_data(),
|
||||
};
|
||||
|
||||
assert_eq!(actual, expected)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn takes_invalid_timestamp() {
|
||||
let date_str = Value::test_string("10440970000000");
|
||||
|
@ -25,6 +25,10 @@ impl Command for SubCommand {
|
||||
"Convert text into a decimal"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "number", "floating"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
@ -101,7 +105,12 @@ fn action(input: &Value, head: Span) -> Value {
|
||||
match other.parse::<f64>() {
|
||||
Ok(x) => Value::Float { val: x, span: head },
|
||||
Err(reason) => Value::Error {
|
||||
error: ShellError::CantConvert("float".to_string(), reason.to_string(), *span),
|
||||
error: ShellError::CantConvert(
|
||||
"float".to_string(),
|
||||
reason.to_string(),
|
||||
*span,
|
||||
None,
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -28,6 +28,10 @@ impl Command for SubCommand {
|
||||
"Convert value to duration"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "time", "period"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
@ -151,11 +155,11 @@ fn string_to_duration(s: &str, span: Span) -> Result<i64, ShellError> {
|
||||
}
|
||||
}
|
||||
|
||||
Err(ShellError::CantConvertWithHelp(
|
||||
Err(ShellError::CantConvert(
|
||||
"duration".to_string(),
|
||||
"string".to_string(),
|
||||
span,
|
||||
"supported units are ns, us, ms, sec, min, hr, day, and wk".to_string(),
|
||||
Some("supported units are ns, us, ms, sec, min, hr, day, and wk".to_string()),
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -27,6 +27,10 @@ impl Command for SubCommand {
|
||||
"Convert value to filesize"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "number", "size", "bytes"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
@ -129,6 +133,10 @@ pub fn action(input: &Value, span: Span) -> Value {
|
||||
},
|
||||
Err(error) => Value::Error { error },
|
||||
},
|
||||
Value::Nothing { .. } => Value::Filesize {
|
||||
val: 0,
|
||||
span: value_span,
|
||||
},
|
||||
_ => Value::Error {
|
||||
error: ShellError::UnsupportedInput(
|
||||
"'into filesize' for unsupported type".into(),
|
||||
@ -148,7 +156,12 @@ pub fn action(input: &Value, span: Span) -> Value {
|
||||
fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
|
||||
match a_string.trim().parse::<bytesize::ByteSize>() {
|
||||
Ok(n) => Ok(n.0 as i64),
|
||||
Err(_) => Err(ShellError::CantConvert("int".into(), "string".into(), span)),
|
||||
Err(_) => Err(ShellError::CantConvert(
|
||||
"int".into(),
|
||||
"string".into(),
|
||||
span,
|
||||
None,
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -33,6 +33,10 @@ impl Command for SubCommand {
|
||||
"Convert value to integer"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "number", "natural"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
@ -81,6 +85,11 @@ impl Command for SubCommand {
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Convert date to integer (Unix timestamp)",
|
||||
example: "2022-02-02 | into int",
|
||||
result: Some(Value::test_int(1643760000)),
|
||||
},
|
||||
Example {
|
||||
description: "Convert to integer from binary",
|
||||
example: "'1101' | into int -r 2",
|
||||
@ -177,6 +186,10 @@ pub fn action(input: &Value, span: Span, radix: u32) -> Value {
|
||||
Value::Int { val: 0, span }
|
||||
}
|
||||
}
|
||||
Value::Date { val, .. } => Value::Int {
|
||||
val: val.timestamp(),
|
||||
span,
|
||||
},
|
||||
_ => Value::Error {
|
||||
error: ShellError::UnsupportedInput("'into int' for unsupported type".into(), span),
|
||||
},
|
||||
@ -207,7 +220,7 @@ fn convert_int(input: &Value, head: Span, radix: u32) -> Value {
|
||||
match i64::from_str_radix(&i, radix) {
|
||||
Ok(n) => Value::Int { val: n, span: head },
|
||||
Err(_reason) => Value::Error {
|
||||
error: ShellError::CantConvert("int".to_string(), "string".to_string(), head),
|
||||
error: ShellError::CantConvert("int".to_string(), "string".to_string(), head, None),
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -219,29 +232,30 @@ fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
|
||||
let num = match i64::from_str_radix(b.trim_start_matches("0b"), 2) {
|
||||
Ok(n) => n,
|
||||
Err(_reason) => {
|
||||
return Err(ShellError::CantConvertWithHelp(
|
||||
return Err(ShellError::CantConvert(
|
||||
"int".to_string(),
|
||||
"string".to_string(),
|
||||
span,
|
||||
r#"digits following "0b" can only be 0 or 1"#.to_string(),
|
||||
Some(r#"digits following "0b" can only be 0 or 1"#.to_string()),
|
||||
))
|
||||
}
|
||||
};
|
||||
Ok(num)
|
||||
}
|
||||
h if h.starts_with("0x") => {
|
||||
let num = match i64::from_str_radix(h.trim_start_matches("0x"), 16) {
|
||||
Ok(n) => n,
|
||||
Err(_reason) => {
|
||||
return Err(ShellError::CantConvertWithHelp(
|
||||
let num =
|
||||
match i64::from_str_radix(h.trim_start_matches("0x"), 16) {
|
||||
Ok(n) => n,
|
||||
Err(_reason) => return Err(ShellError::CantConvert(
|
||||
"int".to_string(),
|
||||
"string".to_string(),
|
||||
span,
|
||||
r#"hexadecimal digits following "0x" should be in 0-9, a-f, or A-F"#
|
||||
.to_string(),
|
||||
))
|
||||
}
|
||||
};
|
||||
Some(
|
||||
r#"hexadecimal digits following "0x" should be in 0-9, a-f, or A-F"#
|
||||
.to_string(),
|
||||
),
|
||||
)),
|
||||
};
|
||||
Ok(num)
|
||||
}
|
||||
_ => match a_string.parse::<i64>() {
|
||||
@ -252,6 +266,7 @@ fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
|
||||
"int".to_string(),
|
||||
"string".to_string(),
|
||||
span,
|
||||
None,
|
||||
)),
|
||||
},
|
||||
},
|
||||
|
@ -37,6 +37,10 @@ impl Command for SubCommand {
|
||||
"Convert value to string"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["convert", "str", "text"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
@ -138,7 +142,7 @@ fn string_helper(
|
||||
let head = call.head;
|
||||
let decimals_value: Option<i64> = call.get_flag(engine_state, stack, "decimals")?;
|
||||
let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
|
||||
let config = stack.get_config().unwrap_or_default();
|
||||
let config = engine_state.get_config().clone();
|
||||
|
||||
if let Some(decimal_val) = decimals_value {
|
||||
if decimals && decimal_val.is_negative() {
|
||||
@ -244,7 +248,7 @@ pub fn action(
|
||||
span,
|
||||
},
|
||||
Value::Nothing { .. } => Value::String {
|
||||
val: "nothing".to_string(),
|
||||
val: "".to_string(),
|
||||
span,
|
||||
},
|
||||
Value::Record {
|
||||
@ -258,15 +262,20 @@ pub fn action(
|
||||
),
|
||||
},
|
||||
Value::Binary { .. } => Value::Error {
|
||||
error: ShellError::CantConvertWithHelp(
|
||||
error: ShellError::CantConvert(
|
||||
"string".into(),
|
||||
"binary".into(),
|
||||
span,
|
||||
"try using the `decode` command".into(),
|
||||
Some("try using the `decode` command".into()),
|
||||
),
|
||||
},
|
||||
x => Value::Error {
|
||||
error: ShellError::CantConvert(String::from("string"), x.get_type().to_string(), span),
|
||||
error: ShellError::CantConvert(
|
||||
String::from("string"),
|
||||
x.get_type().to_string(),
|
||||
span,
|
||||
None,
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -26,8 +26,8 @@ impl Command for Alias {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -25,12 +25,12 @@ impl Command for Debug {
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
_stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let config = stack.get_config().unwrap_or_default();
|
||||
let config = engine_state.get_config().clone();
|
||||
let raw = call.has_flag("raw");
|
||||
|
||||
input.map(
|
||||
|
@ -27,8 +27,8 @@ impl Command for Def {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -27,8 +27,8 @@ impl Command for DefEnv {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -13,7 +13,7 @@ impl Command for Describe {
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Describe the value(s) piped in."
|
||||
"Describe the type and structure of the value(s) piped in."
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
@ -35,8 +35,13 @@ impl Command for Describe {
|
||||
))
|
||||
} else {
|
||||
let value = input.into_value(call.head);
|
||||
let description = match value {
|
||||
Value::CustomValue { val, .. } => val.value_string(),
|
||||
_ => value.get_type().to_string(),
|
||||
};
|
||||
|
||||
Ok(Value::String {
|
||||
val: value.get_type().to_string(),
|
||||
val: description,
|
||||
span: head,
|
||||
}
|
||||
.into_pipeline_data())
|
||||
@ -50,6 +55,10 @@ impl Command for Describe {
|
||||
result: Some(Value::test_string("string")),
|
||||
}]
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["type", "typeof", "info", "structure"]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -24,6 +24,10 @@ impl Command for ErrorMake {
|
||||
"Create an error."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["err", "panic", "crash", "throw"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
@ -36,26 +40,30 @@ impl Command for ErrorMake {
|
||||
let arg: Option<Value> = call.opt(engine_state, stack, 0)?;
|
||||
|
||||
if let Some(arg) = arg {
|
||||
Ok(make_error(&arg)
|
||||
Ok(make_error(&arg, span)
|
||||
.map(|err| Value::Error { error: err })
|
||||
.unwrap_or_else(|| Value::Error {
|
||||
error: ShellError::SpannedLabeledError(
|
||||
error: ShellError::GenericError(
|
||||
"Creating error value not supported.".into(),
|
||||
"unsupported error format".into(),
|
||||
span,
|
||||
Some(span),
|
||||
None,
|
||||
Vec::new(),
|
||||
),
|
||||
})
|
||||
.into_pipeline_data())
|
||||
} else {
|
||||
input.map(
|
||||
move |value| {
|
||||
make_error(&value)
|
||||
make_error(&value, span)
|
||||
.map(|err| Value::Error { error: err })
|
||||
.unwrap_or_else(|| Value::Error {
|
||||
error: ShellError::SpannedLabeledError(
|
||||
error: ShellError::GenericError(
|
||||
"Creating error value not supported.".into(),
|
||||
"unsupported error format".into(),
|
||||
span,
|
||||
Some(span),
|
||||
None,
|
||||
Vec::new(),
|
||||
),
|
||||
})
|
||||
},
|
||||
@ -85,7 +93,7 @@ impl Command for ErrorMake {
|
||||
}
|
||||
}
|
||||
|
||||
fn make_error(value: &Value) -> Option<ShellError> {
|
||||
fn make_error(value: &Value, throw_span: Span) -> Option<ShellError> {
|
||||
if let Value::Record { .. } = &value {
|
||||
let msg = value.get_data_by_key("msg");
|
||||
let label = value.get_data_by_key("label");
|
||||
@ -103,20 +111,39 @@ fn make_error(value: &Value) -> Option<ShellError> {
|
||||
Some(Value::String {
|
||||
val: label_text, ..
|
||||
}),
|
||||
) => Some(ShellError::SpannedLabeledError(
|
||||
) => Some(ShellError::GenericError(
|
||||
message,
|
||||
label_text,
|
||||
Span {
|
||||
Some(Span {
|
||||
start: start as usize,
|
||||
end: end as usize,
|
||||
},
|
||||
}),
|
||||
None,
|
||||
Vec::new(),
|
||||
)),
|
||||
(
|
||||
None,
|
||||
None,
|
||||
Some(Value::String {
|
||||
val: label_text, ..
|
||||
}),
|
||||
) => Some(ShellError::GenericError(
|
||||
message,
|
||||
label_text,
|
||||
Some(throw_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
(Some(Value::String { val: message, .. }), None) => {
|
||||
Some(ShellError::UnlabeledError(message))
|
||||
}
|
||||
(Some(Value::String { val: message, .. }), None) => Some(ShellError::GenericError(
|
||||
message,
|
||||
"originates from here".to_string(),
|
||||
Some(throw_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
|
@ -22,8 +22,8 @@ impl Command for ExportCommand {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -26,8 +26,8 @@ impl Command for ExportAlias {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -27,8 +27,8 @@ impl Command for ExportDef {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -27,8 +27,8 @@ impl Command for ExportDefEnv {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -30,8 +30,8 @@ impl Command for ExportEnv {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -22,8 +22,8 @@ impl Command for ExportExtern {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -22,8 +22,8 @@ impl Command for Extern {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -2,8 +2,8 @@ use nu_engine::{eval_block, eval_expression, CallExt};
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{CaptureBlock, Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Category, Example, IntoInterruptiblePipelineData, PipelineData, Signature, Span, SyntaxShape,
|
||||
Value,
|
||||
Category, Example, IntoInterruptiblePipelineData, ListStream, PipelineData, Signature, Span,
|
||||
SyntaxShape, Value,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -45,8 +45,8 @@ impl Command for For {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
@ -88,46 +88,47 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
||||
let redirect_stderr = call.redirect_stderr;
|
||||
|
||||
match values {
|
||||
Value::List { vals, .. } => Ok(vals
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(move |(idx, x)| {
|
||||
stack.with_env(&orig_env_vars, &orig_env_hidden);
|
||||
Value::List { vals, .. } => {
|
||||
Ok(ListStream::from_stream(vals.into_iter(), ctrlc.clone())
|
||||
.enumerate()
|
||||
.map(move |(idx, x)| {
|
||||
stack.with_env(&orig_env_vars, &orig_env_hidden);
|
||||
|
||||
stack.add_var(
|
||||
var_id,
|
||||
if numbered {
|
||||
Value::Record {
|
||||
cols: vec!["index".into(), "item".into()],
|
||||
vals: vec![
|
||||
Value::Int {
|
||||
val: idx as i64,
|
||||
span: head,
|
||||
},
|
||||
x,
|
||||
],
|
||||
span: head,
|
||||
}
|
||||
} else {
|
||||
x
|
||||
},
|
||||
);
|
||||
stack.add_var(
|
||||
var_id,
|
||||
if numbered {
|
||||
Value::Record {
|
||||
cols: vec!["index".into(), "item".into()],
|
||||
vals: vec![
|
||||
Value::Int {
|
||||
val: idx as i64,
|
||||
span: head,
|
||||
},
|
||||
x,
|
||||
],
|
||||
span: head,
|
||||
}
|
||||
} else {
|
||||
x
|
||||
},
|
||||
);
|
||||
|
||||
//let block = engine_state.get_block(block_id);
|
||||
match eval_block(
|
||||
&engine_state,
|
||||
&mut stack,
|
||||
&block,
|
||||
PipelineData::new(head),
|
||||
redirect_stdout,
|
||||
redirect_stderr,
|
||||
) {
|
||||
Ok(pipeline_data) => pipeline_data.into_value(head),
|
||||
Err(error) => Value::Error { error },
|
||||
}
|
||||
})
|
||||
.filter(|x| !x.is_nothing())
|
||||
.into_pipeline_data(ctrlc)),
|
||||
//let block = engine_state.get_block(block_id);
|
||||
match eval_block(
|
||||
&engine_state,
|
||||
&mut stack,
|
||||
&block,
|
||||
PipelineData::new(head),
|
||||
redirect_stdout,
|
||||
redirect_stderr,
|
||||
) {
|
||||
Ok(pipeline_data) => pipeline_data.into_value(head),
|
||||
Err(error) => Value::Error { error },
|
||||
}
|
||||
})
|
||||
.filter(|x| !x.is_nothing())
|
||||
.into_pipeline_data(ctrlc))
|
||||
}
|
||||
Value::Range { val, .. } => Ok(val
|
||||
.into_range_iter(ctrlc.clone())?
|
||||
.enumerate()
|
||||
|
@ -54,11 +54,6 @@ impl Command for Help {
|
||||
example: "help commands",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "generate documentation",
|
||||
example: "help generate_docs",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "show help for single command",
|
||||
example: "help match",
|
||||
|
@ -25,9 +25,8 @@ impl Command for Hide {
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"Symbols are hidden by priority: First aliases, then custom commands, then environment variables.
|
||||
|
||||
This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages
|
||||
"#
|
||||
This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
@ -48,10 +47,12 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
||||
{
|
||||
pat
|
||||
} else {
|
||||
return Err(ShellError::SpannedLabeledError(
|
||||
return Err(ShellError::GenericError(
|
||||
"Unexpected import".into(),
|
||||
"import pattern not supported".into(),
|
||||
call.head,
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
));
|
||||
};
|
||||
|
||||
@ -61,23 +62,23 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
||||
return Err(ShellError::NonUtf8(import_pattern.head.span));
|
||||
};
|
||||
|
||||
if let Some(overlay_id) = engine_state.find_overlay(&import_pattern.head.name) {
|
||||
if let Some(module_id) = engine_state.find_module(&import_pattern.head.name, &[]) {
|
||||
// The first word is a module
|
||||
let overlay = engine_state.get_overlay(overlay_id);
|
||||
let module = engine_state.get_module(module_id);
|
||||
|
||||
let env_vars_to_hide = if import_pattern.members.is_empty() {
|
||||
overlay.env_vars_with_head(&import_pattern.head.name)
|
||||
module.env_vars_with_head(&import_pattern.head.name)
|
||||
} else {
|
||||
match &import_pattern.members[0] {
|
||||
ImportPatternMember::Glob { .. } => overlay.env_vars(),
|
||||
ImportPatternMember::Glob { .. } => module.env_vars(),
|
||||
ImportPatternMember::Name { name, span } => {
|
||||
let mut output = vec![];
|
||||
|
||||
if let Some((name, id)) =
|
||||
overlay.env_var_with_head(name, &import_pattern.head.name)
|
||||
module.env_var_with_head(name, &import_pattern.head.name)
|
||||
{
|
||||
output.push((name, id));
|
||||
} else if !(overlay.has_alias(name) || overlay.has_decl(name)) {
|
||||
} else if !(module.has_alias(name) || module.has_decl(name)) {
|
||||
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
||||
String::from_utf8_lossy(name).into(),
|
||||
*span,
|
||||
@ -91,10 +92,10 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
||||
|
||||
for (name, span) in names {
|
||||
if let Some((name, id)) =
|
||||
overlay.env_var_with_head(name, &import_pattern.head.name)
|
||||
module.env_var_with_head(name, &import_pattern.head.name)
|
||||
{
|
||||
output.push((name, id));
|
||||
} else if !(overlay.has_alias(name) || overlay.has_decl(name)) {
|
||||
} else if !(module.has_alias(name) || module.has_decl(name)) {
|
||||
return Err(ShellError::EnvVarNotFoundAtRuntime(
|
||||
String::from_utf8_lossy(name).into(),
|
||||
*span,
|
||||
|
@ -1,9 +1,8 @@
|
||||
use nu_engine::{eval_block, eval_expression, CallExt};
|
||||
use nu_engine::{eval_block, eval_expression, eval_expression_with_input, CallExt};
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{CaptureBlock, Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Category, Example, FromValue, IntoPipelineData, PipelineData, ShellError, Signature,
|
||||
SyntaxShape, Value,
|
||||
Category, Example, FromValue, PipelineData, ShellError, Signature, SyntaxShape, Value,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -35,8 +34,8 @@ impl Command for If {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
@ -85,12 +84,24 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
||||
call.redirect_stderr,
|
||||
)
|
||||
} else {
|
||||
eval_expression(engine_state, stack, else_expr)
|
||||
.map(|x| x.into_pipeline_data())
|
||||
eval_expression_with_input(
|
||||
engine_state,
|
||||
stack,
|
||||
else_expr,
|
||||
input,
|
||||
call.redirect_stdout,
|
||||
call.redirect_stderr,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
eval_expression(engine_state, stack, else_case)
|
||||
.map(|x| x.into_pipeline_data())
|
||||
eval_expression_with_input(
|
||||
engine_state,
|
||||
stack,
|
||||
else_case,
|
||||
input,
|
||||
call.redirect_stdout,
|
||||
call.redirect_stderr,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
Ok(PipelineData::new(call.head))
|
||||
@ -100,6 +111,7 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
|
||||
"bool".into(),
|
||||
x.get_type().to_string(),
|
||||
result.span()?,
|
||||
None,
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
@ -27,8 +27,8 @@ impl Command for Let {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
@ -22,6 +22,7 @@ mod ignore;
|
||||
mod let_;
|
||||
mod metadata;
|
||||
mod module;
|
||||
pub(crate) mod overlay;
|
||||
mod source;
|
||||
mod tutor;
|
||||
mod use_;
|
||||
@ -51,6 +52,7 @@ pub use ignore::Ignore;
|
||||
pub use let_::Let;
|
||||
pub use metadata::Metadata;
|
||||
pub use module::Module;
|
||||
pub use overlay::*;
|
||||
pub use source::Source;
|
||||
pub use tutor::Tutor;
|
||||
pub use use_::Use;
|
||||
|
@ -26,8 +26,8 @@ impl Command for Module {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-different-stages"#
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
|
148
crates/nu-command/src/core_commands/overlay/add.rs
Normal file
@ -0,0 +1,148 @@
|
||||
use nu_engine::{eval_block, CallExt};
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||
use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape};
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OverlayAdd;
|
||||
|
||||
impl Command for OverlayAdd {
|
||||
fn name(&self) -> &str {
|
||||
"overlay add"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Add definitions from a module as an overlay"
|
||||
}
|
||||
|
||||
fn signature(&self) -> nu_protocol::Signature {
|
||||
Signature::build("overlay add")
|
||||
.required(
|
||||
"name",
|
||||
SyntaxShape::String,
|
||||
"Module name to create overlay for",
|
||||
)
|
||||
// TODO:
|
||||
// .switch(
|
||||
// "prefix",
|
||||
// "Prepend module name to the imported symbols",
|
||||
// Some('p'),
|
||||
// )
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let name_arg: Spanned<String> = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let maybe_overlay_name = if engine_state
|
||||
.find_overlay(name_arg.item.as_bytes())
|
||||
.is_some()
|
||||
{
|
||||
Some(name_arg.item.clone())
|
||||
} else if let Some(os_str) = Path::new(&name_arg.item).file_stem() {
|
||||
os_str.to_str().map(|name| name.to_string())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(overlay_name) = maybe_overlay_name {
|
||||
if let Some(overlay_id) = engine_state.find_overlay(overlay_name.as_bytes()) {
|
||||
let old_module_id = engine_state.get_overlay(overlay_id).origin;
|
||||
|
||||
stack.add_overlay(overlay_name.clone());
|
||||
|
||||
if let Some(new_module_id) = engine_state.find_module(overlay_name.as_bytes(), &[])
|
||||
{
|
||||
if !stack.has_env_overlay(&overlay_name, engine_state)
|
||||
|| (old_module_id != new_module_id)
|
||||
{
|
||||
// Add environment variables only if:
|
||||
// a) adding a new overlay
|
||||
// b) refreshing an active overlay (the origin module changed)
|
||||
let module = engine_state.get_module(new_module_id);
|
||||
|
||||
for (name, block_id) in module.env_vars() {
|
||||
let name = if let Ok(s) = String::from_utf8(name.clone()) {
|
||||
s
|
||||
} else {
|
||||
return Err(ShellError::NonUtf8(call.head));
|
||||
};
|
||||
|
||||
let block = engine_state.get_block(block_id);
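// Each exported env var is backed by a block; evaluate it and add the
// resulting value to the stack as an environment variable.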
|
||||
|
||||
let val = eval_block(
|
||||
engine_state,
|
||||
stack,
|
||||
block,
|
||||
PipelineData::new(call.head),
|
||||
false,
|
||||
true,
|
||||
)?
|
||||
.into_value(call.head);
|
||||
|
||||
stack.add_env_var(name, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||
name_arg.item,
|
||||
name_arg.span,
|
||||
));
|
||||
}
|
||||
} else {
|
||||
return Err(ShellError::OverlayNotFoundAtRuntime(
|
||||
name_arg.item,
|
||||
name_arg.span,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(PipelineData::new(call.head))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Create an overlay from a module",
|
||||
example: r#"module spam { export def foo [] { "foo" } }
|
||||
overlay add spam"#,
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Create an overlay from a file",
|
||||
example: r#"echo 'export env FOO { "foo" }' | save spam.nu
|
||||
overlay add spam.nu"#,
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(OverlayAdd {})
|
||||
}
|
||||
}
|
58
crates/nu-command/src/core_commands/overlay/command.rs
Normal file
@ -0,0 +1,58 @@
|
||||
use nu_engine::get_full_help;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, IntoPipelineData, PipelineData, Signature, Value,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Overlay;
|
||||
|
||||
impl Command for Overlay {
|
||||
fn name(&self) -> &str {
|
||||
"overlay"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("overlay").category(Category::Core)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Commands for manipulating overlays."
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"This command is a parser keyword. For details, check:
|
||||
https://www.nushell.sh/book/thinking_in_nushell.html"#
|
||||
}
|
||||
|
||||
fn is_parser_keyword(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
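// Running `overlay` with no subcommand just prints the help text for the overlay commands.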
|
||||
Ok(Value::String {
|
||||
val: get_full_help(&Overlay.signature(), &[], engine_state, stack),
|
||||
span: call.head,
|
||||
}
|
||||
.into_pipeline_data())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(Overlay {})
|
||||
}
|
||||
}
|
85
crates/nu-command/src/core_commands/overlay/list.rs
Normal file
@ -0,0 +1,85 @@
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Value,
|
||||
};
|
||||
|
||||
use log::trace;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OverlayList;
|
||||
|
||||
impl Command for OverlayList {
|
||||
fn name(&self) -> &str {
|
||||
"overlay list"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"List all active overlays"
|
||||
}
|
||||
|
||||
fn signature(&self) -> nu_protocol::Signature {
|
||||
Signature::build("overlay list").category(Category::Core)
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
"The overlays are listed in the order they were activated."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
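// Active overlays are tracked both in the parser-level EngineState and in the
// runtime Stack; collect both lists so they can be cross-checked below.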
|
||||
let active_overlays_parser: Vec<Value> = engine_state
|
||||
.active_overlay_names(&[])
|
||||
.iter()
|
||||
.map(|s| Value::string(String::from_utf8_lossy(s), call.head))
|
||||
.collect();
|
||||
|
||||
let active_overlays_engine: Vec<Value> = stack
|
||||
.active_overlays
|
||||
.iter()
|
||||
.map(|s| Value::string(s, call.head))
|
||||
.collect();
|
||||
|
||||
// Check if the overlays in the engine match the overlays in the parser
|
||||
if (active_overlays_parser.len() != active_overlays_engine.len())
|
||||
|| active_overlays_parser
|
||||
.iter()
|
||||
.zip(active_overlays_engine.iter())
|
||||
.any(|(op, oe)| op != oe)
|
||||
{
|
||||
trace!("parser overlays: {:?}", active_overlays_parser);
|
||||
trace!("engine overlays: {:?}", active_overlays_engine);
|
||||
|
||||
return Err(ShellError::NushellFailedSpannedHelp(
|
||||
"Overlay mismatch".into(),
|
||||
"Active overlays do not match between the engine and the parser.".into(),
|
||||
call.head,
|
||||
"Run Nushell with --log-level=trace to see what went wrong.".into(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(Value::List {
|
||||
vals: active_overlays_engine,
|
||||
span: call.head,
|
||||
}
|
||||
.into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Get the last activated overlay",
|
||||
example: r#"module spam { export def foo [] { "foo" } }
|
||||
overlay add spam
|
||||
overlay list | last"#,
|
||||
result: Some(Value::String {
|
||||
val: "spam".to_string(),
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
}]
|
||||
}
|
||||
}
|