Mirror of https://github.com/nushell/nushell.git (synced 2025-07-01 07:00:37 +02:00)
Compare commits
108 Commits
SHA1
30f98f7e64
c9409a2edb
b857064d65
a541382776
07ad24ab97
55db643048
8f9b198d48
6c7129cc0c
919d55f3fc
bdf63420d1
b7af715f6b
b6eda33438
ab641d9f18
c7e128eed1
cc0259bbed
23fba6d2ea
3182adb6a0
d52ec65f18
b968376be9
90bd8c82b7
0955e8c5b6
ef55367224
a60f454154
7a7df3e635
62198a29c2
e87a35104a
1e051e573d
e172a621f3
9f09930834
20c2de9eed
22ca5a6b8d
8b19399b13
d289c773d0
a935e0720f
1c3ff179bc
ccab3d6b6e
3e39fae6e1
d575fd1c3a
0a2fb137af
4907575d3d
4200df21d3
e0bb5a2bd2
a6c2c685bc
1e2fa68db0
599f16f15c
91da168251
e104bccfb9
74bd0e32cc
03015ed33f
79ea70d4ec
3ec76af96e
b8efd2a347
9083157baa
6cdc9e3b77
f8d4adfb7a
719d9aa83c
9ebaa737aa
88b0982dac
8c2e12ad79
2c31b3db07
eedf833b6f
69d81cc065
af9c31152a
abb6fca5e3
3ec1c40320
619211c1bf
3a685049da
ae54d05930
e7c4597ad0
09c9495015
e05f387632
9870c7c9a6
3f75b6b371
04fed82e5e
f3a1dfef95
f738932bbd
4968b6b9d0
ee97c00818
1dbd431117
09ab583f64
9ad6d13982
8d4426f2f8
8c8f795e9e
7f2f67238f
740fe942c1
7c5dcbb3fc
7e055810b1
5758993e9f
d7014e671d
b0427ca9ff
3af575cce7
f787d272e6
f061c9a30e
8812072f06
e911ff4d67
28b6db115a
e735bd475f
299d199150
5e784d38eb
868029f655
043d1ed9fb
6230a62e9e
71b49c3374
2eef42c6b9
0209992f6c
c9d54f821b
59d6dee3b3
9d25b2f29a
.github/workflows/milestone.yml (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
# Description:
# - Add milestone to a merged PR automatically
# - Add milestone to a closed issue that has a merged PR fix (if any)

name: Milestone Action
on:
  issues:
    types: [closed]
  pull_request_target:
    types: [closed]

jobs:
  update-milestone:
    runs-on: ubuntu-latest
    name: Milestone Update
    steps:
      - name: Set Milestone for PR
        uses: hustcer/milestone-action@main
        if: github.event.pull_request.merged == true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Bind milestone to closed issue that has a merged PR fix
      - name: Set Milestone for Issue
        uses: hustcer/milestone-action@main
        if: github.event.issue.state == 'closed'
        with:
          action: bind-issue
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/nightly-build.yml (vendored, 2 changes)
@@ -170,7 +170,7 @@ jobs:
       # REF: https://github.com/marketplace/actions/gh-release
       # Create a release only in nushell/nightly repo
       - name: Publish Archive
-        uses: softprops/action-gh-release@v2.0.8
+        uses: softprops/action-gh-release@v2.0.9
         if: ${{ startsWith(github.repository, 'nushell/nightly') }}
         with:
           prerelease: true
.github/workflows/release.yml (vendored, 7 changes)
@@ -98,9 +98,10 @@ jobs:
           TARGET: ${{ matrix.target }}
           _EXTRA_: ${{ matrix.extra }}
 
-      # REF: https://github.com/marketplace/actions/gh-release
+      # WARN: Don't upgrade this action due to the release per asset issue.
+      # See: https://github.com/softprops/action-gh-release/issues/445
       - name: Publish Archive
-        uses: softprops/action-gh-release@v2.0.8
+        uses: softprops/action-gh-release@v2.0.5
         if: ${{ startsWith(github.ref, 'refs/tags/') }}
         with:
          draft: true
@@ -124,7 +125,7 @@ jobs:
      - name: Create Checksums
        run: cd release && shasum -a 256 * > ../SHA256SUMS
      - name: Publish Checksums
-       uses: softprops/action-gh-release@v2.0.8
+       uses: softprops/action-gh-release@v2.0.5
        with:
          draft: true
          files: SHA256SUMS
.github/workflows/typos.yml (vendored, 2 changes)
@@ -10,4 +10,4 @@ jobs:
         uses: actions/checkout@v4.1.7
 
       - name: Check spelling
-        uses: crate-ci/typos@v1.26.0
+        uses: crate-ci/typos@v1.27.0
Cargo.lock (generated, 599 changes)
File diff suppressed because it is too large.
Cargo.toml (66 changes)
@@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
 license = "MIT"
 name = "nu"
 repository = "https://github.com/nushell/nushell"
-rust-version = "1.79.0"
-version = "0.99.0"
+rust-version = "1.80.1"
+version = "0.100.0"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
@@ -66,8 +66,8 @@ alphanumeric-sort = "1.5"
 ansi-str = "0.8"
 anyhow = "1.0.82"
 base64 = "0.22.1"
-bracoxide = "0.1.2"
-brotli = "5.0"
+bracoxide = "0.1.4"
+brotli = "6.0"
 byteorder = "1.5"
 bytes = "1"
 bytesize = "1.3"
@@ -75,7 +75,7 @@ calamine = "0.24.0"
 chardetng = "0.1.17"
 chrono = { default-features = false, version = "0.4.34" }
 chrono-humanize = "0.2.3"
-chrono-tz = "0.8"
+chrono-tz = "0.10"
 crossbeam-channel = "0.5.8"
 crossterm = "0.28.1"
 csv = "1.3"
@@ -86,7 +86,7 @@ dirs = "5.0"
 dirs-sys = "0.4"
 dtparse = "2.0"
 encoding_rs = "0.8"
-fancy-regex = "0.13"
+fancy-regex = "0.14"
 filesize = "0.2"
 filetime = "0.2"
 fuzzy-matcher = "0.3"
@@ -103,7 +103,7 @@ log = "0.4"
 lru = "0.12"
 lscolors = { version = "0.17", default-features = false }
 lsp-server = "0.7.5"
-lsp-types = "0.95.0"
+lsp-types = { version = "0.95.0", features = ["proposed"] }
 mach2 = "0.4"
 md5 = { version = "0.10", package = "md-5" }
 miette = "7.2"
@@ -117,8 +117,8 @@ notify-debouncer-full = { version = "0.3", default-features = false }
 nu-ansi-term = "0.50.1"
 num-format = "0.4"
 num-traits = "0.2"
+oem_cp = "2.0.0"
 omnipath = "0.1"
-once_cell = "1.20"
 open = "5.3"
 os_pipe = { version = "1.2", features = ["io_safety"] }
 pathdiff = "0.2"
@@ -137,7 +137,7 @@ rand = "0.8"
 rand_chacha = "0.3.1"
 ratatui = "0.26"
 rayon = "1.10"
-reedline = "0.36.0"
+reedline = "0.37.0"
 regex = "1.9.5"
 rmp = "0.8"
 rmp-serde = "1.3"
@@ -153,13 +153,13 @@ serde_yaml = "0.9"
 sha2 = "0.10"
 strip-ansi-escapes = "0.2.0"
 syn = "2.0"
-sysinfo = "0.30"
+sysinfo = "0.32"
 tabled = { version = "0.16.0", default-features = false }
 tempfile = "3.13"
 terminal_size = "0.3"
 titlecase = "2.0"
 toml = "0.8"
-trash = "3.3"
+trash = "5.2"
 umask = "2.1"
 unicode-segmentation = "1.12"
 unicode-width = "0.1"
@@ -172,11 +172,11 @@ uu_mv = "0.0.27"
 uu_whoami = "0.0.27"
 uu_uname = "0.0.27"
 uucore = "0.0.27"
-uuid = "1.10.0"
+uuid = "1.11.0"
 v_htmlescape = "0.15.0"
 wax = "0.6"
 which = "6.0.0"
-windows = "0.54"
+windows = "0.56"
 windows-sys = "0.48"
 winreg = "0.52"
 
@@ -189,22 +189,22 @@ unchecked_duration_subtraction = "warn"
 workspace = true
 
 [dependencies]
-nu-cli = { path = "./crates/nu-cli", version = "0.99.0" }
-nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.99.0" }
-nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.99.0" }
-nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.99.0", optional = true }
-nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.99.0" }
-nu-command = { path = "./crates/nu-command", version = "0.99.0" }
-nu-engine = { path = "./crates/nu-engine", version = "0.99.0" }
-nu-explore = { path = "./crates/nu-explore", version = "0.99.0" }
-nu-lsp = { path = "./crates/nu-lsp/", version = "0.99.0" }
-nu-parser = { path = "./crates/nu-parser", version = "0.99.0" }
-nu-path = { path = "./crates/nu-path", version = "0.99.0" }
-nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.99.0" }
-nu-protocol = { path = "./crates/nu-protocol", version = "0.99.0" }
-nu-std = { path = "./crates/nu-std", version = "0.99.0" }
-nu-system = { path = "./crates/nu-system", version = "0.99.0" }
-nu-utils = { path = "./crates/nu-utils", version = "0.99.0" }
+nu-cli = { path = "./crates/nu-cli", version = "0.100.0" }
+nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.100.0" }
+nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.100.0" }
+nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.100.0", optional = true }
+nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.100.0" }
+nu-command = { path = "./crates/nu-command", version = "0.100.0" }
+nu-engine = { path = "./crates/nu-engine", version = "0.100.0" }
+nu-explore = { path = "./crates/nu-explore", version = "0.100.0" }
+nu-lsp = { path = "./crates/nu-lsp/", version = "0.100.0" }
+nu-parser = { path = "./crates/nu-parser", version = "0.100.0" }
+nu-path = { path = "./crates/nu-path", version = "0.100.0" }
+nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.100.0" }
+nu-protocol = { path = "./crates/nu-protocol", version = "0.100.0" }
+nu-std = { path = "./crates/nu-std", version = "0.100.0" }
+nu-system = { path = "./crates/nu-system", version = "0.100.0" }
+nu-utils = { path = "./crates/nu-utils", version = "0.100.0" }
 reedline = { workspace = true, features = ["bashisms", "sqlite"] }
 
 crossterm = { workspace = true }
@@ -234,9 +234,9 @@ nix = { workspace = true, default-features = false, features = [
 ] }
 
 [dev-dependencies]
-nu-test-support = { path = "./crates/nu-test-support", version = "0.99.0" }
-nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.99.0" }
-nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.99.0" }
+nu-test-support = { path = "./crates/nu-test-support", version = "0.100.0" }
+nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.100.0" }
+nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.100.0" }
 assert_cmd = "2.0"
 dirs = { workspace = true }
 tango-bench = "0.6"
@@ -320,4 +320,4 @@ bench = false
 # Run individual benchmarks like `cargo bench -- <regex>` e.g. `cargo bench -- parse`
 [[bench]]
 name = "benchmarks"
 harness = false
@@ -229,7 +229,7 @@ Please submit an issue or PR to be added to this list.
 See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
 
 <a href="https://github.com/nushell/nushell/graphs/contributors">
-  <img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750" />
+  <img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750&columns=20" />
 </a>
 
 ## License
@@ -5,27 +5,27 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
 edition = "2021"
 license = "MIT"
 name = "nu-cli"
-version = "0.99.0"
+version = "0.100.0"
 
 [lib]
 bench = false
 
 [dev-dependencies]
-nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.99.0" }
-nu-command = { path = "../nu-command", version = "0.99.0" }
-nu-test-support = { path = "../nu-test-support", version = "0.99.0" }
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.100.0" }
+nu-command = { path = "../nu-command", version = "0.100.0" }
+nu-test-support = { path = "../nu-test-support", version = "0.100.0" }
 rstest = { workspace = true, default-features = false }
 tempfile = { workspace = true }
 
 [dependencies]
-nu-cmd-base = { path = "../nu-cmd-base", version = "0.99.0" }
-nu-engine = { path = "../nu-engine", version = "0.99.0" }
-nu-path = { path = "../nu-path", version = "0.99.0" }
-nu-parser = { path = "../nu-parser", version = "0.99.0" }
-nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.99.0", optional = true }
-nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
-nu-utils = { path = "../nu-utils", version = "0.99.0" }
-nu-color-config = { path = "../nu-color-config", version = "0.99.0" }
+nu-cmd-base = { path = "../nu-cmd-base", version = "0.100.0" }
+nu-engine = { path = "../nu-engine", version = "0.100.0" }
+nu-path = { path = "../nu-path", version = "0.100.0" }
+nu-parser = { path = "../nu-parser", version = "0.100.0" }
+nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.100.0", optional = true }
+nu-protocol = { path = "../nu-protocol", version = "0.100.0" }
+nu-utils = { path = "../nu-utils", version = "0.100.0" }
+nu-color-config = { path = "../nu-color-config", version = "0.100.0" }
 nu-ansi-term = { workspace = true }
 reedline = { workspace = true, features = ["bashisms", "sqlite"] }
 
@@ -37,7 +37,6 @@ is_executable = { workspace = true }
 log = { workspace = true }
 miette = { workspace = true, features = ["fancy-no-backtrace"] }
 lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
-once_cell = { workspace = true }
 percent-encoding = { workspace = true }
 sysinfo = { workspace = true }
 unicode-segmentation = { workspace = true }
@@ -49,4 +48,4 @@ plugin = ["nu-plugin-engine"]
 system-clipboard = ["reedline/system_clipboard"]
 
 [lints]
 workspace = true
@@ -17,6 +17,7 @@ pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
             CommandlineGetCursor,
             CommandlineSetCursor,
             History,
+            HistoryImport,
             HistorySession,
             Keybindings,
             KeybindingsDefault,
crates/nu-cli/src/commands/history/fields.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
// Each const is named after a HistoryItem field, and the value is the field name to be displayed to
// the user (or accept during import).
pub const COMMAND_LINE: &str = "command";
pub const START_TIMESTAMP: &str = "start_timestamp";
pub const HOSTNAME: &str = "hostname";
pub const CWD: &str = "cwd";
pub const EXIT_STATUS: &str = "exit_status";
pub const DURATION: &str = "duration";
pub const SESSION_ID: &str = "session_id";
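The point of pulling these names into one small module is that the columns `history` prints and the columns `history import` accepts can no longer drift apart. A minimal, self-contained sketch of that pattern, with hypothetical `render`/`parse` helpers rather than the actual nushell code:

// Hypothetical producer/consumer pair sharing one field-name constant.
pub const COMMAND_LINE: &str = "command";

// Producer side: emit the field under the shared name.
fn render(command: &str) -> (String, String) {
    (COMMAND_LINE.to_string(), command.to_string())
}

// Consumer side: accept the same name, so the two stay in sync by construction.
fn parse(field: &str, value: &str) -> Option<String> {
    (field == COMMAND_LINE).then(|| value.to_string())
}

fn main() {
    let (key, value) = render("ls -la");
    assert_eq!(parse(&key, &value).as_deref(), Some("ls -la"));
}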
@@ -5,6 +5,8 @@ use reedline::{
     SqliteBackedHistory,
 };
 
+use super::fields;
+
 #[derive(Clone)]
 pub struct History;
 
@@ -83,7 +85,8 @@ impl Command for History {
         entries.into_iter().enumerate().map(move |(idx, entry)| {
             Value::record(
                 record! {
-                    "command" => Value::string(entry.command_line, head),
+                    fields::COMMAND_LINE => Value::string(entry.command_line, head),
+                    // TODO: This name is inconsistent with create_history_record.
                     "index" => Value::int(idx as i64, head),
                 },
                 head,
@@ -176,13 +179,13 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
         Value::record(
             record! {
                 "item_id" => item_id_value,
-                "start_timestamp" => start_timestamp_value,
-                "command" => command_value,
-                "session_id" => session_id_value,
-                "hostname" => hostname_value,
-                "cwd" => cwd_value,
-                "duration" => duration_value,
-                "exit_status" => exit_status_value,
+                fields::START_TIMESTAMP => start_timestamp_value,
+                fields::COMMAND_LINE => command_value,
+                fields::SESSION_ID => session_id_value,
+                fields::HOSTNAME => hostname_value,
+                fields::CWD => cwd_value,
+                fields::DURATION => duration_value,
+                fields::EXIT_STATUS => exit_status_value,
                 "idx" => index_value,
             },
            head,
@@ -190,11 +193,11 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
     } else {
         Value::record(
             record! {
-                "start_timestamp" => start_timestamp_value,
-                "command" => command_value,
-                "cwd" => cwd_value,
-                "duration" => duration_value,
-                "exit_status" => exit_status_value,
+                fields::START_TIMESTAMP => start_timestamp_value,
+                fields::COMMAND_LINE => command_value,
+                fields::CWD => cwd_value,
+                fields::DURATION => duration_value,
+                fields::EXIT_STATUS => exit_status_value,
             },
             head,
         )
crates/nu-cli/src/commands/history/history_import.rs (new file, 415 lines)
@@ -0,0 +1,415 @@
use std::path::{Path, PathBuf};

use nu_engine::command_prelude::*;
use nu_protocol::HistoryFileFormat;

use reedline::{
    FileBackedHistory, History, HistoryItem, ReedlineError, SearchQuery, SqliteBackedHistory,
};

use super::fields;

#[derive(Clone)]
pub struct HistoryImport;

impl Command for HistoryImport {
    fn name(&self) -> &str {
        "history import"
    }

    fn description(&self) -> &str {
        "Import command line history"
    }

    fn extra_description(&self) -> &str {
        r#"Can import history from input, either successive command lines or more detailed records. If providing records, available fields are:
command_line, id, start_timestamp, hostname, cwd, duration, exit_status.

If no input is provided, will import all history items from existing history in the other format: if current history is stored in sqlite, it will store it in plain text and vice versa.

Note that history item IDs are ignored when importing from file."#
    }

    fn signature(&self) -> nu_protocol::Signature {
        Signature::build("history import")
            .category(Category::History)
            .input_output_types(vec![
                (Type::Nothing, Type::Nothing),
                (Type::List(Box::new(Type::String)), Type::Nothing),
                (Type::table(), Type::Nothing),
            ])
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                example: "history import",
                description:
                    "Append all items from history in the other format to the current history",
                result: None,
            },
            Example {
                example: "echo foo | history import",
                description: "Append `foo` to the current history",
                result: None,
            },
            Example {
                example: "[[ command_line cwd ]; [ foo /home ]] | history import",
                description: "Append `foo` ran from `/home` to the current history",
                result: None,
            },
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        _stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let ok = Ok(Value::nothing(call.head).into_pipeline_data());

        let Some(history) = engine_state.history_config() else {
            return ok;
        };
        let Some(current_history_path) = history.file_path() else {
            return Err(ShellError::ConfigDirNotFound {
                span: Some(call.head),
            });
        };
        if let Some(bak_path) = backup(&current_history_path)? {
            println!("Backed history to {}", bak_path.display());
        }
        match input {
            PipelineData::Empty => {
                let other_format = match history.file_format {
                    HistoryFileFormat::Sqlite => HistoryFileFormat::Plaintext,
                    HistoryFileFormat::Plaintext => HistoryFileFormat::Sqlite,
                };
                let src = new_backend(other_format, None)?;
                let mut dst = new_backend(history.file_format, Some(current_history_path))?;
                let items = src
                    .search(SearchQuery::everything(
                        reedline::SearchDirection::Forward,
                        None,
                    ))
                    .map_err(error_from_reedline)?
                    .into_iter()
                    .map(Ok);
                import(dst.as_mut(), items)
            }
            _ => {
                let input = input.into_iter().map(item_from_value);
                import(
                    new_backend(history.file_format, Some(current_history_path))?.as_mut(),
                    input,
                )
            }
        }?;

        ok
    }
}

fn new_backend(
    format: HistoryFileFormat,
    path: Option<PathBuf>,
) -> Result<Box<dyn History>, ShellError> {
    let path = match path {
        Some(path) => path,
        None => {
            let Some(mut path) = nu_path::nu_config_dir() else {
                return Err(ShellError::ConfigDirNotFound { span: None });
            };
            path.push(format.default_file_name());
            path.into_std_path_buf()
        }
    };

    fn map(
        result: Result<impl History + 'static, ReedlineError>,
    ) -> Result<Box<dyn History>, ShellError> {
        result
            .map(|x| Box::new(x) as Box<dyn History>)
            .map_err(error_from_reedline)
    }
    match format {
        // Use a reasonably large value for maximum capacity.
        HistoryFileFormat::Plaintext => map(FileBackedHistory::with_file(0xfffffff, path)),
        HistoryFileFormat::Sqlite => map(SqliteBackedHistory::with_file(path, None, None)),
    }
}

fn import(
    dst: &mut dyn History,
    src: impl Iterator<Item = Result<HistoryItem, ShellError>>,
) -> Result<(), ShellError> {
    for item in src {
        let mut item = item?;
        item.id = None;
        dst.save(item).map_err(error_from_reedline)?;
    }
    Ok(())
}

fn error_from_reedline(e: ReedlineError) -> ShellError {
    // TODO: Should we add a new ShellError variant?
    ShellError::GenericError {
        error: "Reedline error".to_owned(),
        msg: format!("{e}"),
        span: None,
        help: None,
        inner: Vec::new(),
    }
}

fn item_from_value(v: Value) -> Result<HistoryItem, ShellError> {
    let span = v.span();
    match v {
        Value::Record { val, .. } => item_from_record(val.into_owned(), span),
        Value::String { val, .. } => Ok(HistoryItem {
            command_line: val,
            id: None,
            start_timestamp: None,
            session_id: None,
            hostname: None,
            cwd: None,
            duration: None,
            exit_status: None,
            more_info: None,
        }),
        _ => Err(ShellError::UnsupportedInput {
            msg: "Only list and record inputs are supported".to_owned(),
            input: v.get_type().to_string(),
            msg_span: span,
            input_span: span,
        }),
    }
}

fn item_from_record(mut rec: Record, span: Span) -> Result<HistoryItem, ShellError> {
    let cmd = match rec.remove(fields::COMMAND_LINE) {
        Some(v) => v.as_str()?.to_owned(),
        None => {
            return Err(ShellError::TypeMismatch {
                err_message: format!("missing column: {}", fields::COMMAND_LINE),
                span,
            })
        }
    };

    fn get<T>(
        rec: &mut Record,
        field: &'static str,
        f: impl FnOnce(Value) -> Result<T, ShellError>,
    ) -> Result<Option<T>, ShellError> {
        rec.remove(field).map(f).transpose()
    }

    let rec = &mut rec;
    let item = HistoryItem {
        command_line: cmd,
        id: None,
        start_timestamp: get(rec, fields::START_TIMESTAMP, |v| Ok(v.as_date()?.to_utc()))?,
        hostname: get(rec, fields::HOSTNAME, |v| Ok(v.as_str()?.to_owned()))?,
        cwd: get(rec, fields::CWD, |v| Ok(v.as_str()?.to_owned()))?,
        exit_status: get(rec, fields::EXIT_STATUS, |v| v.as_int())?,
        duration: get(rec, fields::DURATION, duration_from_value)?,
        more_info: None,
        // TODO: Currently reedline doesn't let you create session IDs.
        session_id: None,
    };

    if !rec.is_empty() {
        let cols = rec.columns().map(|s| s.as_str()).collect::<Vec<_>>();
        return Err(ShellError::TypeMismatch {
            err_message: format!("unsupported column names: {}", cols.join(", ")),
            span,
        });
    }
    Ok(item)
}

fn duration_from_value(v: Value) -> Result<std::time::Duration, ShellError> {
    chrono::Duration::nanoseconds(v.as_duration()?)
        .to_std()
        .map_err(|_| ShellError::IOError {
            msg: "negative duration not supported".to_string(),
        })
}

fn find_backup_path(path: &Path) -> Result<PathBuf, ShellError> {
    let Ok(mut bak_path) = path.to_path_buf().into_os_string().into_string() else {
        // This isn't fundamentally problem, but trying to work with OsString is a nightmare.
        return Err(ShellError::IOError {
            msg: "History path mush be representable as UTF-8".to_string(),
        });
    };
    bak_path.push_str(".bak");
    if !Path::new(&bak_path).exists() {
        return Ok(bak_path.into());
    }
    let base_len = bak_path.len();
    for i in 1..100 {
        use std::fmt::Write;
        bak_path.truncate(base_len);
        write!(&mut bak_path, ".{i}").unwrap();
        if !Path::new(&bak_path).exists() {
            return Ok(PathBuf::from(bak_path));
        }
    }
    Err(ShellError::IOError {
        msg: "Too many existing backup files".to_string(),
    })
}

fn backup(path: &Path) -> Result<Option<PathBuf>, ShellError> {
    match path.metadata() {
        Ok(md) if md.is_file() => (),
        Ok(_) => {
            return Err(ShellError::IOError {
                msg: "history path exists but is not a file".to_string(),
            })
        }
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
        Err(e) => return Err(e.into()),
    }
    let bak_path = find_backup_path(path)?;
    std::fs::copy(path, &bak_path)?;
    Ok(Some(bak_path))
}

#[cfg(test)]
mod tests {
    use chrono::DateTime;
    use rstest::rstest;

    use super::*;

    #[test]
    fn test_item_from_value_string() -> Result<(), ShellError> {
        let item = item_from_value(Value::string("foo", Span::unknown()))?;
        assert_eq!(
            item,
            HistoryItem {
                command_line: "foo".to_string(),
                id: None,
                start_timestamp: None,
                session_id: None,
                hostname: None,
                cwd: None,
                duration: None,
                exit_status: None,
                more_info: None
            }
        );
        Ok(())
    }

    #[test]
    fn test_item_from_value_record() {
        let span = Span::unknown();
        let rec = new_record(&[
            ("command", Value::string("foo", span)),
            (
                "start_timestamp",
                Value::date(
                    DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00").unwrap(),
                    span,
                ),
            ),
            ("hostname", Value::string("localhost", span)),
            ("cwd", Value::string("/home/test", span)),
            ("duration", Value::duration(100_000_000, span)),
            ("exit_status", Value::int(42, span)),
        ]);
        let item = item_from_value(rec).unwrap();
        assert_eq!(
            item,
            HistoryItem {
                command_line: "foo".to_string(),
                id: None,
                start_timestamp: Some(
                    DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00")
                        .unwrap()
                        .to_utc()
                ),
                hostname: Some("localhost".to_string()),
                cwd: Some("/home/test".to_string()),
                duration: Some(std::time::Duration::from_nanos(100_000_000)),
                exit_status: Some(42),

                session_id: None,
                more_info: None
            }
        );
    }

    #[test]
    fn test_item_from_value_record_extra_field() {
        let span = Span::unknown();
        let rec = new_record(&[
            ("command_line", Value::string("foo", span)),
            ("id_nonexistent", Value::int(1, span)),
        ]);
        assert!(item_from_value(rec).is_err());
    }

    #[test]
    fn test_item_from_value_record_bad_type() {
        let span = Span::unknown();
        let rec = new_record(&[
            ("command_line", Value::string("foo", span)),
            ("id", Value::string("one".to_string(), span)),
        ]);
        assert!(item_from_value(rec).is_err());
    }

    fn new_record(rec: &[(&'static str, Value)]) -> Value {
        let span = Span::unknown();
        let rec = Record::from_raw_cols_vals(
            rec.iter().map(|(col, _)| col.to_string()).collect(),
            rec.iter().map(|(_, val)| val.clone()).collect(),
            span,
            span,
        )
        .unwrap();
        Value::record(rec, span)
    }

    #[rstest]
    #[case::no_backup(&["history.dat"], "history.dat.bak")]
    #[case::backup_exists(&["history.dat", "history.dat.bak"], "history.dat.bak.1")]
    #[case::multiple_backups_exists( &["history.dat", "history.dat.bak", "history.dat.bak.1"], "history.dat.bak.2")]
    fn test_find_backup_path(#[case] existing: &[&str], #[case] want: &str) {
        let dir = tempfile::tempdir().unwrap();
        for name in existing {
            std::fs::File::create_new(dir.path().join(name)).unwrap();
        }
        let got = find_backup_path(&dir.path().join("history.dat")).unwrap();
        assert_eq!(got, dir.path().join(want))
    }

    #[test]
    fn test_backup() {
        let dir = tempfile::tempdir().unwrap();
        let mut history = std::fs::File::create_new(dir.path().join("history.dat")).unwrap();
        use std::io::Write;
        write!(&mut history, "123").unwrap();
        let want_bak_path = dir.path().join("history.dat.bak");
        assert_eq!(
            backup(&dir.path().join("history.dat")),
            Ok(Some(want_bak_path.clone()))
        );
        let got_data = String::from_utf8(std::fs::read(want_bak_path).unwrap()).unwrap();
        assert_eq!(got_data, "123");
    }

    #[test]
    fn test_backup_no_file() {
        let dir = tempfile::tempdir().unwrap();
        let bak_path = backup(&dir.path().join("history.dat")).unwrap();
        assert!(bak_path.is_none());
    }
}
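For orientation, the no-input path of `history import` above boils down to driving reedline's `History` trait from one backend into another. A stripped-down sketch of that flow, using only the reedline calls that already appear in the file; the file paths here are placeholders (the real command resolves them under the nushell config dir), and the sqlite backend assumes reedline is built with its sqlite feature:

use reedline::{
    FileBackedHistory, History, ReedlineError, SearchDirection, SearchQuery, SqliteBackedHistory,
};

fn copy_plaintext_to_sqlite() -> Result<(), ReedlineError> {
    // Placeholder paths, not the ones the command actually uses.
    let src = FileBackedHistory::with_file(1000, "history.txt".into())?;
    let mut dst = SqliteBackedHistory::with_file("history.sqlite3".into(), None, None)?;

    // Pull every item out of the source backend...
    let items = src.search(SearchQuery::everything(SearchDirection::Forward, None))?;

    // ...and save each one into the destination, dropping IDs so the new backend
    // assigns its own (the `import` helper above does the same).
    for mut item in items {
        item.id = None;
        dst.save(item)?;
    }
    Ok(())
}

fn main() {
    copy_plaintext_to_sqlite().expect("history copy failed");
}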
@@ -1,5 +1,8 @@
+mod fields;
 mod history_;
+mod history_import;
 mod history_session;
 
 pub use history_::History;
+pub use history_import::HistoryImport;
 pub use history_session::HistorySession;
@@ -7,7 +7,7 @@ mod keybindings_list;
 mod keybindings_listen;
 
 pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor};
-pub use history::{History, HistorySession};
+pub use history::{History, HistoryImport, HistorySession};
 pub use keybindings::Keybindings;
 pub use keybindings_default::KeybindingsDefault;
 pub use keybindings_list::KeybindingsList;
@@ -67,7 +67,9 @@ impl Completer for OperatorCompletion {
             ],
             Expr::String(_) => vec![
                 ("=~", "Contains regex match"),
+                ("like", "Contains regex match"),
                 ("!~", "Does not contain regex match"),
+                ("not-like", "Does not contain regex match"),
                 (
                     "++",
                     "Appends two lists, a list and a value, two strings, or two binary values",
@@ -711,6 +711,7 @@ pub(crate) fn create_keybindings(config: &Config) -> Result<KeybindingsMode, She
     }
     for keybinding in parsed_keybindings {
         add_keybinding(
+            &keybinding.name,
             &keybinding.mode,
             keybinding,
             config,
@@ -729,7 +730,9 @@ pub(crate) fn create_keybindings(config: &Config) -> Result<KeybindingsMode, She
     }
 }
 
+#[allow(clippy::only_used_in_recursion)]
 fn add_keybinding(
+    name: &Option<Value>,
     mode: &Value,
     keybinding: &ParsedKeybinding,
     config: &Config,
@@ -752,6 +755,7 @@ fn add_keybinding(
         Value::List { vals, .. } => {
             for inner_mode in vals {
                 add_keybinding(
+                    name,
                     inner_mode,
                     keybinding,
                     config,
@@ -858,10 +862,10 @@ fn add_parsed_keybinding(
         c if c.starts_with('f') => c[1..]
             .parse()
             .ok()
-            .filter(|num| (1..=20).contains(num))
+            .filter(|num| (1..=35).contains(num))
             .map(KeyCode::F)
             .ok_or(ShellError::InvalidValue {
-                valid: "'f1', 'f2', ..., or 'f20'".into(),
+                valid: "'f1', 'f2', ..., or 'f35'".into(),
                 actual: format!("'{keycode}'"),
                 span: keybinding.keycode.span(),
             })?,
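The keybinding change above only widens the accepted function-key range from f20 to f35; the parsing shape is unchanged. A self-contained sketch of that shape in plain std (not the actual nushell/crossterm types):

/// Parse a keycode spec like "f1".."f35" into its function-key number,
/// rejecting anything outside the accepted range, mirroring the filter above.
fn parse_fkey(keycode: &str) -> Option<u8> {
    keycode
        .strip_prefix('f')?
        .parse::<u8>()
        .ok()
        .filter(|num| (1..=35).contains(num))
}

fn main() {
    assert_eq!(parse_fkey("f24"), Some(24)); // accepted after the change
    assert_eq!(parse_fkey("f36"), None); // still rejected
    assert_eq!(parse_fkey("q"), None); // not a function key at all
}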
@@ -130,13 +130,8 @@ pub fn evaluate_repl(
                 // escape a few things because this says so
                 // https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st
                 let cmd_text = line_editor.current_buffer_contents().to_string();
-                let len = cmd_text.len();
-                let mut cmd_text_chars = cmd_text[0..len].chars();
-                let mut replaced_cmd_text = String::with_capacity(len);
 
-                while let Some(c) = unescape_for_vscode(&mut cmd_text_chars) {
-                    replaced_cmd_text.push(c);
-                }
+                let replaced_cmd_text = escape_special_vscode_bytes(&cmd_text)?;
 
                 run_shell_integration_osc633(
                     engine_state,
@@ -220,26 +215,41 @@ pub fn evaluate_repl(
     Ok(())
 }
 
-fn unescape_for_vscode(text: &mut std::str::Chars) -> Option<char> {
-    match text.next() {
-        Some('\\') => match text.next() {
-            Some('0') => Some('\x00'), // NUL '\0' (null character)
-            Some('a') => Some('\x07'), // BEL '\a' (bell)
-            Some('b') => Some('\x08'), // BS '\b' (backspace)
-            Some('t') => Some('\x09'), // HT '\t' (horizontal tab)
-            Some('n') => Some('\x0a'), // LF '\n' (new line)
-            Some('v') => Some('\x0b'), // VT '\v' (vertical tab)
-            Some('f') => Some('\x0c'), // FF '\f' (form feed)
-            Some('r') => Some('\x0d'), // CR '\r' (carriage ret)
-            Some(';') => Some('\x3b'), // semi-colon
-            Some('\\') => Some('\x5c'), // backslash
-            Some('e') => Some('\x1b'), // escape
-            Some(c) => Some(c),
-            None => None,
-        },
-        Some(c) => Some(c),
-        None => None,
-    }
-}
+fn escape_special_vscode_bytes(input: &str) -> Result<String, ShellError> {
+    let bytes = input
+        .chars()
+        .flat_map(|c| {
+            let mut buf = [0; 4]; // Buffer to hold UTF-8 bytes of the character
+            let c_bytes = c.encode_utf8(&mut buf); // Get UTF-8 bytes for the character
+
+            if c_bytes.len() == 1 {
+                let byte = c_bytes.as_bytes()[0];
+
+                match byte {
+                    // Escape bytes below 0x20
+                    b if b < 0x20 => format!("\\x{:02X}", byte).into_bytes(),
+                    // Escape semicolon as \x3B
+                    b';' => "\\x3B".to_string().into_bytes(),
+                    // Escape backslash as \\
+                    b'\\' => "\\\\".to_string().into_bytes(),
+                    // Otherwise, return the character unchanged
+                    _ => vec![byte],
+                }
+            } else {
+                // pass through multi-byte characters unchanged
+                c_bytes.bytes().collect()
+            }
+        })
+        .collect();
+
+    String::from_utf8(bytes).map_err(|err| ShellError::CantConvert {
+        to_type: "string".to_string(),
+        from_type: "bytes".to_string(),
+        span: Span::unknown(),
+        help: Some(format!(
+            "Error {err}, Unable to convert {input} to escaped bytes"
+        )),
+    })
+}
 
 fn get_line_editor(engine_state: &mut EngineState, use_color: bool) -> Result<Reedline> {
@@ -750,7 +760,7 @@ fn fill_in_result_related_history_metadata(
             c.duration = Some(cmd_duration);
             c.exit_status = stack
                 .get_env_var(engine_state, "LAST_EXIT_CODE")
-                .and_then(|e| e.as_i64().ok());
+                .and_then(|e| e.as_int().ok());
             c
         })
         .into_diagnostic()?; // todo: don't stop repl if error here?
@@ -1069,16 +1079,8 @@ fn run_shell_integration_osc633(
 
         // escape a few things because this says so
         // https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st
-        let replaced_cmd_text: String = repl_cmd_line_text
-            .chars()
-            .map(|c| match c {
-                '\n' => '\x0a',
-                '\r' => '\x0d',
-                '\x1b' => '\x1b',
-                _ => c,
-            })
-            .collect();
+        let replaced_cmd_text =
+            escape_special_vscode_bytes(&repl_cmd_line_text).unwrap_or(repl_cmd_line_text);
 
         //OSC 633 ; E ; <commandline> [; <nonce] ST - Explicitly set the command line with an optional nonce.
         run_ansi_sequence(&format!(
@@ -1245,7 +1247,7 @@ fn get_command_finished_marker(
 ) -> String {
     let exit_code = stack
         .get_env_var(engine_state, "LAST_EXIT_CODE")
-        .and_then(|e| e.as_i64().ok());
+        .and_then(|e| e.as_int().ok());
 
     if shell_integration_osc633 {
         if stack
@@ -1356,10 +1358,9 @@ fn run_finaliziation_ansi_sequence(
 
 // Absolute paths with a drive letter, like 'C:', 'D:\', 'E:\foo'
 #[cfg(windows)]
-static DRIVE_PATH_REGEX: once_cell::sync::Lazy<fancy_regex::Regex> =
-    once_cell::sync::Lazy::new(|| {
-        fancy_regex::Regex::new(r"^[a-zA-Z]:[/\\]?").expect("Internal error: regex creation")
-    });
+static DRIVE_PATH_REGEX: std::sync::LazyLock<fancy_regex::Regex> = std::sync::LazyLock::new(|| {
+    fancy_regex::Regex::new(r"^[a-zA-Z]:[/\\]?").expect("Internal error: regex creation")
+});
 
 // A best-effort "does this string look kinda like a path?" function to determine whether to auto-cd
 fn looks_like_path(orig: &str) -> bool {
@@ -1421,7 +1422,7 @@ fn are_session_ids_in_sync() {
 
 #[cfg(test)]
 mod test_auto_cd {
-    use super::{do_auto_cd, parse_operation, ReplOperation};
+    use super::{do_auto_cd, escape_special_vscode_bytes, parse_operation, ReplOperation};
     use nu_path::AbsolutePath;
     use nu_protocol::engine::{EngineState, Stack};
     use tempfile::tempdir;
@@ -1571,4 +1572,43 @@ mod test_auto_cd {
         let input = if cfg!(windows) { r"foo\" } else { "foo/" };
         check(tempdir, input, dir);
     }
+
+    #[test]
+    fn escape_vscode_semicolon_test() {
+        let input = r#"now;is"#;
+        let expected = r#"now\x3Bis"#;
+        let actual = escape_special_vscode_bytes(input).unwrap();
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn escape_vscode_backslash_test() {
+        let input = r#"now\is"#;
+        let expected = r#"now\\is"#;
+        let actual = escape_special_vscode_bytes(input).unwrap();
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn escape_vscode_linefeed_test() {
+        let input = "now\nis";
+        let expected = r#"now\x0Ais"#;
+        let actual = escape_special_vscode_bytes(input).unwrap();
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn escape_vscode_tab_null_cr_test() {
+        let input = "now\t\0\ris";
+        let expected = r#"now\x09\x00\x0Dis"#;
+        let actual = escape_special_vscode_bytes(input).unwrap();
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn escape_vscode_multibyte_ok() {
+        let input = "now🍪is";
+        let actual = escape_special_vscode_bytes(input).unwrap();
+        assert_eq!(input, actual);
+    }
 }
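The DRIVE_PATH_REGEX change above swaps once_cell::sync::Lazy for the standard library's LazyLock, available since Rust 1.80 (matching the rust-version bump in Cargo.toml). A minimal sketch of the pattern with a plain computed value so it runs without any external crates; the static name and contents here are made up for illustration:

use std::sync::LazyLock;

// Before: static X: once_cell::sync::Lazy<T> = Lazy::new(|| ...);
// After: the same shape with the standard-library type.
static GREETING: LazyLock<String> = LazyLock::new(|| {
    // The closure runs exactly once, on first access, from whichever thread gets there first.
    format!("hello from {}", std::env::consts::OS)
});

fn main() {
    // First access initializes the value; later accesses reuse it.
    println!("{}", *GREETING);
    println!("{} bytes", GREETING.len());
}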
@@ -1,6 +1,6 @@
 use nu_cmd_base::hook::eval_hook;
 use nu_engine::{eval_block, eval_block_with_early_return};
-use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
+use nu_parser::{lex, parse, unescape_unquote_string, Token, TokenContents};
 use nu_protocol::{
     cli_error::report_compile_error,
     debugger::WithoutDebug,
@@ -10,7 +10,7 @@ use nu_protocol::{
 };
 #[cfg(windows)]
 use nu_utils::enable_vt_processing;
-use nu_utils::perf;
+use nu_utils::{escape_quote_string, perf};
 use std::path::Path;
 
 // This will collect environment variables from std::env and adds them to a stack.
@@ -221,7 +221,7 @@ pub fn eval_source(
             report_shell_error(engine_state, &err);
             let code = err.exit_code();
             stack.set_last_error(&err);
-            code
+            code.unwrap_or(0)
         }
     };
 
@@ -282,8 +282,22 @@ fn evaluate_source(
     }?;
 
     if let PipelineData::ByteStream(..) = pipeline {
-        pipeline.print(engine_state, stack, false, false)
-    } else if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
+        // run the display hook on bytestreams too
+        run_display_hook(engine_state, stack, pipeline, false)
+    } else {
+        run_display_hook(engine_state, stack, pipeline, true)
+    }?;
+
+    Ok(false)
+}
+
+fn run_display_hook(
+    engine_state: &mut EngineState,
+    stack: &mut Stack,
+    pipeline: PipelineData,
+    no_newline: bool,
+) -> Result<(), ShellError> {
+    if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
         let pipeline = eval_hook(
             engine_state,
             stack,
@@ -292,14 +306,11 @@ fn evaluate_source(
             &hook,
             "display_output",
         )?;
-        pipeline.print(engine_state, stack, false, false)
+        pipeline.print(engine_state, stack, no_newline, false)
     } else {
-        pipeline.print(engine_state, stack, true, false)
-    }?;
-
-    Ok(false)
+        pipeline.print(engine_state, stack, no_newline, false)
+    }
 }
 
 #[cfg(test)]
 mod test {
     use super::*;
crates/nu-cli/tests/commands/history_import.rs (new file, 296 lines)
@@ -0,0 +1,296 @@
use nu_protocol::HistoryFileFormat;
use nu_test_support::{nu, Outcome};
use reedline::{
    FileBackedHistory, History, HistoryItem, HistoryItemId, ReedlineError, SearchQuery,
    SqliteBackedHistory,
};
use rstest::rstest;
use tempfile::TempDir;

struct Test {
    cfg_dir: TempDir,
}

impl Test {
    fn new(history_format: &'static str) -> Self {
        let cfg_dir = tempfile::Builder::new()
            .prefix("history_import_test")
            .tempdir()
            .unwrap();
        // Assigning to $env.config.history.file_format seems to work only in startup
        // configuration.
        std::fs::write(
            cfg_dir.path().join("env.nu"),
            format!("$env.config.history.file_format = {history_format:?}"),
        )
        .unwrap();
        Self { cfg_dir }
    }

    fn nu(&self, cmd: impl AsRef<str>) -> Outcome {
        let env = [(
            "XDG_CONFIG_HOME".to_string(),
            self.cfg_dir.path().to_str().unwrap().to_string(),
        )];
        let env_config = self.cfg_dir.path().join("env.nu");
        nu!(envs: env, env_config: env_config, cmd.as_ref())
    }

    fn open_plaintext(&self) -> Result<FileBackedHistory, ReedlineError> {
        FileBackedHistory::with_file(
            100,
            self.cfg_dir
                .path()
                .join("nushell")
                .join(HistoryFileFormat::Plaintext.default_file_name()),
        )
    }

    fn open_sqlite(&self) -> Result<SqliteBackedHistory, ReedlineError> {
        SqliteBackedHistory::with_file(
            self.cfg_dir
                .path()
                .join("nushell")
                .join(HistoryFileFormat::Sqlite.default_file_name()),
            None,
            None,
        )
    }

    fn open_backend(&self, format: HistoryFileFormat) -> Result<Box<dyn History>, ReedlineError> {
        fn boxed(be: impl History + 'static) -> Box<dyn History> {
            Box::new(be)
        }
        use HistoryFileFormat::*;
        match format {
            Plaintext => self.open_plaintext().map(boxed),
            Sqlite => self.open_sqlite().map(boxed),
        }
    }
}

enum HistorySource {
    Vec(Vec<HistoryItem>),
    Command(&'static str),
}

struct TestCase {
    dst_format: HistoryFileFormat,
    dst_history: Vec<HistoryItem>,
    src_history: HistorySource,
    want_history: Vec<HistoryItem>,
}

const EMPTY_TEST_CASE: TestCase = TestCase {
    dst_format: HistoryFileFormat::Plaintext,
    dst_history: Vec::new(),
    src_history: HistorySource::Vec(Vec::new()),
    want_history: Vec::new(),
};

impl TestCase {
    fn run(self) {
        use HistoryFileFormat::*;
        let test = Test::new(match self.dst_format {
            Plaintext => "plaintext",
            Sqlite => "sqlite",
        });
        save_all(
            &mut *test.open_backend(self.dst_format).unwrap(),
            self.dst_history,
        )
        .unwrap();

        let outcome = match self.src_history {
            HistorySource::Vec(src_history) => {
                let src_format = match self.dst_format {
                    Plaintext => Sqlite,
                    Sqlite => Plaintext,
                };
                save_all(&mut *test.open_backend(src_format).unwrap(), src_history).unwrap();
                test.nu("history import")
            }
            HistorySource::Command(cmd) => {
                let mut cmd = cmd.to_string();
                cmd.push_str(" | history import");
                test.nu(cmd)
            }
        };
        assert!(outcome.status.success());
        let got = query_all(&*test.open_backend(self.dst_format).unwrap()).unwrap();

        // Compare just the commands first, for readability.
        fn commands_only(items: &[HistoryItem]) -> Vec<&str> {
            items
                .iter()
                .map(|item| item.command_line.as_str())
                .collect()
        }
        assert_eq!(commands_only(&got), commands_only(&self.want_history));
        // If commands match, compare full items.
        assert_eq!(got, self.want_history);
    }
}

fn query_all(history: &dyn History) -> Result<Vec<HistoryItem>, ReedlineError> {
    history.search(SearchQuery::everything(
        reedline::SearchDirection::Forward,
        None,
    ))
}

fn save_all(history: &mut dyn History, items: Vec<HistoryItem>) -> Result<(), ReedlineError> {
    for item in items {
        history.save(item)?;
    }
    Ok(())
}

const EMPTY_ITEM: HistoryItem = HistoryItem {
    command_line: String::new(),
    id: None,
    start_timestamp: None,
    session_id: None,
    hostname: None,
    cwd: None,
    duration: None,
    exit_status: None,
    more_info: None,
};

#[test]
fn history_import_pipe_string() {
    TestCase {
        dst_format: HistoryFileFormat::Plaintext,
        src_history: HistorySource::Command("echo bar"),
        want_history: vec![HistoryItem {
            id: Some(HistoryItemId::new(0)),
            command_line: "bar".to_string(),
            ..EMPTY_ITEM
        }],
        ..EMPTY_TEST_CASE
    }
    .run();
}

#[test]
fn history_import_pipe_record() {
    TestCase {
        dst_format: HistoryFileFormat::Sqlite,
        src_history: HistorySource::Command("[[cwd command]; [/tmp some_command]]"),
        want_history: vec![HistoryItem {
            id: Some(HistoryItemId::new(1)),
            command_line: "some_command".to_string(),
            cwd: Some("/tmp".to_string()),
            ..EMPTY_ITEM
        }],
        ..EMPTY_TEST_CASE
    }
    .run();
}

#[test]
fn to_empty_plaintext() {
    TestCase {
        dst_format: HistoryFileFormat::Plaintext,
        src_history: HistorySource::Vec(vec![
            HistoryItem {
                command_line: "foo".to_string(),
                ..EMPTY_ITEM
            },
            HistoryItem {
                command_line: "bar".to_string(),
                ..EMPTY_ITEM
            },
        ]),
        want_history: vec![
            HistoryItem {
                id: Some(HistoryItemId::new(0)),
                command_line: "foo".to_string(),
                ..EMPTY_ITEM
            },
            HistoryItem {
                id: Some(HistoryItemId::new(1)),
                command_line: "bar".to_string(),
                ..EMPTY_ITEM
            },
        ],
        ..EMPTY_TEST_CASE
    }
    .run()
}

#[test]
fn to_empty_sqlite() {
    TestCase {
        dst_format: HistoryFileFormat::Sqlite,
        src_history: HistorySource::Vec(vec![
            HistoryItem {
                command_line: "foo".to_string(),
                ..EMPTY_ITEM
            },
            HistoryItem {
                command_line: "bar".to_string(),
                ..EMPTY_ITEM
            },
        ]),
        want_history: vec![
            HistoryItem {
                id: Some(HistoryItemId::new(1)),
                command_line: "foo".to_string(),
                ..EMPTY_ITEM
            },
            HistoryItem {
                id: Some(HistoryItemId::new(2)),
                command_line: "bar".to_string(),
                ..EMPTY_ITEM
            },
        ],
        ..EMPTY_TEST_CASE
    }
    .run()
}

#[rstest]
#[case::plaintext(HistoryFileFormat::Plaintext)]
|
||||||
|
#[case::sqlite(HistoryFileFormat::Sqlite)]
|
||||||
|
fn to_existing(#[case] dst_format: HistoryFileFormat) {
|
||||||
|
TestCase {
|
||||||
|
dst_format,
|
||||||
|
dst_history: vec![
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(0)),
|
||||||
|
command_line: "original-1".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "original-2".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
],
|
||||||
|
src_history: HistorySource::Vec(vec![HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "new".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
}]),
|
||||||
|
want_history: vec![
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(0)),
|
||||||
|
command_line: "original-1".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(1)),
|
||||||
|
command_line: "original-2".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
HistoryItem {
|
||||||
|
id: Some(HistoryItemId::new(2)),
|
||||||
|
command_line: "new".to_string(),
|
||||||
|
..EMPTY_ITEM
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
.run()
|
||||||
|
}
|
@ -1,2 +1,3 @@
+mod history_import;
 mod keybindings_list;
 mod nu_highlight;
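A hedged usage sketch drawn only from the test cases above: `history import` reads either plain command strings or records from the pipeline, and with no pipeline input it imports from the other on-disk history backend. The flag-free forms shown here are an assumption based solely on these tests.

# import a single command piped in as a string
echo bar | history import
# import records; extra columns such as cwd are carried over
[[cwd command]; [/tmp some_command]] | history import
# with no pipeline input, import from the other history file format
history import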
@ -5,7 +5,7 @@ edition = "2021"
license = "MIT"
name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
-version = "0.99.0"
+version = "0.100.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@ -13,10 +13,10 @@ version = "0.99.0"
workspace = true

[dependencies]
-nu-engine = { path = "../nu-engine", version = "0.99.0" }
-nu-parser = { path = "../nu-parser", version = "0.99.0" }
-nu-path = { path = "../nu-path", version = "0.99.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
+nu-engine = { path = "../nu-engine", version = "0.100.0" }
+nu-parser = { path = "../nu-parser", version = "0.100.0" }
+nu-path = { path = "../nu-path", version = "0.100.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.0" }

indexmap = { workspace = true }
miette = { workspace = true }
@ -78,10 +78,10 @@ pub fn get_editor(
        get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor")
    {
        Ok(buff_editor)
-    } else if let Some(value) = env_vars.get("EDITOR") {
-        get_editor_commandline(value, "$env.EDITOR")
    } else if let Some(value) = env_vars.get("VISUAL") {
        get_editor_commandline(value, "$env.VISUAL")
+    } else if let Some(value) = env_vars.get("EDITOR") {
+        get_editor_commandline(value, "$env.EDITOR")
    } else {
        Err(ShellError::GenericError {
            error: "No editor configured".into(),
@ -5,7 +5,7 @@ edition = "2021"
license = "MIT"
name = "nu-cmd-extra"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra"
-version = "0.99.0"
+version = "0.100.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@ -16,13 +16,13 @@ bench = false
workspace = true

[dependencies]
-nu-cmd-base = { path = "../nu-cmd-base", version = "0.99.0" }
-nu-engine = { path = "../nu-engine", version = "0.99.0" }
-nu-json = { version = "0.99.0", path = "../nu-json" }
-nu-parser = { path = "../nu-parser", version = "0.99.0" }
-nu-pretty-hex = { version = "0.99.0", path = "../nu-pretty-hex" }
-nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
-nu-utils = { path = "../nu-utils", version = "0.99.0" }
+nu-cmd-base = { path = "../nu-cmd-base", version = "0.100.0" }
+nu-engine = { path = "../nu-engine", version = "0.100.0" }
+nu-json = { version = "0.100.0", path = "../nu-json" }
+nu-parser = { path = "../nu-parser", version = "0.100.0" }
+nu-pretty-hex = { version = "0.100.0", path = "../nu-pretty-hex" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.0" }
+nu-utils = { path = "../nu-utils", version = "0.100.0" }

# Potential dependencies for extras
heck = { workspace = true }
@ -36,6 +36,6 @@ v_htmlescape = { workspace = true }
itertools = { workspace = true }

[dev-dependencies]
-nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.99.0" }
-nu-command = { path = "../nu-command", version = "0.99.0" }
-nu-test-support = { path = "../nu-test-support", version = "0.99.0" }
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.100.0" }
+nu-command = { path = "../nu-command", version = "0.100.0" }
+nu-test-support = { path = "../nu-test-support", version = "0.100.0" }
@ -6,7 +6,7 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-lang"
edition = "2021"
license = "MIT"
name = "nu-cmd-lang"
-version = "0.99.0"
+version = "0.100.0"

[lib]
bench = false
@ -15,10 +15,10 @@ bench = false
workspace = true

[dependencies]
-nu-engine = { path = "../nu-engine", version = "0.99.0" }
-nu-parser = { path = "../nu-parser", version = "0.99.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
-nu-utils = { path = "../nu-utils", version = "0.99.0" }
+nu-engine = { path = "../nu-engine", version = "0.100.0" }
+nu-parser = { path = "../nu-parser", version = "0.100.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.0" }
+nu-utils = { path = "../nu-utils", version = "0.100.0" }

itertools = { workspace = true }
shadow-rs = { version = "0.35", default-features = false }
@ -107,7 +107,11 @@ fn run_catch(

    if let Some(catch) = catch {
        stack.set_last_error(&error);
-        let error = error.into_value(span);
+        let fancy_errors = match engine_state.get_config().error_style {
+            nu_protocol::ErrorStyle::Fancy => true,
+            nu_protocol::ErrorStyle::Plain => false,
+        };
+        let error = error.into_value(span, fancy_errors);
        let block = engine_state.get_block(catch.block_id);
        // Put the error value in the positional closure var
        if let Some(var) = block.signature.get_positional(0) {
@ -98,15 +98,21 @@ This command is a parser keyword. For details, check:
            engine_state.get_span_contents(import_pattern.head.span),
        );

-        let maybe_file_path = find_in_dirs_env(
+        let maybe_file_path_or_dir = find_in_dirs_env(
            &module_arg_str,
            engine_state,
            caller_stack,
            get_dirs_var_from_call(caller_stack, call),
        )?;
-        let maybe_parent = maybe_file_path
-            .as_ref()
-            .and_then(|path| path.parent().map(|p| p.to_path_buf()));
+        // module_arg_str maybe a directory, in this case
+        // find_in_dirs_env returns a directory.
+        let maybe_parent = maybe_file_path_or_dir.as_ref().and_then(|path| {
+            if path.is_dir() {
+                Some(path.to_path_buf())
+            } else {
+                path.parent().map(|p| p.to_path_buf())
+            }
+        });

        let mut callee_stack = caller_stack
            .gather_captures(engine_state, &block.captures)
@ -118,9 +124,15 @@ This command is a parser keyword. For details, check:
            callee_stack.add_env_var("FILE_PWD".to_string(), file_pwd);
        }

-        if let Some(file_path) = maybe_file_path {
-            let file_path = Value::string(file_path.to_string_lossy(), call.head);
-            callee_stack.add_env_var("CURRENT_FILE".to_string(), file_path);
+        if let Some(path) = maybe_file_path_or_dir {
+            let module_file_path = if path.is_dir() {
+                // the existence of `mod.nu` is verified in parsing time
+                // so it's safe to use it here.
+                Value::string(path.join("mod.nu").to_string_lossy(), call.head)
+            } else {
+                Value::string(path.to_string_lossy(), call.head)
+            };
+            callee_stack.add_env_var("CURRENT_FILE".to_string(), module_file_path);
        }

        let eval_block = get_eval_block(engine_state);
@ -5,7 +5,7 @@ edition = "2021"
license = "MIT"
name = "nu-cmd-plugin"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-plugin"
-version = "0.99.0"
+version = "0.100.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@ -13,10 +13,10 @@ version = "0.99.0"
workspace = true

[dependencies]
-nu-engine = { path = "../nu-engine", version = "0.99.0" }
-nu-path = { path = "../nu-path", version = "0.99.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.99.0", features = ["plugin"] }
-nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.99.0" }
+nu-engine = { path = "../nu-engine", version = "0.100.0" }
+nu-path = { path = "../nu-path", version = "0.100.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.0", features = ["plugin"] }
+nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.100.0" }

itertools = { workspace = true }

@ -119,7 +119,7 @@ apparent the next time `nu` is next launched with that plugin registry file.
        let metadata = interface.get_metadata()?;
        let commands = interface.get_signature()?;

-        modify_plugin_file(engine_state, stack, call.head, custom_path, |contents| {
+        modify_plugin_file(engine_state, stack, call.head, &custom_path, |contents| {
            // Update the file with the received metadata and signatures
            let item = PluginRegistryItem::new(plugin.identity(), metadata, commands);
            contents.upsert_plugin(item);
@ -1,5 +1,8 @@
-use itertools::Itertools;
+use itertools::{EitherOrBoth, Itertools};
use nu_engine::command_prelude::*;
+use nu_protocol::{IntoValue, PluginRegistryItemData};
+
+use crate::util::read_plugin_file;

#[derive(Clone)]
pub struct PluginList;
@ -17,7 +20,7 @@ impl Command for PluginList {
                [
                    ("name".into(), Type::String),
                    ("version".into(), Type::String),
-                    ("is_running".into(), Type::Bool),
+                    ("status".into(), Type::String),
                    ("pid".into(), Type::Int),
                    ("filename".into(), Type::String),
                    ("shell".into(), Type::String),
@ -26,11 +29,54 @@ impl Command for PluginList {
                .into(),
                ),
            )
+            .named(
+                "plugin-config",
+                SyntaxShape::Filepath,
+                "Use a plugin registry file other than the one set in `$nu.plugin-path`",
+                None,
+            )
+            .switch(
+                "engine",
+                "Show info for plugins that are loaded into the engine only.",
+                Some('e'),
+            )
+            .switch(
+                "registry",
+                "Show info for plugins from the registry file only.",
+                Some('r'),
+            )
            .category(Category::Plugin)
    }

    fn description(&self) -> &str {
-        "List installed plugins."
+        "List loaded and installed plugins."
+    }
+
+    fn extra_description(&self) -> &str {
+        r#"
+The `status` column will contain one of the following values:
+
+- `added`: The plugin is present in the plugin registry file, but not in
+  the engine.
+- `loaded`: The plugin is present both in the plugin registry file and in
+  the engine, but is not running.
+- `running`: The plugin is currently running, and the `pid` column should
+  contain its process ID.
+- `modified`: The plugin state present in the plugin registry file is different
+  from the state in the engine.
+- `removed`: The plugin is still loaded in the engine, but is not present in
+  the plugin registry file.
+- `invalid`: The data in the plugin registry file couldn't be deserialized,
+  and the plugin most likely needs to be added again.
+
+`running` takes priority over any other status. Unless `--registry` is used
+or the plugin has not been loaded yet, the values of `version`, `filename`,
+`shell`, and `commands` reflect the values in the engine and not the ones in
+the plugin registry file.
+
+See also: `plugin use`
+"#
+        .trim()
    }

    fn search_terms(&self) -> Vec<&str> {
@ -45,7 +91,7 @@ impl Command for PluginList {
            result: Some(Value::test_list(vec![Value::test_record(record! {
                "name" => Value::test_string("inc"),
                "version" => Value::test_string(env!("CARGO_PKG_VERSION")),
-                "is_running" => Value::test_bool(true),
+                "status" => Value::test_string("running"),
                "pid" => Value::test_int(106480),
                "filename" => if cfg!(windows) {
                    Value::test_string(r"C:\nu\plugins\nu_plugin_inc.exe")
@ -67,58 +113,189 @@ impl Command for PluginList {
    fn run(
        &self,
        engine_state: &EngineState,
-        _stack: &mut Stack,
+        stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
-        let head = call.head;
-
-        // Group plugin decls by plugin identity
-        let decls = engine_state.plugin_decls().into_group_map_by(|decl| {
-            decl.plugin_identity()
-                .expect("plugin decl should have identity")
-        });
-
-        // Build plugins list
-        let list = engine_state.plugins().iter().map(|plugin| {
-            // Find commands that belong to the plugin
-            let commands = decls.get(plugin.identity())
-                .into_iter()
-                .flat_map(|decls| {
-                    decls.iter().map(|decl| Value::string(decl.name(), head))
-                })
-                .collect();
-
-            let pid = plugin
-                .pid()
-                .map(|p| Value::int(p as i64, head))
-                .unwrap_or(Value::nothing(head));
-
-            let shell = plugin
-                .identity()
-                .shell()
-                .map(|s| Value::string(s.to_string_lossy(), head))
-                .unwrap_or(Value::nothing(head));
-
-            let metadata = plugin.metadata();
-            let version = metadata
-                .and_then(|m| m.version)
-                .map(|s| Value::string(s, head))
-                .unwrap_or(Value::nothing(head));
-
-            let record = record! {
-                "name" => Value::string(plugin.identity().name(), head),
-                "version" => version,
-                "is_running" => Value::bool(plugin.is_running(), head),
-                "pid" => pid,
-                "filename" => Value::string(plugin.identity().filename().to_string_lossy(), head),
-                "shell" => shell,
-                "commands" => Value::list(commands, head),
-            };
-
-            Value::record(record, head)
-        }).collect();
-
-        Ok(Value::list(list, head).into_pipeline_data())
+        let custom_path = call.get_flag(engine_state, stack, "plugin-config")?;
+        let engine_mode = call.has_flag(engine_state, stack, "engine")?;
+        let registry_mode = call.has_flag(engine_state, stack, "registry")?;
+
+        let plugins_info = match (engine_mode, registry_mode) {
+            // --engine and --registry together is equivalent to the default.
+            (false, false) | (true, true) => {
+                if engine_state.plugin_path.is_some() || custom_path.is_some() {
+                    let plugins_in_engine = get_plugins_in_engine(engine_state);
+                    let plugins_in_registry =
+                        get_plugins_in_registry(engine_state, stack, call.head, &custom_path)?;
+                    merge_plugin_info(plugins_in_engine, plugins_in_registry)
+                } else {
+                    // Don't produce error when running nu --no-config-file
+                    get_plugins_in_engine(engine_state)
+                }
+            }
+            (true, false) => get_plugins_in_engine(engine_state),
+            (false, true) => get_plugins_in_registry(engine_state, stack, call.head, &custom_path)?,
+        };
+
+        Ok(plugins_info.into_value(call.head).into_pipeline_data())
+    }
+}
+
+#[derive(Debug, Clone, IntoValue, PartialOrd, Ord, PartialEq, Eq)]
+struct PluginInfo {
+    name: String,
+    version: Option<String>,
+    status: PluginStatus,
+    pid: Option<u32>,
+    filename: String,
+    shell: Option<String>,
+    commands: Vec<String>,
+}
+
+#[derive(Debug, Clone, Copy, IntoValue, PartialOrd, Ord, PartialEq, Eq)]
+#[nu_value(rename_all = "snake_case")]
+enum PluginStatus {
+    Added,
+    Loaded,
+    Running,
+    Modified,
+    Removed,
+    Invalid,
+}
+
+fn get_plugins_in_engine(engine_state: &EngineState) -> Vec<PluginInfo> {
+    // Group plugin decls by plugin identity
+    let decls = engine_state.plugin_decls().into_group_map_by(|decl| {
+        decl.plugin_identity()
+            .expect("plugin decl should have identity")
+    });
+
+    // Build plugins list
+    engine_state
+        .plugins()
+        .iter()
+        .map(|plugin| {
+            // Find commands that belong to the plugin
+            let commands = decls
+                .get(plugin.identity())
+                .into_iter()
+                .flat_map(|decls| decls.iter().map(|decl| decl.name().to_owned()))
+                .sorted()
+                .collect();
+
+            PluginInfo {
+                name: plugin.identity().name().into(),
+                version: plugin.metadata().and_then(|m| m.version),
+                status: if plugin.pid().is_some() {
+                    PluginStatus::Running
+                } else {
+                    PluginStatus::Loaded
+                },
+                pid: plugin.pid(),
+                filename: plugin.identity().filename().to_string_lossy().into_owned(),
+                shell: plugin
+                    .identity()
+                    .shell()
+                    .map(|path| path.to_string_lossy().into_owned()),
+                commands,
+            }
+        })
+        .sorted()
+        .collect()
+}
+
+fn get_plugins_in_registry(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    span: Span,
+    custom_path: &Option<Spanned<String>>,
+) -> Result<Vec<PluginInfo>, ShellError> {
+    let plugin_file_contents = read_plugin_file(engine_state, stack, span, custom_path)?;
+
+    let plugins_info = plugin_file_contents
+        .plugins
+        .into_iter()
+        .map(|plugin| {
+            let mut info = PluginInfo {
+                name: plugin.name,
+                version: None,
+                status: PluginStatus::Added,
+                pid: None,
+                filename: plugin.filename.to_string_lossy().into_owned(),
+                shell: plugin.shell.map(|path| path.to_string_lossy().into_owned()),
+                commands: vec![],
+            };
+
+            if let PluginRegistryItemData::Valid { metadata, commands } = plugin.data {
+                info.version = metadata.version;
+                info.commands = commands
+                    .into_iter()
+                    .map(|command| command.sig.name)
+                    .sorted()
+                    .collect();
+            } else {
+                info.status = PluginStatus::Invalid;
+            }
+            info
+        })
+        .sorted()
+        .collect();
+
+    Ok(plugins_info)
+}
+
+/// If no options are provided, the command loads from both the plugin list in the engine and what's
+/// in the registry file. We need to reconcile the two to set the proper states and make sure that
+/// new plugins that were added to the plugin registry file show up.
+fn merge_plugin_info(
+    from_engine: Vec<PluginInfo>,
+    from_registry: Vec<PluginInfo>,
+) -> Vec<PluginInfo> {
+    from_engine
+        .into_iter()
+        .merge_join_by(from_registry, |info_a, info_b| {
+            info_a.name.cmp(&info_b.name)
+        })
+        .map(|either_or_both| match either_or_both {
+            // Exists in the engine, but not in the registry file
+            EitherOrBoth::Left(info) => PluginInfo {
+                status: match info.status {
+                    PluginStatus::Running => info.status,
+                    // The plugin is not in the registry file, so it should be marked as `removed`
+                    _ => PluginStatus::Removed,
+                },
+                ..info
+            },
+            // Exists in the registry file, but not in the engine
+            EitherOrBoth::Right(info) => info,
+            // Exists in both
+            EitherOrBoth::Both(info_engine, info_registry) => PluginInfo {
+                status: match (info_engine.status, info_registry.status) {
+                    // Above all, `running` should be displayed if the plugin is running
+                    (PluginStatus::Running, _) => PluginStatus::Running,
+                    // `invalid` takes precedence over other states because the user probably wants
+                    // to fix it
+                    (_, PluginStatus::Invalid) => PluginStatus::Invalid,
+                    // Display `modified` if the state in the registry is different somehow
+                    _ if info_engine.is_modified(&info_registry) => PluginStatus::Modified,
+                    // Otherwise, `loaded` (it's not running)
+                    _ => PluginStatus::Loaded,
+                },
+                ..info_engine
+            },
+        })
+        .sorted()
+        .collect()
+}
+
+impl PluginInfo {
+    /// True if the plugin info shows some kind of change (other than status/pid) relative to the
+    /// other
+    fn is_modified(&self, other: &PluginInfo) -> bool {
+        self.name != other.name
+            || self.filename != other.filename
+            || self.shell != other.shell
+            || self.commands != other.commands
    }
}
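A hedged usage sketch of the reworked `plugin list` (the flag and column names come from the signature above; the alternate registry file name is a hypothetical example):

# default: merge what the engine knows with the plugin registry file; output gains a status column
plugin list
# only plugins currently loaded into the engine
plugin list --engine
# only what is recorded in the plugin registry file
plugin list --registry
# hypothetical alternate registry file
plugin list --plugin-config alt-plugin.msgpackz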
@ -87,7 +87,7 @@ fixed with `plugin add`.

        let filename = canonicalize_possible_filename_arg(engine_state, stack, &name.item);

-        modify_plugin_file(engine_state, stack, call.head, custom_path, |contents| {
+        modify_plugin_file(engine_state, stack, call.head, &custom_path, |contents| {
            if let Some(index) = contents
                .plugins
                .iter()
@ -6,18 +6,17 @@ use std::{
    path::PathBuf,
};

-pub(crate) fn modify_plugin_file(
+fn get_plugin_registry_file_path(
    engine_state: &EngineState,
    stack: &mut Stack,
    span: Span,
-    custom_path: Option<Spanned<String>>,
-    operate: impl FnOnce(&mut PluginRegistryFile) -> Result<(), ShellError>,
-) -> Result<(), ShellError> {
+    custom_path: &Option<Spanned<String>>,
+) -> Result<PathBuf, ShellError> {
    #[allow(deprecated)]
    let cwd = current_dir(engine_state, stack)?;

-    let plugin_registry_file_path = if let Some(ref custom_path) = custom_path {
-        nu_path::expand_path_with(&custom_path.item, cwd, true)
+    if let Some(ref custom_path) = custom_path {
+        Ok(nu_path::expand_path_with(&custom_path.item, cwd, true))
    } else {
        engine_state
            .plugin_path
@ -28,8 +27,53 @@ pub(crate) fn modify_plugin_file(
                span: Some(span),
                help: Some("you may be running `nu` with --no-config-file".into()),
                inner: vec![],
-            })?
-    };
+            })
+    }
+}
+
+pub(crate) fn read_plugin_file(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    span: Span,
+    custom_path: &Option<Spanned<String>>,
+) -> Result<PluginRegistryFile, ShellError> {
+    let plugin_registry_file_path =
+        get_plugin_registry_file_path(engine_state, stack, span, custom_path)?;
+
+    let file_span = custom_path.as_ref().map(|p| p.span).unwrap_or(span);
+
+    // Try to read the plugin file if it exists
+    if fs::metadata(&plugin_registry_file_path).is_ok_and(|m| m.len() > 0) {
+        PluginRegistryFile::read_from(
+            File::open(&plugin_registry_file_path).map_err(|err| ShellError::IOErrorSpanned {
+                msg: format!(
+                    "failed to read `{}`: {}",
+                    plugin_registry_file_path.display(),
+                    err
+                ),
+                span: file_span,
+            })?,
+            Some(file_span),
+        )
+    } else if let Some(path) = custom_path {
+        Err(ShellError::FileNotFound {
+            file: path.item.clone(),
+            span: path.span,
+        })
+    } else {
+        Ok(PluginRegistryFile::default())
+    }
+}
+
+pub(crate) fn modify_plugin_file(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    span: Span,
+    custom_path: &Option<Spanned<String>>,
+    operate: impl FnOnce(&mut PluginRegistryFile) -> Result<(), ShellError>,
+) -> Result<(), ShellError> {
+    let plugin_registry_file_path =
+        get_plugin_registry_file_path(engine_state, stack, span, custom_path)?;

    let file_span = custom_path.as_ref().map(|p| p.span).unwrap_or(span);

@ -5,7 +5,7 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-color-confi
edition = "2021"
license = "MIT"
name = "nu-color-config"
-version = "0.99.0"
+version = "0.100.0"

[lib]
bench = false
@ -14,12 +14,12 @@ bench = false
workspace = true

[dependencies]
-nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
-nu-engine = { path = "../nu-engine", version = "0.99.0" }
-nu-json = { path = "../nu-json", version = "0.99.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.0" }
+nu-engine = { path = "../nu-engine", version = "0.100.0" }
+nu-json = { path = "../nu-json", version = "0.100.0" }
nu-ansi-term = { workspace = true }

serde = { workspace = true, features = ["derive"] }

[dev-dependencies]
-nu-test-support = { path = "../nu-test-support", version = "0.99.0" }
+nu-test-support = { path = "../nu-test-support", version = "0.100.0" }
@ -223,7 +223,7 @@ fn test_computable_style_closure_basic() {
        ];
        let actual_repl = nu!(cwd: dirs.test(), nu_repl_code(&inp));
        assert_eq!(actual_repl.err, "");
-        assert_eq!(actual_repl.out, "[bell.obj, book.obj, candle.obj]");
+        assert_eq!(actual_repl.out, r#"["bell.obj", "book.obj", "candle.obj"]"#);
    });
}

@ -5,7 +5,7 @@ edition = "2021"
license = "MIT"
name = "nu-command"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-command"
-version = "0.99.0"
+version = "0.100.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@ -16,21 +16,21 @@ bench = false
workspace = true

[dependencies]
-nu-cmd-base = { path = "../nu-cmd-base", version = "0.99.0" }
-nu-color-config = { path = "../nu-color-config", version = "0.99.0" }
-nu-engine = { path = "../nu-engine", version = "0.99.0" }
-nu-glob = { path = "../nu-glob", version = "0.99.0" }
-nu-json = { path = "../nu-json", version = "0.99.0" }
-nu-parser = { path = "../nu-parser", version = "0.99.0" }
-nu-path = { path = "../nu-path", version = "0.99.0" }
-nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.99.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.99.0" }
-nu-system = { path = "../nu-system", version = "0.99.0" }
-nu-table = { path = "../nu-table", version = "0.99.0" }
-nu-term-grid = { path = "../nu-term-grid", version = "0.99.0" }
-nu-utils = { path = "../nu-utils", version = "0.99.0" }
+nu-cmd-base = { path = "../nu-cmd-base", version = "0.100.0" }
+nu-color-config = { path = "../nu-color-config", version = "0.100.0" }
+nu-engine = { path = "../nu-engine", version = "0.100.0" }
+nu-glob = { path = "../nu-glob", version = "0.100.0" }
+nu-json = { path = "../nu-json", version = "0.100.0" }
+nu-parser = { path = "../nu-parser", version = "0.100.0" }
+nu-path = { path = "../nu-path", version = "0.100.0" }
+nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.100.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.0" }
+nu-system = { path = "../nu-system", version = "0.100.0" }
+nu-table = { path = "../nu-table", version = "0.100.0" }
+nu-term-grid = { path = "../nu-term-grid", version = "0.100.0" }
+nu-utils = { path = "../nu-utils", version = "0.100.0" }
nu-ansi-term = { workspace = true }
-nuon = { path = "../nuon", version = "0.99.0" }
+nuon = { path = "../nuon", version = "0.100.0" }

alphanumeric-sort = { workspace = true }
base64 = { workspace = true }
@ -66,7 +66,7 @@ native-tls = { workspace = true }
notify-debouncer-full = { workspace = true, default-features = false }
num-format = { workspace = true }
num-traits = { workspace = true }
-once_cell = { workspace = true }
+oem_cp = { workspace = true }
open = { workspace = true }
os_pipe = { workspace = true }
pathdiff = { workspace = true }
@ -139,8 +139,8 @@ sqlite = ["rusqlite"]
trash-support = ["trash"]

[dev-dependencies]
-nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.99.0" }
-nu-test-support = { path = "../nu-test-support", version = "0.99.0" }
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.100.0" }
+nu-test-support = { path = "../nu-test-support", version = "0.100.0" }

dirs = { workspace = true }
mockito = { workspace = true, default-features = false }
@ -1,5 +1,5 @@
use crate::{generate_strftime_list, parse_date_from_string};
-use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, NaiveTime, TimeZone, Utc};
+use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, TimeZone, Utc};
use human_date_parser::{from_human_time, ParseResult};
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
@ -185,11 +185,13 @@ impl Command for SubCommand {
                example: "'16.11.1984 8:00 am' | into datetime --format '%d.%m.%Y %H:%M %P'",
                #[allow(clippy::inconsistent_digit_grouping)]
                result: Some(Value::date(
-                    DateTime::from_naive_utc_and_offset(
-                        NaiveDateTime::parse_from_str("16.11.1984 8:00 am", "%d.%m.%Y %H:%M %P")
-                            .expect("date calculation should not fail in test"),
-                        *Local::now().offset(),
-                    ),
+                    Local
+                        .from_local_datetime(
+                            &NaiveDateTime::parse_from_str("16.11.1984 8:00 am", "%d.%m.%Y %H:%M %P")
+                                .expect("date calculation should not fail in test"),
+                        )
+                        .unwrap()
+                        .with_timezone(Local::now().offset()),
                    Span::test_data(),
                )),
            },
@ -275,12 +277,13 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
            if let Ok(date) = from_human_time(&input_val) {
                match date {
                    ParseResult::Date(date) => {
-                        let time = NaiveTime::from_hms_opt(0, 0, 0).expect("valid time");
+                        let time = Local::now().time();
                        let combined = date.and_time(time);
-                        let dt_fixed = DateTime::from_naive_utc_and_offset(
-                            combined,
-                            *Local::now().offset(),
-                        );
+                        let local_offset = *Local::now().offset();
+                        let dt_fixed =
+                            TimeZone::from_local_datetime(&local_offset, &combined)
+                                .single()
+                                .unwrap_or_default();
                        return Value::date(dt_fixed, span);
                    }
                    ParseResult::DateTime(date) => {
@ -289,10 +292,11 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
                    ParseResult::Time(time) => {
                        let date = Local::now().date_naive();
                        let combined = date.and_time(time);
-                        let dt_fixed = DateTime::from_naive_utc_and_offset(
-                            combined,
-                            *Local::now().offset(),
-                        );
+                        let local_offset = *Local::now().offset();
+                        let dt_fixed =
+                            TimeZone::from_local_datetime(&local_offset, &combined)
+                                .single()
+                                .unwrap_or_default();
                        return Value::date(dt_fixed, span);
                    }
                }
@ -386,13 +390,15 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
                Ok(d) => Value::date ( d, head ),
                Err(reason) => {
                    match NaiveDateTime::parse_from_str(val, &dt.0) {
-                        Ok(d) => Value::date (
-                            DateTime::from_naive_utc_and_offset(
-                                d,
-                                *Local::now().offset(),
-                            ),
-                            head,
-                        ),
+                        Ok(d) => {
+                            let local_offset = *Local::now().offset();
+                            let dt_fixed =
+                                TimeZone::from_local_datetime(&local_offset, &d)
+                                    .single()
+                                    .unwrap_or_default();
+
+                            Value::date (dt_fixed,head)
+                        }
                        Err(_) => {
                            Value::error (
                                ShellError::CantConvert { to_type: format!("could not parse as datetime using format '{}'", dt.0), from_type: reason.to_string(), span: head, help: Some("you can use `into datetime` without a format string to enable flexible parsing".to_string()) },
@ -503,7 +509,14 @@ mod tests {
    }

    #[test]
+    #[ignore]
    fn takes_a_date_format_without_timezone() {
+        // Ignoring this test for now because we changed the human-date-parser to use
+        // the users timezone instead of UTC. We may continue to tweak this behavior.
+        // Another hacky solution is to set the timezone to UTC in the test, which works
+        // on MacOS and Linux but hasn't been tested on Windows. Plus it kind of defeats
+        // the purpose of a "without_timezone" test.
+        // std::env::set_var("TZ", "UTC");
        let date_str = Value::test_string("16.11.1984 8:00 am");
        let fmt_options = Some(DatetimeFormat("%d.%m.%Y %H:%M %P".to_string()));
        let args = Arguments {
@ -513,12 +526,16 @@ mod tests {
        };
        let actual = action(&date_str, &args, Span::test_data());
        let expected = Value::date(
-            DateTime::from_naive_utc_and_offset(
-                NaiveDateTime::parse_from_str("16.11.1984 8:00 am", "%d.%m.%Y %H:%M %P").unwrap(),
-                *Local::now().offset(),
-            ),
+            Local
+                .from_local_datetime(
+                    &NaiveDateTime::parse_from_str("16.11.1984 8:00 am", "%d.%m.%Y %H:%M %P")
+                        .unwrap(),
+                )
+                .unwrap()
+                .with_timezone(Local::now().offset()),
            Span::test_data(),
        );

        assert_eq!(actual, expected)
    }

@ -259,7 +259,7 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
            if radix == 10 {
                *val as i64
            } else {
-                match convert_int(&Value::int(*val as i64, span), span, radix).as_i64() {
+                match convert_int(&Value::int(*val as i64, span), span, radix).as_int() {
                    Ok(v) => v,
                    _ => {
                        return Value::error(
@ -1,9 +1,9 @@
use crate::parse_date_from_string;
use nu_engine::command_prelude::*;
use nu_protocol::PipelineIterator;
-use once_cell::sync::Lazy;
use regex::{Regex, RegexBuilder};
use std::collections::HashSet;
+use std::sync::LazyLock;

#[derive(Clone)]
pub struct IntoValue;
@ -18,7 +18,7 @@ impl Command for IntoValue {
            .input_output_types(vec![(Type::table(), Type::table())])
            .named(
                "columns",
-                SyntaxShape::Table(vec![]),
+                SyntaxShape::List(Box::new(SyntaxShape::Any)),
                "list of columns to update",
                Some('c'),
            )
@ -271,8 +271,9 @@ const DATETIME_DMY_PATTERN: &str = r#"(?x)
    $
    "#;

-static DATETIME_DMY_RE: Lazy<Regex> =
-    Lazy::new(|| Regex::new(DATETIME_DMY_PATTERN).expect("datetime_dmy_pattern should be valid"));
+static DATETIME_DMY_RE: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(DATETIME_DMY_PATTERN).expect("datetime_dmy_pattern should be valid")
+});
const DATETIME_YMD_PATTERN: &str = r#"(?x)
    ^
    ['"]? # optional quotes
@ -297,8 +298,9 @@ const DATETIME_YMD_PATTERN: &str = r#"(?x)
    ['"]? # optional quotes
    $
    "#;
-static DATETIME_YMD_RE: Lazy<Regex> =
-    Lazy::new(|| Regex::new(DATETIME_YMD_PATTERN).expect("datetime_ymd_pattern should be valid"));
+static DATETIME_YMD_RE: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(DATETIME_YMD_PATTERN).expect("datetime_ymd_pattern should be valid")
+});
//2023-03-24 16:44:17.865147299 -05:00
const DATETIME_YMDZ_PATTERN: &str = r#"(?x)
    ^
@ -331,23 +333,24 @@ const DATETIME_YMDZ_PATTERN: &str = r#"(?x)
    ['"]? # optional quotes
    $
    "#;
-static DATETIME_YMDZ_RE: Lazy<Regex> =
-    Lazy::new(|| Regex::new(DATETIME_YMDZ_PATTERN).expect("datetime_ymdz_pattern should be valid"));
+static DATETIME_YMDZ_RE: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(DATETIME_YMDZ_PATTERN).expect("datetime_ymdz_pattern should be valid")
+});

-static FLOAT_RE: Lazy<Regex> = Lazy::new(|| {
+static FLOAT_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"^\s*[-+]?((\d*\.\d+)([eE][-+]?\d+)?|inf|NaN|(\d+)[eE][-+]?\d+|\d+\.)$")
        .expect("float pattern should be valid")
});

-static INTEGER_RE: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r"^\s*-?(\d+)$").expect("integer pattern should be valid"));
+static INTEGER_RE: LazyLock<Regex> =
+    LazyLock::new(|| Regex::new(r"^\s*-?(\d+)$").expect("integer pattern should be valid"));

-static INTEGER_WITH_DELIMS_RE: Lazy<Regex> = Lazy::new(|| {
+static INTEGER_WITH_DELIMS_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"^\s*-?(\d{1,3}([,_]\d{3})+)$")
        .expect("integer with delimiters pattern should be valid")
});

-static BOOLEAN_RE: Lazy<Regex> = Lazy::new(|| {
+static BOOLEAN_RE: LazyLock<Regex> = LazyLock::new(|| {
    RegexBuilder::new(r"^\s*(true)$|^(false)$")
        .case_insensitive(true)
        .build()
@ -101,7 +101,7 @@ fn all_columns(span: Span) -> Value {
        let environment = {
            let mut env_rec = Record::new();
            for val in p.environ() {
-                if let Some((key, value)) = val.split_once('=') {
+                if let Some((key, value)) = val.to_string_lossy().split_once('=') {
                    let is_env_var_a_list = {
                        {
                            #[cfg(target_family = "windows")]
@ -146,8 +146,8 @@ fn all_columns(span: Span) -> Value {
            "root" => root,
            "cwd" => cwd,
            "exe_path" => exe_path,
-            "command" => Value::string(p.cmd().join(" "), span),
-            "name" => Value::string(p.name(), span),
+            "command" => Value::string(p.cmd().join(std::ffi::OsStr::new(" ")).to_string_lossy(), span),
+            "name" => Value::string(p.name().to_string_lossy(), span),
            "environment" => environment,
        },
        span,
@ -387,6 +387,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
            HttpOptions,
            Url,
            UrlBuildQuery,
+            UrlSplitQuery,
            UrlDecode,
            UrlEncode,
            UrlJoin,
@ -146,6 +146,9 @@ impl Command for Open {
                    }
                };

+                // Assigning content type should only happen in raw mode. Otherwise, the content
+                // will potentially be in one of the built-in nushell `from xxx` formats and therefore
+                // cease to be in the original content-type.... or so I'm told. :)
                let content_type = if raw {
                    path.extension()
                        .map(|ext| ext.to_string_lossy().to_string())
@ -283,6 +286,9 @@ fn detect_content_type(extension: &str) -> Option<String> {
    match extension {
        // Per RFC-9512, application/yaml should be used
        "yaml" | "yml" => Some("application/yaml".to_string()),
+        "nu" => Some("application/x-nuscript".to_string()),
+        "json" | "jsonl" | "ndjson" => Some("application/json".to_string()),
+        "nuon" => Some("application/x-nuon".to_string()),
        _ => mime_guess::from_ext(extension)
            .first()
            .map(|mime| mime.to_string()),
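A hedged sketch of what the added mappings imply for `open --raw` (the file names are hypothetical, and reading the content type back through pipeline `metadata` is an assumption about the surrounding metadata support, not something shown in this diff):

open --raw script.nu | metadata      # content_type should be application/x-nuscript
open --raw data.nuon | metadata      # content_type should be application/x-nuon
open --raw rows.ndjson | metadata    # content_type should be application/json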
@ -48,6 +48,11 @@ impl Command for Touch {
|
|||||||
"do not create the file if it does not exist",
|
"do not create the file if it does not exist",
|
||||||
Some('c'),
|
Some('c'),
|
||||||
)
|
)
|
||||||
|
.switch(
|
||||||
|
"no-deref",
|
||||||
|
"do not follow symlinks",
|
||||||
|
Some('s')
|
||||||
|
)
|
||||||
.category(Category::FileSystem)
|
.category(Category::FileSystem)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -64,6 +69,7 @@ impl Command for Touch {
|
|||||||
) -> Result<PipelineData, ShellError> {
|
 ) -> Result<PipelineData, ShellError> {
     let mut change_mtime: bool = call.has_flag(engine_state, stack, "modified")?;
     let mut change_atime: bool = call.has_flag(engine_state, stack, "access")?;
+    let no_follow_symlinks: bool = call.has_flag(engine_state, stack, "no-deref")?;
     let reference: Option<Spanned<String>> = call.get_flag(engine_state, stack, "reference")?;
     let no_create: bool = call.has_flag(engine_state, stack, "no-create")?;
     let files: Vec<Spanned<NuGlob>> = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
@@ -88,19 +94,29 @@ impl Command for Touch {

     if let Some(reference) = reference {
         let reference_path = nu_path::expand_path_with(reference.item, &cwd, true);
-        if !reference_path.exists() {
+        let exists = if no_follow_symlinks {
+            // There's no symlink_exists function, so we settle for
+            // getting direct metadata and if it's OK, it exists
+            reference_path.symlink_metadata().is_ok()
+        } else {
+            reference_path.exists()
+        };
+        if !exists {
             return Err(ShellError::FileNotFoundCustom {
                 msg: "Reference path not found".into(),
                 span: reference.span,
             });
         }

-        let metadata = reference_path
-            .metadata()
-            .map_err(|err| ShellError::IOErrorSpanned {
-                msg: format!("Failed to read metadata: {err}"),
-                span: reference.span,
-            })?;
+        let metadata = if no_follow_symlinks {
+            reference_path.symlink_metadata()
+        } else {
+            reference_path.metadata()
+        };
+        let metadata = metadata.map_err(|err| ShellError::IOErrorSpanned {
+            msg: format!("Failed to read metadata: {err}"),
+            span: reference.span,
+        })?;
         mtime = metadata
             .modified()
             .map_err(|err| ShellError::IOErrorSpanned {
@@ -117,14 +133,27 @@ impl Command for Touch {

     for glob in files {
         let path = expand_path_with(glob.item.as_ref(), &cwd, glob.item.is_expand());
+        let exists = if no_follow_symlinks {
+            path.symlink_metadata().is_ok()
+        } else {
+            path.exists()
+        };

         // If --no-create is passed and the file/dir does not exist there's nothing to do
-        if no_create && !path.exists() {
+        if no_create && !exists {
             continue;
         }

-        // Create a file at the given path unless the path is a directory
-        if !path.is_dir() {
+        // If --no-deref was passed in, the behavior of touch is to error on missing
+        if no_follow_symlinks && !exists {
+            return Err(ShellError::FileNotFound {
+                file: path.to_string_lossy().into_owned(),
+                span: glob.span,
+            });
+        }
+
+        // Create a file at the given path unless the path is a directory (or a symlink with -d)
+        if !path.is_dir() && (!no_follow_symlinks || !path.is_symlink()) {
             if let Err(err) = OpenOptions::new()
                 .write(true)
                 .create(true)
@@ -138,9 +167,31 @@ impl Command for Touch {
             };
         }

+        // We have to inefficiently access the target metadata to not reset it
+        // in set_symlink_file_times, because the filetime doesn't expose individual methods for it
+        let get_target_metadata = || {
+            path.symlink_metadata()
+                .map_err(|err| ShellError::IOErrorSpanned {
+                    msg: format!("Failed to read metadata: {err}"),
+                    span: glob.span,
+                })
+        };
+
         if change_mtime {
-            if let Err(err) = filetime::set_file_mtime(&path, FileTime::from_system_time(mtime))
-            {
+            let result = if no_follow_symlinks {
+                filetime::set_symlink_file_times(
+                    &path,
+                    if change_atime {
+                        FileTime::from_system_time(atime)
+                    } else {
+                        FileTime::from_system_time(get_target_metadata()?.accessed()?)
+                    },
+                    FileTime::from_system_time(mtime),
+                )
+            } else {
+                filetime::set_file_mtime(&path, FileTime::from_system_time(mtime))
+            };
+            if let Err(err) = result {
                 return Err(ShellError::ChangeModifiedTimeNotPossible {
                     msg: format!("Failed to change the modified time: {err}"),
                     span: glob.span,
@@ -149,8 +200,20 @@ impl Command for Touch {
         }

         if change_atime {
-            if let Err(err) = filetime::set_file_atime(&path, FileTime::from_system_time(atime))
-            {
+            let result = if no_follow_symlinks {
+                filetime::set_symlink_file_times(
+                    &path,
+                    FileTime::from_system_time(atime),
+                    if change_mtime {
+                        FileTime::from_system_time(mtime)
+                    } else {
+                        FileTime::from_system_time(get_target_metadata()?.modified()?)
+                    },
+                )
+            } else {
+                filetime::set_file_atime(&path, FileTime::from_system_time(atime))
+            };
+            if let Err(err) = result {
                 return Err(ShellError::ChangeAccessTimeNotPossible {
                     msg: format!("Failed to change the access time: {err}"),
                     span: glob.span,
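The timestamp handling above relies on the filetime crate's set_symlink_file_times, which always writes both the access and modification times at once, so the untouched one has to be read back first via symlink_metadata. A minimal standalone sketch of that pattern, under the assumption of a hypothetical bump_symlink_mtime helper (not part of this change):

use std::{fs, io, path::Path, time::SystemTime};

use filetime::FileTime;

// Update only the mtime of a symlink itself: read its current atime via
// symlink_metadata (which does not follow the link), then write both values back.
fn bump_symlink_mtime(path: &Path, mtime: SystemTime) -> io::Result<()> {
    let current = fs::symlink_metadata(path)?; // metadata of the link, not its target
    let atime = FileTime::from_system_time(current.accessed()?);
    filetime::set_symlink_file_times(path, atime, FileTime::from_system_time(mtime))
}

fn main() -> io::Result<()> {
    // Assumes a symlink named "some-link" exists in the current directory.
    bump_symlink_mtime(Path::new("some-link"), SystemTime::now())
}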
@@ -157,7 +157,7 @@ fn flat_value(columns: &[CellPath], item: Value, all: bool) -> Vec<Value> {
     let mut inner_table = None;

     for (column_index, (column, value)) in val.into_owned().into_iter().enumerate() {
-        let column_requested = columns.iter().find(|c| c.to_string() == column);
+        let column_requested = columns.iter().find(|c| c.to_column_name() == column);
         let need_flatten = { columns.is_empty() || column_requested.is_some() };
         let span = value.span();

@@ -38,6 +38,14 @@ impl Command for GroupBy {
         "Splits a list or table into groups, and returns a record containing those groups."
     }

+    fn extra_description(&self) -> &str {
+        r#"the group-by command makes some assumptions:
+ - if the input data is not a string, the grouper will convert the key to string but the values will remain in their original format. e.g. with bools, "true" and true would be in the same group (see example).
+ - datetime is formatted based on your configuration setting. use `format date` to change the format.
+ - filesize is formatted based on your configuration setting. use `format filesize` to change the format.
+ - some nushell values are not supported, such as closures."#
+    }
+
     fn run(
         &self,
         engine_state: &EngineState,
@@ -114,6 +122,20 @@ impl Command for GroupBy {
                 }),
             ])),
             },
+            Example {
+                description: "Group bools, whether they are strings or actual bools",
+                example: r#"[true "true" false "false"] | group-by"#,
+                result: Some(Value::test_record(record! {
+                    "true" => Value::test_list(vec![
+                        Value::test_bool(true),
+                        Value::test_string("true"),
+                    ]),
+                    "false" => Value::test_list(vec![
+                        Value::test_bool(false),
+                        Value::test_string("false"),
+                    ]),
+                })),
+            }
         ]
     }
 }
@@ -127,6 +149,7 @@ pub fn group_by(
     let head = call.head;
     let grouper: Option<Value> = call.opt(engine_state, stack, 0)?;
     let to_table = call.has_flag(engine_state, stack, "to-table")?;
+    let config = engine_state.get_config();

     let values: Vec<Value> = input.into_iter().collect();
     if values.is_empty() {
@@ -137,7 +160,7 @@ pub fn group_by(
         Some(grouper) => {
             let span = grouper.span();
             match grouper {
-                Value::CellPath { val, .. } => group_cell_path(val, values)?,
+                Value::CellPath { val, .. } => group_cell_path(val, values, config)?,
                 Value::Closure { val, .. } => {
                     group_closure(values, span, *val, engine_state, stack)?
                 }
@@ -149,7 +172,7 @@ pub fn group_by(
                 }
             }
         }
-        None => group_no_grouper(values)?,
+        None => group_no_grouper(values, config)?,
     };

     let value = if to_table {
@@ -164,6 +187,7 @@ pub fn group_by(
 fn group_cell_path(
     column_name: CellPath,
     values: Vec<Value>,
+    config: &nu_protocol::Config,
 ) -> Result<IndexMap<String, Vec<Value>>, ShellError> {
     let mut groups = IndexMap::<_, Vec<_>>::new();

@@ -176,18 +200,21 @@ fn group_cell_path(
             continue; // likely the result of a failed optional access, ignore this value
         }

-        let key = key.coerce_string()?;
+        let key = key.to_abbreviated_string(config);
         groups.entry(key).or_default().push(value);
     }

     Ok(groups)
 }

-fn group_no_grouper(values: Vec<Value>) -> Result<IndexMap<String, Vec<Value>>, ShellError> {
+fn group_no_grouper(
+    values: Vec<Value>,
+    config: &nu_protocol::Config,
+) -> Result<IndexMap<String, Vec<Value>>, ShellError> {
     let mut groups = IndexMap::<_, Vec<_>>::new();

     for value in values.into_iter() {
-        let key = value.coerce_string()?;
+        let key = value.to_abbreviated_string(config);
         groups.entry(key).or_default().push(value);
     }

@@ -203,12 +230,13 @@ fn group_closure(
 ) -> Result<IndexMap<String, Vec<Value>>, ShellError> {
     let mut groups = IndexMap::<_, Vec<_>>::new();
     let mut closure = ClosureEval::new(engine_state, stack, closure);
+    let config = engine_state.get_config();

     for value in values {
         let key = closure
             .run_with_value(value.clone())?
             .into_value(span)?
-            .coerce_into_string()?;
+            .to_abbreviated_string(config);

         groups.entry(key).or_default().push(value);
     }
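The grouping change above swaps an error-prone string coercion for a display-style conversion, which is why the bool true and the string "true" now land in the same bucket. A rough illustration of that behaviour using plain std types, with a stand-in enum that is only for this sketch and is not nushell's Value:

use std::collections::BTreeMap;
use std::fmt;

// Stand-in for a dynamically typed shell value; not nushell's Value type.
#[derive(Debug)]
enum Val {
    Bool(bool),
    Str(String),
}

impl fmt::Display for Val {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Val::Bool(b) => write!(f, "{b}"),
            Val::Str(s) => write!(f, "{s}"),
        }
    }
}

fn main() {
    let input = vec![
        Val::Bool(true),
        Val::Str("true".into()),
        Val::Bool(false),
        Val::Str("false".into()),
    ];

    // Group by the rendered string form of each value; the values themselves keep their type.
    let mut groups: BTreeMap<String, Vec<Val>> = BTreeMap::new();
    for v in input {
        groups.entry(v.to_string()).or_default().push(v);
    }

    // "true" holds [Bool(true), Str("true")]; "false" holds the other pair.
    for (key, members) in &groups {
        println!("{key}: {members:?}");
    }
}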
@@ -29,7 +29,7 @@ impl Command for Join {
         Signature::build("join")
             .required(
                 "right-table",
-                SyntaxShape::List(Box::new(SyntaxShape::Any)),
+                SyntaxShape::Table([].into()),
                 "The right table in the join.",
             )
             .required(
@@ -1,3 +1,5 @@
+use std::io::Read;
+
 use nu_engine::command_prelude::*;

 #[derive(Clone)]
@@ -9,12 +11,15 @@ impl Command for Length {
     }

     fn description(&self) -> &str {
-        "Count the number of items in an input list or rows in a table."
+        "Count the number of items in an input list, rows in a table, or bytes in binary data."
     }

     fn signature(&self) -> nu_protocol::Signature {
         Signature::build("length")
-            .input_output_types(vec![(Type::List(Box::new(Type::Any)), Type::Int)])
+            .input_output_types(vec![
+                (Type::List(Box::new(Type::Any)), Type::Int),
+                (Type::Binary, Type::Int),
+            ])
             .category(Category::Filters)
     }

@@ -44,6 +49,11 @@ impl Command for Length {
                 example: "[{a:1 b:2}, {a:2 b:3}] | length",
                 result: Some(Value::test_int(2)),
             },
+            Example {
+                description: "Count the number of bytes in binary data",
+                example: "0x[01 02] | length",
+                result: Some(Value::test_int(2)),
+            },
         ]
     }
 }
@@ -64,6 +74,19 @@ fn length_row(call: &Call, input: PipelineData) -> Result<PipelineData, ShellErr
                 src_span: span,
             })
         }
+        PipelineData::Value(Value::Binary { val, .. }, ..) => {
+            Ok(Value::int(val.len() as i64, call.head).into_pipeline_data())
+        }
+        PipelineData::ByteStream(stream, _) if stream.type_().is_binary_coercible() => {
+            Ok(Value::int(
+                match stream.reader() {
+                    Some(r) => r.bytes().count() as i64,
+                    None => 0,
+                },
+                call.head,
+            )
+            .into_pipeline_data())
+        }
         _ => {
             let mut count: i64 = 0;
             // Check for and propagate errors
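For byte streams the new arm counts bytes by draining a reader, the same Read::bytes().count() idea visible in the hunk. A small std-only sketch of just that counting step:

use std::io::{Cursor, Read};

// Count the bytes available from any reader by iterating it to exhaustion.
// Note: Read::bytes() yields io::Result<u8>, so count() also counts error items;
// an in-process stream such as the one the command reads makes that acceptable.
fn byte_len(reader: impl Read) -> usize {
    reader.bytes().count()
}

fn main() {
    let data = Cursor::new(vec![0x01u8, 0x02]);
    assert_eq!(byte_len(data), 2); // mirrors `0x[01 02] | length` returning 2
    println!("ok");
}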
@@ -25,7 +25,7 @@ impl Command for Range {
     }

     fn search_terms(&self) -> Vec<&str> {
-        vec!["filter", "head", "tail"]
+        vec!["filter", "head", "tail", "slice"]
     }

     fn examples(&self) -> Vec<Example> {
@@ -64,12 +64,12 @@ produce a table, a list will produce a list, and a record will produce a record.
             Value::String { val, .. } => {
                 let cv = CellPath {
                     members: vec![PathMember::String {
-                        val: val.clone(),
+                        val,
                         span: *col_span,
                         optional: false,
                     }],
                 };
-                new_columns.push(cv.clone());
+                new_columns.push(cv);
             }
             Value::Int { val, internal_span } => {
                 if val < 0 {
@@ -87,7 +87,7 @@ produce a table, a list will produce a list, and a record will produce a record.
                         optional: false,
                     }],
                 };
-                new_columns.push(cv.clone());
+                new_columns.push(cv);
             }
             x => {
                 return Err(ShellError::CantConvert {
@@ -240,7 +240,7 @@ fn select(
                     //FIXME: improve implementation to not clone
                     match input_val.clone().follow_cell_path(&path.members, false) {
                         Ok(fetcher) => {
-                            record.push(path.to_string(), fetcher);
+                            record.push(path.to_column_name(), fetcher);
                             if !columns_with_value.contains(&path) {
                                 columns_with_value.push(path);
                             }
@@ -271,7 +271,7 @@ fn select(
                         // FIXME: remove clone
                         match v.clone().follow_cell_path(&cell_path.members, false) {
                             Ok(result) => {
-                                record.push(cell_path.to_string(), result);
+                                record.push(cell_path.to_column_name(), result);
                             }
                             Err(e) => return Err(e),
                         }
@@ -295,7 +295,7 @@ fn select(
                     //FIXME: improve implementation to not clone
                     match x.clone().follow_cell_path(&path.members, false) {
                         Ok(value) => {
-                            record.push(path.to_string(), value);
+                            record.push(path.to_column_name(), value);
                         }
                         Err(e) => return Err(e),
                     }
@@ -84,16 +84,16 @@ pub fn split_by(
     input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
     let name = call.head;
+    let config = engine_state.get_config();
     let splitter: Option<Value> = call.opt(engine_state, stack, 0)?;

     match splitter {
         Some(v) => {
             let splitter = Some(Spanned {
-                item: v.coerce_into_string()?,
+                item: v.to_abbreviated_string(config),
                 span: name,
             });
-            Ok(split(splitter.as_ref(), input, name)?)
+            Ok(split(splitter.as_ref(), input, name, config)?)
         }
         // This uses the same format as the 'requires a column name' error in sort_utils.rs
         None => Err(ShellError::GenericError {
@@ -110,6 +110,7 @@ pub fn split(
     column_name: Option<&Spanned<String>>,
     values: PipelineData,
     span: Span,
+    config: &nu_protocol::Config,
 ) -> Result<PipelineData, ShellError> {
     let grouper = if let Some(column_name) = column_name {
         Grouper::ByColumn(Some(column_name.clone()))
@@ -127,7 +128,7 @@ pub fn split(
             };

             match group_key {
-                Some(group_key) => Ok(group_key.coerce_string()?),
+                Some(group_key) => Ok(group_key.to_abbreviated_string(config)),
                 None => Err(ShellError::CantFindColumn {
                     col_name: column_name.item.to_string(),
                     span: Some(column_name.span),
@@ -136,12 +137,12 @@ pub fn split(
                 }
             };

-            data_split(values, Some(&block), span)
+            data_split(values, Some(&block), span, config)
         }
         Grouper::ByColumn(None) => {
-            let block = move |_, row: &Value| row.coerce_string();
+            let block = move |_, row: &Value| Ok(row.to_abbreviated_string(config));

-            data_split(values, Some(&block), span)
+            data_split(values, Some(&block), span, config)
         }
     }
 }
@@ -151,6 +152,7 @@ fn data_group(
     values: &Value,
     grouper: Option<&dyn Fn(usize, &Value) -> Result<String, ShellError>>,
     span: Span,
+    config: &nu_protocol::Config,
 ) -> Result<Value, ShellError> {
     let mut groups: IndexMap<String, Vec<Value>> = IndexMap::new();

@@ -158,7 +160,7 @@ fn data_group(
         let group_key = if let Some(ref grouper) = grouper {
             grouper(idx, &value)
         } else {
-            value.coerce_string()
+            Ok(value.to_abbreviated_string(config))
         };

         let group = groups.entry(group_key?).or_default();
@@ -179,6 +181,7 @@ pub fn data_split(
     value: PipelineData,
     splitter: Option<&dyn Fn(usize, &Value) -> Result<String, ShellError>>,
     dst_span: Span,
+    config: &nu_protocol::Config,
 ) -> Result<PipelineData, ShellError> {
     let mut splits = indexmap::IndexMap::new();

@@ -188,7 +191,7 @@ pub fn data_split(
     match v {
         Value::Record { val: grouped, .. } => {
             for (outer_key, list) in grouped.into_owned() {
-                match data_group(&list, splitter, span) {
+                match data_group(&list, splitter, span, config) {
                     Ok(grouped_vals) => {
                         if let Value::Record { val: sub, .. } = grouped_vals {
                             for (inner_key, subset) in sub.into_owned() {
@@ -175,6 +175,12 @@ pub fn transpose(

     let metadata = input.metadata();
     let input: Vec<_> = input.into_iter().collect();
+    // Ensure error values are propagated
+    for i in input.iter() {
+        if let Value::Error { .. } = i {
+            return Ok(i.clone().into_pipeline_data_with_metadata(metadata));
+        }
+    }

     let descs = get_columns(&input);

@@ -46,7 +46,7 @@ impl Command for Uniq {
     }

     fn search_terms(&self) -> Vec<&str> {
-        vec!["distinct", "deduplicate"]
+        vec!["distinct", "deduplicate", "count"]
     }

     fn run(
@@ -3,6 +3,7 @@ use nu_engine::command_prelude::*;
 use nu_protocol::{
     format_duration, format_filesize_from_conf, ByteStream, Config, PipelineMetadata,
 };
+use std::io::Write;

 const LINE_ENDING: &str = if cfg!(target_os = "windows") {
     "\r\n"
@@ -21,6 +22,11 @@ impl Command for ToText {
     fn signature(&self) -> Signature {
         Signature::build("to text")
             .input_output_types(vec![(Type::Any, Type::String)])
+            .switch(
+                "no-newline",
+                "Do not append a newline to the end of the text",
+                Some('n'),
+            )
             .category(Category::Formats)
     }

@@ -35,36 +41,69 @@ impl Command for ToText {
         call: &Call,
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
-        let span = call.head;
+        let head = call.head;
+        let no_newline = call.has_flag(engine_state, stack, "no-newline")?;
         let input = input.try_expand_range()?;
         let config = stack.get_config(engine_state);

         match input {
-            PipelineData::Empty => Ok(Value::string(String::new(), span)
+            PipelineData::Empty => Ok(Value::string(String::new(), head)
                 .into_pipeline_data_with_metadata(update_metadata(None))),
             PipelineData::Value(value, ..) => {
-                let str = local_into_string(value, LINE_ENDING, &config);
+                let add_trailing = !no_newline
+                    && match &value {
+                        Value::List { vals, .. } => !vals.is_empty(),
+                        Value::Record { val, .. } => !val.is_empty(),
+                        _ => false,
+                    };
+                let mut str = local_into_string(value, LINE_ENDING, &config);
+                if add_trailing {
+                    str.push_str(LINE_ENDING);
+                }
                 Ok(
-                    Value::string(str, span)
+                    Value::string(str, head)
                         .into_pipeline_data_with_metadata(update_metadata(None)),
                 )
             }
             PipelineData::ListStream(stream, meta) => {
                 let span = stream.span();
-                let iter = stream.into_inner().map(move |value| {
-                    let mut str = local_into_string(value, LINE_ENDING, &config);
-                    str.push_str(LINE_ENDING);
-                    str
-                });
-                Ok(PipelineData::ByteStream(
-                    ByteStream::from_iter(
-                        iter,
-                        span,
-                        engine_state.signals().clone(),
-                        ByteStreamType::String,
-                    ),
-                    update_metadata(meta),
-                ))
+                let stream = if no_newline {
+                    let mut first = true;
+                    let mut iter = stream.into_inner();
+                    ByteStream::from_fn(
+                        span,
+                        engine_state.signals().clone(),
+                        ByteStreamType::String,
+                        move |buf| {
+                            let Some(val) = iter.next() else {
+                                return Ok(false);
+                            };
+                            if first {
+                                first = false;
+                            } else {
+                                write!(buf, "{LINE_ENDING}").err_span(head)?;
+                            }
+                            // TODO: write directly into `buf` instead of creating an intermediate
+                            // string.
+                            let str = local_into_string(val, LINE_ENDING, &config);
+                            write!(buf, "{str}").err_span(head)?;
+                            Ok(true)
+                        },
+                    )
+                } else {
+                    ByteStream::from_iter(
+                        stream.into_inner().map(move |val| {
+                            let mut str = local_into_string(val, LINE_ENDING, &config);
+                            str.push_str(LINE_ENDING);
+                            str
+                        }),
+                        span,
+                        engine_state.signals().clone(),
+                        ByteStreamType::String,
+                    )
+                };
+
+                Ok(PipelineData::ByteStream(stream, update_metadata(meta)))
             }
             PipelineData::ByteStream(stream, meta) => {
                 Ok(PipelineData::ByteStream(stream, update_metadata(meta)))
@@ -75,8 +114,13 @@ impl Command for ToText {
     fn examples(&self) -> Vec<Example> {
         vec![
             Example {
-                description: "Outputs data as simple text",
-                example: "1 | to text",
+                description: "Outputs data as simple text with a trailing newline",
+                example: "[1] | to text",
+                result: Some(Value::test_string("1".to_string() + LINE_ENDING)),
+            },
+            Example {
+                description: "Outputs data as simple text without a trailing newline",
+                example: "[1] | to text --no-newline",
                 result: Some(Value::test_string("1")),
             },
             Example {
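With --no-newline the stream arm switches from "append a line ending after every item" to "write a line ending only between items", tracked with a first flag. A compact sketch of that separator pattern writing into any io::Write sink; the helper name here is illustrative, not part of the command:

use std::io::{self, Write};

// Write items separated by `sep`, with no trailing separator: the first item
// writes nothing before it, every later item writes the separator first.
fn write_joined<W: Write>(mut out: W, items: &[&str], sep: &str) -> io::Result<()> {
    let mut first = true;
    for item in items {
        if first {
            first = false;
        } else {
            out.write_all(sep.as_bytes())?;
        }
        out.write_all(item.as_bytes())?;
    }
    Ok(())
}

fn main() -> io::Result<()> {
    let mut buf = Vec::new();
    write_joined(&mut buf, &["1", "2", "3"], "\n")?;
    assert_eq!(buf, b"1\n2\n3".to_vec()); // no trailing newline
    Ok(())
}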
@@ -1,5 +1,6 @@
 use chrono::{Duration, Local, NaiveDate};
 use nu_engine::command_prelude::*;
+use nu_protocol::FromValue;

 use std::fmt::Write;

@@ -187,13 +188,14 @@ pub fn run_seq_dates(
 ) -> Result<Value, ShellError> {
     let today = Local::now().date_naive();
     // if cannot convert , it will return error
-    let mut step_size: i64 = increment.as_i64()?;
+    let increment_span = increment.span();
+    let mut step_size: i64 = i64::from_value(increment)?;

     if step_size == 0 {
         return Err(ShellError::GenericError {
             error: "increment cannot be 0".into(),
             msg: "increment cannot be 0".into(),
-            span: Some(increment.span()),
+            span: Some(increment_span),
             help: None,
             inner: vec![],
         });
@@ -264,7 +266,7 @@ pub fn run_seq_dates(
     };

     let mut days_to_output = match day_count {
-        Some(d) => d.as_i64()?,
+        Some(d) => i64::from_value(d)?,
         None => 0i64,
     };

@@ -220,6 +220,7 @@ fn build_help_commands(engine_state: &EngineState, span: Span) -> Vec<Value> {
             "params" => param_table,
             "input_output" => input_output_table,
             "search_terms" => Value::string(search_terms.join(", "), span),
+            "is_const" => Value::bool(decl.is_const(), span),
         };

         found_cmds_vec.push(Value::record(record, span));
@@ -525,7 +525,7 @@ pub fn request_set_timeout(
     mut request: Request,
 ) -> Result<Request, ShellError> {
     if let Some(timeout) = timeout {
-        let val = timeout.as_i64()?;
+        let val = timeout.as_duration()?;
         if val.is_negative() || val < 1 {
             return Err(ShellError::TypeMismatch {
                 err_message: "Timeout value must be an int and larger than 0".to_string(),
@@ -533,7 +533,7 @@ pub fn request_set_timeout(
             });
         }

-        request = request.timeout(Duration::from_secs(val as u64));
+        request = request.timeout(Duration::from_nanos(val as u64));
     }

     Ok(request)
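Switching the flag to a duration type means the value now arrives as an integer count of nanoseconds, hence Duration::from_nanos instead of from_secs above. A tiny sketch of just the conversion being relied on, with an assumed literal value standing in for the flag:

use std::time::Duration;

fn main() {
    // A shell duration such as `30sec` is carried as an i64 nanosecond count.
    let nanos: i64 = 30_000_000_000;
    assert!(nanos > 0); // the command rejects zero or negative timeouts
    let timeout = Duration::from_nanos(nanos as u64);
    assert_eq!(timeout, Duration::from_secs(30));
    println!("timeout = {timeout:?}");
}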
@@ -43,8 +43,8 @@ impl Command for SubCommand {
             )
             .named(
                 "max-time",
-                SyntaxShape::Int,
-                "timeout period in seconds",
+                SyntaxShape::Duration,
+                "max duration before timeout occurs",
                 Some('m'),
             )
             .named(
@@ -38,8 +38,8 @@ impl Command for SubCommand {
             )
             .named(
                 "max-time",
-                SyntaxShape::Int,
-                "timeout period in seconds",
+                SyntaxShape::Duration,
+                "max duration before timeout occurs",
                 Some('m'),
             )
             .named(
@@ -38,8 +38,8 @@ impl Command for SubCommand {
             )
             .named(
                 "max-time",
-                SyntaxShape::Int,
-                "timeout period in seconds",
+                SyntaxShape::Duration,
+                "max duration before timeout occurs",
                 Some('m'),
             )
             .named(
@@ -37,8 +37,8 @@ impl Command for SubCommand {
             )
             .named(
                 "max-time",
-                SyntaxShape::Int,
-                "timeout period in seconds",
+                SyntaxShape::Duration,
+                "max duration before timeout occurs",
                 Some('m'),
             )
             .named(
@@ -39,8 +39,8 @@ impl Command for SubCommand {
             )
             .named(
                 "max-time",
-                SyntaxShape::Int,
-                "timeout period in seconds",
+                SyntaxShape::Duration,
+                "max duration before timeout occurs",
                 Some('m'),
             )
             .named(
@@ -39,8 +39,8 @@ impl Command for SubCommand {
             )
             .named(
                 "max-time",
-                SyntaxShape::Int,
-                "timeout period in seconds",
+                SyntaxShape::Duration,
+                "max duration before timeout occurs",
                 Some('m'),
             )
             .named(
@@ -128,8 +128,13 @@ impl Command for SubCommand {
                 result: None,
             },
             Example {
-                description: "Upload a file to example.com",
-                example: "http post --content-type multipart/form-data https://www.example.com { audio: (open -r file.mp3) }",
+                description: "Upload a binary file to example.com",
+                example: "http post --content-type multipart/form-data https://www.example.com { file: (open -r file.mp3) }",
+                result: None,
+            },
+            Example {
+                description: "Convert a text file into binary and upload it to example.com",
+                example: "http post --content-type multipart/form-data https://www.example.com { file: (open -r file.txt | into binary) }",
                 result: None,
             },
         ]
@@ -39,8 +39,8 @@ impl Command for SubCommand {
             )
             .named(
                 "max-time",
-                SyntaxShape::Int,
-                "timeout period in seconds",
+                SyntaxShape::Duration,
+                "max duration before timeout occurs",
                 Some('m'),
             )
             .named(
@@ -1,5 +1,7 @@
 use nu_engine::command_prelude::*;

+use super::query::{record_to_query_string, table_to_query_string};
+
 #[derive(Clone)]
 pub struct SubCommand;

@@ -12,7 +14,10 @@ impl Command for SubCommand {
         Signature::build("url build-query")
             .input_output_types(vec![
                 (Type::record(), Type::String),
-                (Type::table(), Type::String),
+                (
+                    Type::Table([("key".into(), Type::Any), ("value".into(), Type::Any)].into()),
+                    Type::String,
+                ),
             ])
             .category(Category::Network)
     }
@@ -33,15 +38,20 @@ impl Command for SubCommand {
                 result: Some(Value::test_string("mode=normal&userid=31415")),
             },
             Example {
-                description: "Outputs a query string representing the contents of this 1-row table",
-                example: r#"[[foo bar]; ["1" "2"]] | url build-query"#,
-                result: Some(Value::test_string("foo=1&bar=2")),
-            },
-            Example {
-                description: "Outputs a query string representing the contents of this record",
+                description: "Outputs a query string representing the contents of this record, with a value that needs to be url-encoded",
                 example: r#"{a:"AT&T", b: "AT T"} | url build-query"#,
                 result: Some(Value::test_string("a=AT%26T&b=AT+T")),
             },
+            Example {
+                description: "Outputs a query string representing the contents of this record, \"exploding\" the list into multiple parameters",
+                example: r#"{a: ["one", "two"], b: "three"} | url build-query"#,
+                result: Some(Value::test_string("a=one&a=two&b=three")),
+            },
+            Example {
+                description: "Outputs a query string representing the contents of this table containing key-value pairs",
+                example: r#"[[key, value]; [a, one], [a, two], [b, three], [a, four]] | url build-query"#,
+                result: Some(Value::test_string("a=one&a=two&b=three&a=four")),
+            },
         ]
     }

@@ -53,59 +63,25 @@ impl Command for SubCommand {
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
         let head = call.head;
-        to_url(input, head)
+        let input_span = input.span().unwrap_or(head);
+        let value = input.into_value(input_span)?;
+        let span = value.span();
+        let output = match value {
+            Value::Record { ref val, .. } => record_to_query_string(val, span, head),
+            Value::List { ref vals, .. } => table_to_query_string(vals, span, head),
+            // Propagate existing errors
+            Value::Error { error, .. } => Err(*error),
+            other => Err(ShellError::UnsupportedInput {
+                msg: "Expected a record or table from pipeline".to_string(),
+                input: "value originates from here".into(),
+                msg_span: head,
+                input_span: other.span(),
+            }),
+        };
+        Ok(Value::string(output?, head).into_pipeline_data())
     }
 }

-fn to_url(input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
-    let output: Result<String, ShellError> = input
-        .into_iter()
-        .map(move |value| {
-            let span = value.span();
-            match value {
-                Value::Record { ref val, .. } => {
-                    let mut row_vec = vec![];
-                    for (k, v) in &**val {
-                        match v.coerce_string() {
-                            Ok(s) => {
-                                row_vec.push((k.clone(), s));
-                            }
-                            _ => {
-                                return Err(ShellError::UnsupportedInput {
-                                    msg: "Expected a record with string values".to_string(),
-                                    input: "value originates from here".into(),
-                                    msg_span: head,
-                                    input_span: span,
-                                });
-                            }
-                        }
-                    }
-
-                    match serde_urlencoded::to_string(row_vec) {
-                        Ok(s) => Ok(s),
-                        _ => Err(ShellError::CantConvert {
-                            to_type: "URL".into(),
-                            from_type: value.get_type().to_string(),
-                            span: head,
-                            help: None,
-                        }),
-                    }
-                }
-                // Propagate existing errors
-                Value::Error { error, .. } => Err(*error),
-                other => Err(ShellError::UnsupportedInput {
-                    msg: "Expected a table from pipeline".to_string(),
-                    input: "value originates from here".into(),
-                    msg_span: head,
-                    input_span: other.span(),
-                }),
-            }
-        })
-        .collect();
-
-    Ok(Value::string(output?, head).into_pipeline_data())
-}
-
 #[cfg(test)]
 mod test {
     use super::*;
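Both the record path and the key-value-table path above end in serde_urlencoded::to_string over a sequence of pairs, which is what makes repeated keys such as a=one&a=two possible. A sketch of that encoding step on plain string pairs (values here are made up for illustration):

// Requires the serde_urlencoded crate, which the command already depends on.
fn main() {
    // A list of pairs may repeat keys; a map could not.
    let pairs = vec![("a", "one"), ("a", "two"), ("b", "AT&T")];
    let qs = serde_urlencoded::to_string(pairs).expect("string pairs always encode");
    // '&' in the value is percent-encoded, and repeated keys are kept in order.
    assert_eq!(qs, "a=one&a=two&b=AT%26T");
    println!("{qs}");
}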
@@ -1,5 +1,7 @@
 use nu_engine::command_prelude::*;

+use super::query::{record_to_query_string, table_to_query_string};
+
 #[derive(Clone)]
 pub struct SubCommand;

@@ -27,7 +29,7 @@ impl Command for SubCommand {
     fn examples(&self) -> Vec<Example> {
         vec![
             Example {
-                description: "Outputs a url representing the contents of this record",
+                description: "Outputs a url representing the contents of this record, `params` and `query` fields must be equivalent",
                 example: r#"{
         "scheme": "http",
         "username": "",
@@ -47,6 +49,21 @@ impl Command for SubCommand {
                     "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=99260204",
                 )),
             },
+            Example {
+                description: "Outputs a url representing the contents of this record, \"exploding\" the list in `params` into multiple parameters",
+                example: r#"{
+        "scheme": "http",
+        "username": "user",
+        "password": "pwd",
+        "host": "www.pixiv.net",
+        "port": "1234",
+        "params": {a: ["one", "two"], b: "three"},
+        "fragment": ""
+    } | url join"#,
+                result: Some(Value::test_string(
+                    "http://user:pwd@www.pixiv.net:1234?a=one&a=two&b=three",
+                )),
+            },
             Example {
                 description: "Outputs a url representing the contents of this record",
                 example: r#"{
@@ -95,7 +112,7 @@ impl Command for SubCommand {
             .into_owned()
             .into_iter()
             .try_fold(UrlComponents::new(), |url, (k, v)| {
-                url.add_component(k, v, span, engine_state)
+                url.add_component(k, v, head, engine_state)
             });

         url_components?.to_url(span)
@@ -138,7 +155,7 @@ impl UrlComponents {
         self,
         key: String,
         value: Value,
-        span: Span,
+        head: Span,
         engine_state: &EngineState,
     ) -> Result<Self, ShellError> {
         let value_span = value.span();
@@ -177,48 +194,41 @@ impl UrlComponents {
         }

         if key == "params" {
-            return match value {
-                Value::Record { val, .. } => {
-                    let mut qs = val
-                        .into_owned()
-                        .into_iter()
-                        .map(|(k, v)| match v.coerce_into_string() {
-                            Ok(val) => Ok(format!("{k}={val}")),
-                            Err(err) => Err(err),
-                        })
-                        .collect::<Result<Vec<String>, ShellError>>()?
-                        .join("&");
-
-                    qs = if !qs.trim().is_empty() {
-                        format!("?{qs}")
-                    } else {
-                        qs
-                    };
-
-                    if let Some(q) = self.query {
-                        if q != qs {
-                            // if query is present it means that also query_span is set.
-                            return Err(ShellError::IncompatibleParameters {
-                                left_message: format!("Mismatch, qs from params is: {qs}"),
-                                left_span: value_span,
-                                right_message: format!("instead query is: {q}"),
-                                right_span: self.query_span.unwrap_or(Span::unknown()),
-                            });
-                        }
-                    }
-
-                    Ok(Self {
-                        query: Some(qs),
-                        params_span: Some(value_span),
-                        ..self
-                    })
-                }
-                Value::Error { error, .. } => Err(*error),
-                other => Err(ShellError::IncompatibleParametersSingle {
-                    msg: String::from("Key params has to be a record"),
-                    span: other.span(),
-                }),
+            let mut qs = match value {
+                Value::Record { ref val, .. } => record_to_query_string(val, value_span, head)?,
+                Value::List { ref vals, .. } => table_to_query_string(vals, value_span, head)?,
+                Value::Error { error, .. } => return Err(*error),
+                other => {
+                    return Err(ShellError::IncompatibleParametersSingle {
+                        msg: String::from("Key params has to be a record or a table"),
+                        span: other.span(),
+                    })
+                }
             };
+
+            qs = if !qs.trim().is_empty() {
+                format!("?{qs}")
+            } else {
+                qs
+            };
+
+            if let Some(q) = self.query {
+                if q != qs {
+                    // if query is present it means that also query_span is set.
+                    return Err(ShellError::IncompatibleParameters {
+                        left_message: format!("Mismatch, query string from params is: {qs}"),
+                        left_span: value_span,
+                        right_message: format!("instead query is: {q}"),
+                        right_span: self.query_span.unwrap_or(Span::unknown()),
+                    });
+                }
+            }
+
+            return Ok(Self {
+                query: Some(qs),
+                params_span: Some(value_span),
+                ..self
+            });
         }

         // apart from port and params all other keys are strings.
@@ -258,7 +268,7 @@ impl UrlComponents {
             return Err(ShellError::IncompatibleParameters {
                 left_message: format!("Mismatch, query param is: {s}"),
                 left_span: value_span,
-                right_message: format!("instead qs from params is: {q}"),
+                right_message: format!("instead query string from params is: {q}"),
                 right_span: self.params_span.unwrap_or(Span::unknown()),
             });
         }
@@ -284,7 +294,7 @@ impl UrlComponents {
             &ShellError::GenericError {
                 error: format!("'{key}' is not a valid URL field"),
                 msg: format!("remove '{key}' col from input record"),
-                span: Some(span),
+                span: Some(value_span),
                 help: None,
                 inner: vec![],
             },
@@ -3,6 +3,8 @@ mod decode;
 mod encode;
 mod join;
 mod parse;
+mod query;
+mod split_query;
 mod url_;

 pub use self::parse::SubCommand as UrlParse;
@@ -10,4 +12,5 @@ pub use build_query::SubCommand as UrlBuildQuery;
 pub use decode::SubCommand as UrlDecode;
 pub use encode::SubCommand as UrlEncode;
 pub use join::SubCommand as UrlJoin;
+pub use split_query::SubCommand as UrlSplitQuery;
 pub use url_::Url;
@@ -2,6 +2,8 @@ use nu_engine::command_prelude::*;
 use nu_protocol::Config;
 use url::Url;

+use super::query::query_string_to_table;
+
 #[derive(Clone)]
 pub struct SubCommand;

@@ -53,7 +55,7 @@ impl Command for SubCommand {
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Parses a url",
-            example: "'http://user123:pass567@www.example.com:8081/foo/bar?param1=section&p2=&f[name]=vldc#hello' | url parse",
+            example: "'http://user123:pass567@www.example.com:8081/foo/bar?param1=section&p2=&f[name]=vldc&f[no]=42#hello' | url parse",
             result: Some(Value::test_record(record! {
                 "scheme" => Value::test_string("http"),
                 "username" => Value::test_string("user123"),
@@ -61,13 +63,14 @@ impl Command for SubCommand {
                 "host" => Value::test_string("www.example.com"),
                 "port" => Value::test_string("8081"),
                 "path" => Value::test_string("/foo/bar"),
-                "query" => Value::test_string("param1=section&p2=&f[name]=vldc"),
+                "query" => Value::test_string("param1=section&p2=&f[name]=vldc&f[no]=42"),
                 "fragment" => Value::test_string("hello"),
-                "params" => Value::test_record(record! {
-                    "param1" => Value::test_string("section"),
-                    "p2" => Value::test_string(""),
-                    "f[name]" => Value::test_string("vldc"),
-                }),
+                "params" => Value::test_list(vec![
+                    Value::test_record(record! {"key" => Value::test_string("param1"), "value" => Value::test_string("section") }),
+                    Value::test_record(record! {"key" => Value::test_string("p2"), "value" => Value::test_string("") }),
+                    Value::test_record(record! {"key" => Value::test_string("f[name]"), "value" => Value::test_string("vldc") }),
+                    Value::test_record(record! {"key" => Value::test_string("f[no]"), "value" => Value::test_string("42") }),
+                ]),
             })),
         }]
     }
@@ -80,54 +83,41 @@ fn get_url_string(value: &Value, config: &Config) -> String {
 fn parse(value: Value, head: Span, config: &Config) -> Result<PipelineData, ShellError> {
     let url_string = get_url_string(&value, config);

-    let result_url = Url::parse(url_string.as_str());
-
     // This is the span of the original string, not the call head.
     let span = value.span();

-    match result_url {
-        Ok(url) => {
-            let params =
-                serde_urlencoded::from_str::<Vec<(String, String)>>(url.query().unwrap_or(""));
-            match params {
-                Ok(result) => {
-                    let params = result
-                        .into_iter()
-                        .map(|(k, v)| (k, Value::string(v, head)))
-                        .collect();
-
-                    let port = url.port().map(|p| p.to_string()).unwrap_or_default();
-
-                    let record = record! {
-                        "scheme" => Value::string(url.scheme(), head),
-                        "username" => Value::string(url.username(), head),
-                        "password" => Value::string(url.password().unwrap_or(""), head),
-                        "host" => Value::string(url.host_str().unwrap_or(""), head),
-                        "port" => Value::string(port, head),
-                        "path" => Value::string(url.path(), head),
-                        "query" => Value::string(url.query().unwrap_or(""), head),
-                        "fragment" => Value::string(url.fragment().unwrap_or(""), head),
-                        "params" => Value::record(params, head),
-                    };
-
-                    Ok(PipelineData::Value(Value::record(record, head), None))
-                }
-                _ => Err(ShellError::UnsupportedInput {
-                    msg: "String not compatible with url-encoding".to_string(),
-                    input: "value originates from here".into(),
-                    msg_span: head,
-                    input_span: span,
-                }),
-            }
-        }
-        Err(_e) => Err(ShellError::UnsupportedInput {
-            msg: "Incomplete or incorrect URL. Expected a full URL, e.g., https://www.example.com"
-                .to_string(),
-            input: "value originates from here".into(),
-            msg_span: head,
-            input_span: span,
-        }),
-    }
+    let url = Url::parse(url_string.as_str()).map_err(|_| ShellError::UnsupportedInput {
+        msg: "Incomplete or incorrect URL. Expected a full URL, e.g., https://www.example.com"
+            .to_string(),
+        input: "value originates from here".into(),
+        msg_span: head,
+        input_span: span,
+    })?;
+
+    let params = query_string_to_table(url.query().unwrap_or(""), head, span).map_err(|_| {
+        ShellError::UnsupportedInput {
+            msg: "String not compatible with url-encoding".to_string(),
+            input: "value originates from here".into(),
+            msg_span: head,
+            input_span: span,
+        }
+    })?;
+
+    let port = url.port().map(|p| p.to_string()).unwrap_or_default();
+
+    let record = record! {
+        "scheme" => Value::string(url.scheme(), head),
+        "username" => Value::string(url.username(), head),
+        "password" => Value::string(url.password().unwrap_or(""), head),
+        "host" => Value::string(url.host_str().unwrap_or(""), head),
+        "port" => Value::string(port, head),
+        "path" => Value::string(url.path(), head),
+        "query" => Value::string(url.query().unwrap_or(""), head),
+        "fragment" => Value::string(url.fragment().unwrap_or(""), head),
+        "params" => params,
+    };
+
+    Ok(PipelineData::Value(Value::record(record, head), None))
 }

 #[cfg(test)]
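The rewritten parse above flattens the old nested match by converting the url-crate parse error straight into a ShellError with map_err. A bare sketch of the underlying url-crate calls it builds on; the input string here is made up for illustration:

use url::Url;

fn main() {
    // Hypothetical input; the real command takes it from the pipeline.
    let input = "http://user123:pass567@www.example.com:8081/foo/bar?a=one&a=two#hello";
    let url = Url::parse(input).expect("a full URL parses");

    // Each accessor maps onto one field of the record the command builds.
    assert_eq!(url.scheme(), "http");
    assert_eq!(url.username(), "user123");
    assert_eq!(url.host_str(), Some("www.example.com"));
    assert_eq!(url.port(), Some(8081));
    assert_eq!(url.path(), "/foo/bar");
    assert_eq!(url.query(), Some("a=one&a=two"));
    assert_eq!(url.fragment(), Some("hello"));
}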
crates/nu-command/src/network/url/query.rs (new file, 115 lines)
@@ -0,0 +1,115 @@
+use std::borrow::Cow;
+
+use nu_protocol::{IntoValue, Record, ShellError, Span, Type, Value};
+
+pub fn record_to_query_string(
+    record: &Record,
+    span: Span,
+    head: Span,
+) -> Result<String, ShellError> {
+    let mut row_vec = vec![];
+    for (k, v) in record {
+        match v {
+            Value::List { ref vals, .. } => {
+                for v_item in vals {
+                    row_vec.push((
+                        k.as_str(),
+                        v_item
+                            .coerce_str()
+                            .map_err(|_| ShellError::UnsupportedInput {
+                                msg: "Expected a record with list of string values".to_string(),
+                                input: "value originates from here".into(),
+                                msg_span: head,
+                                input_span: span,
+                            })?,
+                    ));
+                }
+            }
+            _ => row_vec.push((
+                k.as_str(),
+                v.coerce_str().map_err(|_| ShellError::UnsupportedInput {
+                    msg: "Expected a record with string or list of string values".to_string(),
+                    input: "value originates from here".into(),
+                    msg_span: head,
+                    input_span: span,
+                })?,
+            )),
+        }
+    }
+
+    serde_urlencoded::to_string(row_vec).map_err(|_| ShellError::CantConvert {
+        to_type: "URL".into(),
+        from_type: Type::record().to_string(),
+        span: head,
+        help: None,
+    })
+}
+
+pub fn table_to_query_string(
+    table: &[Value],
+    span: Span,
+    head: Span,
+) -> Result<String, ShellError> {
+    let row_vec = table
+        .iter()
+        .map(|val| match val {
+            Value::Record { val, internal_span } => key_value_from_record(val, *internal_span),
+            _ => Err(ShellError::UnsupportedInput {
+                msg: "expected a table".into(),
+                input: "not a table, contains non-record values".into(),
+                msg_span: head,
+                input_span: span,
+            }),
+        })
+        .collect::<Result<Vec<_>, ShellError>>()?;
+
+    serde_urlencoded::to_string(row_vec).map_err(|_| ShellError::CantConvert {
+        to_type: "URL".into(),
+        from_type: Type::table().to_string(),
+        span: head,
+        help: None,
+    })
+}
+
+fn key_value_from_record(record: &Record, span: Span) -> Result<(Cow<str>, Cow<str>), ShellError> {
+    let key = record
+        .get("key")
+        .ok_or_else(|| ShellError::CantFindColumn {
+            col_name: "key".into(),
+            span: None,
+            src_span: span,
+        })?
+        .coerce_str()?;
+    let value = record
+        .get("value")
+        .ok_or_else(|| ShellError::CantFindColumn {
+            col_name: "value".into(),
+            span: None,
+            src_span: span,
+        })?
+        .coerce_str()?;
+    Ok((key, value))
+}
+
+pub fn query_string_to_table(query: &str, head: Span, span: Span) -> Result<Value, ShellError> {
+    let params = serde_urlencoded::from_str::<Vec<(String, String)>>(query)
+        .map_err(|_| ShellError::UnsupportedInput {
+            msg: "String not compatible with url-encoding".to_string(),
+            input: "value originates from here".into(),
+            msg_span: head,
+            input_span: span,
+        })?
+        .into_iter()
+        .map(|(key, value)| {
+            Value::record(
+                nu_protocol::record! {
+                    "key" => key.into_value(head),
+                    "value" => value.into_value(head)
+                },
+                head,
+            )
+        })
+        .collect::<Vec<_>>();
+
+    Ok(Value::list(params, head))
+}
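query_string_to_table decodes with serde_urlencoded::from_str into a list of (key, value) pairs, which is what preserves repeated keys and applies percent-decoding. A minimal sketch of just that decoding step, on a made-up query string:

// Requires the serde_urlencoded crate, the same one query.rs uses.
fn main() {
    let query = "a=AT%26T&a=two&b=AT+T";
    // Deserialize into a Vec of pairs so duplicate keys survive.
    let pairs: Vec<(String, String)> =
        serde_urlencoded::from_str(query).expect("valid query string");
    assert_eq!(
        pairs,
        vec![
            ("a".to_string(), "AT&T".to_string()), // %26 decoded to '&'
            ("a".to_string(), "two".to_string()),  // repeated key kept
            ("b".to_string(), "AT T".to_string()), // '+' decoded to a space
        ]
    );
    println!("{pairs:?}");
}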
106
crates/nu-command/src/network/url/split_query.rs
Normal file
106
crates/nu-command/src/network/url/split_query.rs
Normal file
@ -0,0 +1,106 @@
use nu_engine::command_prelude::*;

use super::query::query_string_to_table;

#[derive(Clone)]
pub struct SubCommand;

impl Command for SubCommand {
    fn name(&self) -> &str {
        "url split-query"
    }

    fn signature(&self) -> Signature {
        Signature::build("url split-query")
            .input_output_types(vec![(
                Type::String,
                Type::Table([("key".into(), Type::String), ("value".into(), Type::String)].into()),
            )])
            .category(Category::Network)
    }

    fn description(&self) -> &str {
        "Converts query string into table applying percent-decoding."
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["convert", "record", "table"]
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Outputs a table representing the contents of this query string",
                example: r#""mode=normal&userid=31415" | url split-query"#,
                result: Some(Value::test_list(vec![
                    Value::test_record(record! {
                        "key" => Value::test_string("mode"),
                        "value" => Value::test_string("normal"),
                    }),
                    Value::test_record(record! {
                        "key" => Value::test_string("userid"),
                        "value" => Value::test_string("31415"),
                    })
                ])),
            },
            Example {
                description: "Outputs a table representing the contents of this query string, url-decoding the values",
                example: r#""a=AT%26T&b=AT+T" | url split-query"#,
                result: Some(Value::test_list(vec![
                    Value::test_record(record! {
                        "key" => Value::test_string("a"),
                        "value" => Value::test_string("AT&T"),
                    }),
                    Value::test_record(record! {
                        "key" => Value::test_string("b"),
                        "value" => Value::test_string("AT T"),
                    }),
                ])),
            },
            Example {
                description: "Outputs a table representing the contents of this query string",
                example: r#""a=one&a=two&b=three" | url split-query"#,
                result: Some(Value::test_list(vec![
                    Value::test_record(record! {
                        "key" => Value::test_string("a"),
                        "value" => Value::test_string("one"),
                    }),
                    Value::test_record(record! {
                        "key" => Value::test_string("a"),
                        "value" => Value::test_string("two"),
                    }),
                    Value::test_record(record! {
                        "key" => Value::test_string("b"),
                        "value" => Value::test_string("three"),
                    }),
                ])),
            },
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let value = input.into_value(call.head)?;
        let span = value.span();
        let query = value.to_expanded_string("", &stack.get_config(engine_state));
        let table = query_string_to_table(&query, call.head, span)?;
        Ok(PipelineData::Value(table, None))
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_examples() {
        use crate::test_examples;

        test_examples(SubCommand {})
    }
}
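A quick Nushell sketch of how the two new pieces fit together. It assumes `url build-query` accepts the key/value table that `table_to_query_string` expects (the "key"/"value" columns read by `key_value_from_record`), so splitting and rebuilding a query string should round-trip, repeated keys included:

    "a=one&a=two&b=three" | url split-query                      # table of key/value rows, percent-decoded
    "a=one&a=two&b=three" | url split-query | url build-query    # assumed to rebuild "a=one&a=two&b=three"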
@@ -1,8 +1,8 @@
 use nu_ansi_term::*;
 use nu_engine::command_prelude::*;
 use nu_protocol::{engine::StateWorkingSet, Signals};
-use once_cell::sync::Lazy;
 use std::collections::HashMap;
+use std::sync::LazyLock;

 #[derive(Clone)]
 pub struct AnsiCommand;
@@ -14,7 +14,7 @@ struct AnsiCode {
 }

 #[rustfmt::skip]
-static CODE_LIST: Lazy<Vec<AnsiCode>> = Lazy::new(|| { vec![
+static CODE_LIST: LazyLock<Vec<AnsiCode>> = LazyLock::new(|| { vec![
 AnsiCode{ short_name: Some("g"), long_name: "green", code: Color::Green.prefix().to_string()},
 AnsiCode{ short_name: Some("gb"), long_name: "green_bold", code: Color::Green.bold().prefix().to_string()},
 AnsiCode{ short_name: Some("gu"), long_name: "green_underline", code: Color::Green.underline().prefix().to_string()},
@@ -425,8 +425,6 @@ static CODE_LIST: Lazy<Vec<AnsiCode>> = Lazy::new(|| { vec![
 AnsiCode { short_name: Some("grey89"), long_name: "xterm_grey89", code: Color::Fixed(254).prefix().to_string()},
 AnsiCode { short_name: Some("grey93"), long_name: "xterm_grey93", code: Color::Fixed(255).prefix().to_string()},

-    AnsiCode{ short_name: None, long_name: "reset", code: "\x1b[0m".to_owned()},
-
 // Attributes
 AnsiCode { short_name: Some("n"), long_name: "attr_normal", code: Color::Green.suffix().to_string()},
 AnsiCode { short_name: Some("bo"), long_name: "attr_bold", code: Style::new().bold().prefix().to_string()},
@@ -437,6 +435,8 @@ static CODE_LIST: Lazy<Vec<AnsiCode>> = Lazy::new(|| { vec![
 AnsiCode { short_name: Some("h"), long_name: "attr_hidden", code: Style::new().hidden().prefix().to_string()},
 AnsiCode { short_name: Some("s"), long_name: "attr_strike", code: Style::new().strikethrough().prefix().to_string()},

+    AnsiCode{ short_name: None, long_name: "reset", code: "\x1b[0m".to_owned()},
+
 // Reference for ansi codes https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797
 // Another good reference http://ascii-table.com/ansi-escape-sequences.php

@@ -448,7 +448,8 @@ static CODE_LIST: Lazy<Vec<AnsiCode>> = Lazy::new(|| { vec![
 AnsiCode{ short_name: None, long_name:"clear_screen_from_cursor_to_end", code: "\x1b[0J".to_string()}, // clears from cursor until end of screen
 AnsiCode{ short_name: None, long_name:"clear_screen_from_cursor_to_beginning", code: "\x1b[1J".to_string()}, // clears from cursor to beginning of screen
 AnsiCode{ short_name: Some("cls"), long_name:"clear_entire_screen", code: "\x1b[2J".to_string()}, // clears the entire screen
-AnsiCode{ short_name: Some("clsb"), long_name:"clear_entire_screen_plus_buffer", code: "\x1b[3J".to_string()}, // clear entire screen and delete all lines saved in the scrollback buffer
+AnsiCode{ short_name: Some("clsb"), long_name:"clear_entire_screen_plus_buffer", code: "\x1b[2J\x1b[3J".to_string()}, // clear entire screen and delete all lines saved in the scrollback buffer
+AnsiCode{ short_name: Some("clb"), long_name:"clear_scrollback_buffer", code: "\x1b[3J".to_string()}, // clear entire screen and delete all lines saved in the scrollback buffer
 AnsiCode{ short_name: None, long_name:"erase_line", code: "\x1b[K".to_string()}, // clears the current line
 AnsiCode{ short_name: None, long_name:"erase_line_from_cursor_to_end", code: "\x1b[0K".to_string()}, // clears from cursor to end of line
 AnsiCode{ short_name: None, long_name:"erase_line_from_cursor_to_beginning", code: "\x1b[1K".to_string()}, // clears from cursor to start of line
@@ -493,8 +494,8 @@ static CODE_LIST: Lazy<Vec<AnsiCode>> = Lazy::new(|| { vec![
 ]
 });

-static CODE_MAP: Lazy<HashMap<&'static str, &'static str>> =
-    Lazy::new(|| build_ansi_hashmap(&CODE_LIST));
+static CODE_MAP: LazyLock<HashMap<&'static str, &'static str>> =
+    LazyLock::new(|| build_ansi_hashmap(&CODE_LIST));

 impl Command for AnsiCommand {
     fn name(&self) -> &str {
@@ -841,15 +842,19 @@ fn generate_ansi_code_list(
         .map(move |(i, ansi_code)| {
             let name = Value::string(ansi_code.long_name, call_span);
             let short_name = Value::string(ansi_code.short_name.unwrap_or(""), call_span);
-            // The first 102 items in the ansi array are colors
-            let preview = if i < 389 {
-                Value::string(format!("{}NUSHELL\u{1b}[0m", &ansi_code.code), call_span)
-            } else {
-                Value::string("\u{1b}[0m", call_span)
-            };
             let code = Value::string(ansi_code.code.replace('\u{1b}', "\\e"), call_span);

             let record = if use_ansi_coloring {
+                // The first 397 items in the ansi array are previewable
+                let preview = if i < 397 {
+                    Value::string(
+                        format!("\u{1b}[0m{}NUSHELL\u{1b}[0m", &ansi_code.code),
+                        call_span,
+                    )
+                } else {
+                    Value::string("\u{1b}[0m", call_span)
+                };
+
                 record! {
                     "name" => name,
                     "preview" => preview,
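For reference, the changed escape codes can be checked straight from the shell. This is only a sketch based on the table above, with the emitted sequences shown as comments:

    ansi clsb     # clear_entire_screen_plus_buffer now emits "\e[2J\e[3J" (clear screen, then scrollback)
    ansi clb      # the new clear_scrollback_buffer entry emits "\e[3J" on its own
    ansi reset    # still "\e[0m"; the entry only moved below the attribute codes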
@@ -41,14 +41,21 @@ impl Command for Clear {
         call: &Call,
         _input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
-        let clear_type: ClearType = match call.has_flag(engine_state, stack, "keep-scrollback")? {
-            true => ClearType::All,
-            _ => ClearType::Purge,
+        match call.has_flag(engine_state, stack, "keep-scrollback")? {
+            true => {
+                std::io::stdout()
+                    .queue(MoveTo(0, 0))?
+                    .queue(ClearCommand(ClearType::All))?
+                    .flush()?;
+            }
+            _ => {
+                std::io::stdout()
+                    .queue(MoveTo(0, 0))?
+                    .queue(ClearCommand(ClearType::All))?
+                    .queue(ClearCommand(ClearType::Purge))?
+                    .flush()?;
+            }
         };
-        std::io::stdout()
-            .queue(ClearCommand(clear_type))?
-            .queue(MoveTo(0, 0))?
-            .flush()?;

         Ok(PipelineData::Empty)
     }
@@ -1,7 +1,7 @@
 use nix::sys::resource::{rlim_t, Resource, RLIM_INFINITY};
 use nu_engine::command_prelude::*;

-use once_cell::sync::Lazy;
+use std::sync::LazyLock;

 /// An object contains resource related parameters
 struct ResourceInfo<'a> {
@@ -54,7 +54,7 @@ impl<'a> Default for ResourceInfo<'a> {
     }
 }

-static RESOURCE_ARRAY: Lazy<Vec<ResourceInfo>> = Lazy::new(|| {
+static RESOURCE_ARRAY: LazyLock<Vec<ResourceInfo>> = LazyLock::new(|| {
     let resources = [
         #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))]
         (
@@ -16,6 +16,7 @@ impl Command for StorInsert {
             .input_output_types(vec![
                 (Type::Nothing, Type::table()),
                 (Type::record(), Type::table()),
+                (Type::table(), Type::table()),
             ])
             .required_named(
                 "table-name",
@@ -43,7 +44,7 @@ impl Command for StorInsert {

    fn examples(&self) -> Vec<Example> {
        vec![Example {
-            description: "Insert data the in-memory sqlite database using a data-record of column-name and column-value pairs",
+            description: "Insert data in the in-memory sqlite database using a data-record of column-name and column-value pairs",
            example: "stor insert --table-name nudb --data-record {bool1: true, int1: 5, float1: 1.1, str1: fdncred, datetime1: 2023-04-17}",
            result: None,
        },
@@ -52,6 +53,16 @@ impl Command for StorInsert {
            example: "{bool1: true, int1: 5, float1: 1.1, str1: fdncred, datetime1: 2023-04-17} | stor insert --table-name nudb",
            result: None,
        },
+        Example {
+            description: "Insert data through pipeline input as a table literal",
+            example: "[[bool1 int1 float1]; [true 5 1.1], [false 8 3.14]] | stor insert --table-name nudb",
+            result: None,
+        },
+        Example {
+            description: "Insert ls entries",
+            example: "ls | stor insert --table-name files",
+            result: None,
+        },
        ]
    }

@@ -71,10 +82,11 @@ impl Command for StorInsert {
            Signals::empty(),
        ));

-        // Check if the record is being passed as input or using the data record parameter
-        let columns = handle(span, data_record, input)?;
+        let records = handle(span, data_record, input)?;

-        process(table_name, span, &db, columns)?;
+        for record in records {
+            process(table_name.clone(), span, &db, record)?;
+        }

        Ok(Value::custom(db, span).into_pipeline_data())
    }
@@ -84,51 +96,54 @@ fn handle(
    span: Span,
    data_record: Option<Record>,
    input: PipelineData,
-) -> Result<Record, ShellError> {
-    match input {
-        PipelineData::Empty => data_record.ok_or_else(|| ShellError::MissingParameter {
-            param_name: "requires a record".into(),
-            span,
-        }),
-        PipelineData::Value(value, ..) => {
-            // Since input is being used, check if the data record parameter is used too
-            if data_record.is_some() {
-                return Err(ShellError::GenericError {
-                    error: "Pipeline and Flag both being used".into(),
-                    msg: "Use either pipeline input or '--data-record' parameter".into(),
-                    span: Some(span),
-                    help: None,
-                    inner: vec![],
-                });
-            }
-            match value {
-                Value::Record { val, .. } => Ok(val.into_owned()),
-                val => Err(ShellError::OnlySupportsThisInputType {
-                    exp_input_type: "record".into(),
-                    wrong_type: val.get_type().to_string(),
-                    dst_span: Span::unknown(),
-                    src_span: val.span(),
-                }),
-            }
-        }
+) -> Result<Vec<Record>, ShellError> {
+    // Check for conflicting use of both pipeline input and flag
+    if let Some(record) = data_record {
+        if !matches!(input, PipelineData::Empty) {
+            return Err(ShellError::GenericError {
+                error: "Pipeline and Flag both being used".into(),
+                msg: "Use either pipeline input or '--data-record' parameter".into(),
+                span: Some(span),
+                help: None,
+                inner: vec![],
+            });
+        }
+        return Ok(vec![record]);
+    }
+
+    // Handle the input types
+    let values = match input {
+        PipelineData::Empty => {
+            return Err(ShellError::MissingParameter {
+                param_name: "requires a table or a record".into(),
+                span,
+            })
+        }
+        PipelineData::ListStream(stream, ..) => stream.into_iter().collect::<Vec<_>>(),
+        PipelineData::Value(Value::List { vals, .. }, ..) => vals,
+        PipelineData::Value(val, ..) => vec![val],
        _ => {
-            if data_record.is_some() {
-                return Err(ShellError::GenericError {
-                    error: "Pipeline and Flag both being used".into(),
-                    msg: "Use either pipeline input or '--data-record' parameter".into(),
-                    span: Some(span),
-                    help: None,
-                    inner: vec![],
-                });
-            }
-            Err(ShellError::OnlySupportsThisInputType {
-                exp_input_type: "record".into(),
+            return Err(ShellError::OnlySupportsThisInputType {
+                exp_input_type: "list or record".into(),
                wrong_type: "".into(),
                dst_span: span,
                src_span: span,
            })
        }
-    }
+    };
+
+    values
+        .into_iter()
+        .map(|val| match val {
+            Value::Record { val, .. } => Ok(val.into_owned()),
+            other => Err(ShellError::OnlySupportsThisInputType {
+                exp_input_type: "record".into(),
+                wrong_type: other.get_type().to_string(),
+                dst_span: Span::unknown(),
+                src_span: other.span(),
+            }),
+        })
+        .collect()
 }

 fn process(
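With the new table input type, every record that arrives through the pipeline becomes one row in the in-memory SQLite table. A usage sketch (the table names are made up, and `stor open` is shown only as one way to inspect the result):

    [[bool1 int1 float1]; [true 5 1.1], [false 8 3.14]] | stor insert --table-name nudb
    ls | stor insert --table-name files
    stor open    # view the in-memory database that now contains the inserted rows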
@@ -2,8 +2,8 @@ use indexmap::{indexmap, IndexMap};
 use nu_engine::command_prelude::*;

 use nu_protocol::Signals;
-use once_cell::sync::Lazy;
 use std::collections::HashSet;
+use std::sync::LazyLock;

 // Character used to separate directories in a Path Environment variable on windows is ";"
 #[cfg(target_family = "windows")]
@@ -15,7 +15,7 @@ const ENV_PATH_SEPARATOR_CHAR: char = ':';
 #[derive(Clone)]
 pub struct Char;

-static CHAR_MAP: Lazy<IndexMap<&'static str, String>> = Lazy::new(|| {
+static CHAR_MAP: LazyLock<IndexMap<&'static str, String>> = LazyLock::new(|| {
     indexmap! {
         // These are some regular characters that either can't be used or
         // it's just easier to use them like this.
@@ -150,7 +150,7 @@ static CHAR_MAP: Lazy<IndexMap<&'static str, String>> = Lazy::new(|| {
     }
 });

-static NO_OUTPUT_CHARS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
+static NO_OUTPUT_CHARS: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
     [
         // If the character is in the this set, we don't output it to prevent
         // the broken of `char --list` command table format and alignment.
@@ -1,4 +1,35 @@
 use nu_engine::command_prelude::*;
+use oem_cp::decode_string_complete_table;
+use std::collections::HashMap;
+use std::sync::LazyLock;
+
+// create a lazycell of all the code_table "Complete" code pages
+// the commented out code pages are "Incomplete", which means they
+// are stored as Option<char> and not &[char; 128]
+static OEM_DECODE: LazyLock<HashMap<usize, &[char; 128]>> = LazyLock::new(|| {
+    let mut m = HashMap::new();
+    m.insert(437, &oem_cp::code_table::DECODING_TABLE_CP437);
+    // m.insert(720, &oem_cp::code_table::DECODING_TABLE_CP720);
+    m.insert(737, &oem_cp::code_table::DECODING_TABLE_CP737);
+    m.insert(775, &oem_cp::code_table::DECODING_TABLE_CP775);
+
+    m.insert(850, &oem_cp::code_table::DECODING_TABLE_CP850);
+    m.insert(852, &oem_cp::code_table::DECODING_TABLE_CP852);
+    m.insert(855, &oem_cp::code_table::DECODING_TABLE_CP855);
+    // m.insert(857, &oem_cp::code_table::DECODING_TABLE_CP857);
+    m.insert(858, &oem_cp::code_table::DECODING_TABLE_CP858);
+    m.insert(860, &oem_cp::code_table::DECODING_TABLE_CP860);
+    m.insert(861, &oem_cp::code_table::DECODING_TABLE_CP861);
+    m.insert(862, &oem_cp::code_table::DECODING_TABLE_CP862);
+    m.insert(863, &oem_cp::code_table::DECODING_TABLE_CP863);
+    // m.insert(864, &oem_cp::code_table::DECODING_TABLE_CP864);
+    m.insert(865, &oem_cp::code_table::DECODING_TABLE_CP865);
+    m.insert(866, &oem_cp::code_table::DECODING_TABLE_CP866);
+    // m.insert(869, &oem_cp::code_table::DECODING_TABLE_CP869);
+    // m.insert(874, &oem_cp::code_table::DECODING_TABLE_CP874);
+
+    m
+});
+
 #[derive(Clone)]
 pub struct Decode;
@@ -84,7 +115,7 @@ fn run(
             let span = stream.span();
             let bytes = stream.into_bytes()?;
             match encoding {
-                Some(encoding_name) => super::encoding::decode(head, encoding_name, &bytes),
+                Some(encoding_name) => detect_and_decode(encoding_name, head, bytes),
                 None => super::encoding::detect_encoding_name(head, span, &bytes)
                     .map(|encoding| encoding.decode(&bytes).0.into_owned())
                     .map(|s| Value::string(s, head)),
@@ -95,7 +126,7 @@ fn run(
             let input_span = v.span();
             match v {
                 Value::Binary { val: bytes, .. } => match encoding {
-                    Some(encoding_name) => super::encoding::decode(head, encoding_name, &bytes),
+                    Some(encoding_name) => detect_and_decode(encoding_name, head, bytes),
                     None => super::encoding::detect_encoding_name(head, input_span, &bytes)
                         .map(|encoding| encoding.decode(&bytes).0.into_owned())
                         .map(|s| Value::string(s, head)),
@@ -121,6 +152,27 @@ fn run(
     }
 }

+// Since we have two different decoding mechanisms, we allow oem_cp to be
+// specified by only a number like `open file | decode 850`. If this decode
+// parameter parses as a usize then we assume it was intentional and use oem_cp
+// crate. Otherwise, if it doesn't parse as a usize, we assume it was a string
+// and use the encoding_rs crate to try and decode it.
+fn detect_and_decode(
+    encoding_name: Spanned<String>,
+    head: Span,
+    bytes: Vec<u8>,
+) -> Result<Value, ShellError> {
+    let dec_table_id = encoding_name.item.parse::<usize>().unwrap_or(0usize);
+    if dec_table_id == 0 {
+        super::encoding::decode(head, encoding_name, &bytes)
+    } else {
+        Ok(Value::string(
+            decode_string_complete_table(&bytes, OEM_DECODE[&dec_table_id]),
+            head,
+        ))
+    }
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
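In practice the dispatch in detect_and_decode means the same command accepts either a code-page number or an encoding name. A hedged sketch (the file name is hypothetical):

    open --raw legacy.txt | decode 850      # parses as a number: decoded with the oem_cp CP850 table
    open --raw legacy.txt | decode utf-8    # not a number: falls through to the existing encoding_rs path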
@@ -307,6 +307,20 @@ fn str_expand(contents: &str, span: Span, value_span: Span) -> Value {
 mod tests {
     use super::*;

+    #[test]
+    fn test_outer_single_item() {
+        assert_eq!(
+            str_expand("{W{x,y}}", Span::test_data(), Span::test_data()),
+            Value::list(
+                vec![
+                    Value::string(String::from("Wx"), Span::test_data(),),
+                    Value::string(String::from("Wy"), Span::test_data(),)
+                ],
+                Span::test_data(),
+            )
+        );
+    }
+
     #[test]
     fn dots() {
         assert_eq!(
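The new unit test mirrors what the user-facing command does with the same pattern; as a quick sketch:

    "{W{x,y}}" | str expand    # => [Wx, Wy]: an outer group with a single item still expands the inner alternation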
@@ -1,3 +1,5 @@
+#[cfg(target_os = "macos")]
+use chrono::{Local, TimeZone};
 #[cfg(windows)]
 use itertools::Itertools;
 use nu_engine::command_prelude::*;
@@ -175,6 +177,10 @@ fn run_ps(
         #[cfg(target_os = "macos")]
         {
             record.push("cwd", Value::string(proc.cwd(), span));
+            let timestamp = Local
+                .timestamp_nanos(proc.start_time * 1_000_000_000)
+                .into();
+            record.push("start_time", Value::date(timestamp, span));
         }
     }

@@ -69,7 +69,7 @@ impl Command for External {
         };

         // Find the absolute path to the executable. On Windows, set the
-        // executable to "cmd.exe" if it's is a CMD internal command. If the
+        // executable to "cmd.exe" if it's a CMD internal command. If the
         // command is not found, display a helpful error message.
         let executable = if cfg!(windows) && is_cmd_internal_command(&name_str) {
             PathBuf::from("cmd.exe")
@@ -114,7 +114,7 @@ impl Command for External {
         command.args(args.into_iter().map(|s| s.item));

         // Configure stdout and stderr. If both are set to `OutDest::Pipe`,
-        // we'll setup a pipe that merge two streams into one.
+        // we'll set up a pipe that merges two streams into one.
         let stdout = stack.stdout();
         let stderr = stack.stderr();
         let merged_stream = if matches!(stdout, OutDest::Pipe) && matches!(stderr, OutDest::Pipe) {
@@ -129,7 +129,7 @@ impl Command for External {
         };

         // Configure stdin. We'll try connecting input to the child process
-        // directly. If that's not possible, we'll setup a pipe and spawn a
+        // directly. If that's not possible, we'll set up a pipe and spawn a
         // thread to copy data into the child process.
         let data_to_copy_into_stdin = match input {
             PipelineData::ByteStream(stream, metadata) => match stream.into_stdio() {
@@ -449,8 +449,8 @@ pub fn command_not_found(
     }

     // Try to match the name with the search terms of existing commands.
-    let signatures = engine_state.get_signatures(false);
-    if let Some(sig) = signatures.iter().find(|sig| {
+    let signatures = engine_state.get_signatures_and_declids(false);
+    if let Some((sig, _)) = signatures.iter().find(|(sig, _)| {
         sig.search_terms
             .iter()
             .any(|term| term.to_folded_case() == name.to_folded_case())
@@ -463,7 +463,7 @@ pub fn command_not_found(
     }

     // Try a fuzzy search on the names of all existing commands.
-    if let Some(cmd) = did_you_mean(signatures.iter().map(|sig| &sig.name), name) {
+    if let Some(cmd) = did_you_mean(signatures.iter().map(|(sig, _)| &sig.name), name) {
         // The user is invoking an external command with the same name as a
         // built-in command. Remind them of this.
         if cmd == name {
@@ -534,7 +534,7 @@ fn escape_cmd_argument(arg: &Spanned<OsString>) -> Result<Cow<'_, OsStr>, ShellE
     let Spanned { item: arg, span } = arg;
     let bytes = arg.as_encoded_bytes();
     if bytes.iter().any(|b| matches!(b, b'\r' | b'\n' | b'%')) {
-        // \r and \n trunacte the rest of the arguments and % can expand environment variables
+        // \r and \n truncate the rest of the arguments and % can expand environment variables
         Err(ShellError::ExternalCommand {
             label:
                 "Arguments to CMD internal commands cannot contain new lines or percent signs '%'"
@@ -1,5 +1,5 @@
 use nu_protocol::{ShellError, Span};
-use once_cell::sync::Lazy;
+use std::sync::LazyLock;
 use std::{collections::HashMap, path::Path};

 // Attribution: Thanks exa. Most of this file is taken from around here
@@ -84,7 +84,7 @@ impl Icons {
 // .unwrap_or_default()
 // }

-static MAP_BY_NAME: Lazy<HashMap<&'static str, char>> = Lazy::new(|| {
+static MAP_BY_NAME: LazyLock<HashMap<&'static str, char>> = LazyLock::new(|| {
     [
         (".Trash", '\u{f1f8}'), //
         (".atom", '\u{e764}'), //
@@ -1088,7 +1088,7 @@ fn create_empty_placeholder(
     let data = vec![vec![cell]];
     let mut table = NuTable::from(data);
     table.set_data_style(TextStyle::default().dimmed());
-    let out = TableOutput::new(table, false, false);
+    let out = TableOutput::new(table, false, false, false);

     let style_computer = &StyleComputer::from_config(engine_state, stack);
     let config = create_nu_table_config(&config, style_computer, &out, false, TableMode::default());
@@ -292,3 +292,37 @@ fn def_env_wrapped_no_help() {
     let actual = nu!("def --wrapped foo [...rest] { echo $rest }; foo -h | to json --raw");
     assert_eq!(actual.out, r#"["-h"]"#);
 }
+
+#[test]
+fn def_recursive_func_should_work() {
+    let actual = nu!("def bar [] { let x = 1; ($x | foo) }; def foo [] { foo }");
+    assert!(actual.err.is_empty());
+
+    let actual = nu!(r#"
+    def recursive [c: int] {
+        if ($c == 0) { return }
+        if ($c mod 2 > 0) {
+            $in | recursive ($c - 1)
+        } else {
+            recursive ($c - 1)
+        }
+    }"#);
+    assert!(actual.err.is_empty());
+}
+
+#[test]
+fn export_def_recursive_func_should_work() {
+    let actual = nu!("export def bar [] { let x = 1; ($x | foo) }; export def foo [] { foo }");
+    assert!(actual.err.is_empty());
+
+    let actual = nu!(r#"
+    export def recursive [c: int] {
+        if ($c == 0) { return }
+        if ($c mod 2 > 0) {
+            $in | recursive ($c - 1)
+        } else {
+            recursive ($c - 1)
+        }
+    }"#);
+    assert!(actual.err.is_empty());
+}
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn errors_if_given_unknown_column_name() {
|
fn errors_if_given_unknown_column_name() {
|
||||||
let sample = r#"
|
let sample = r#"{
|
||||||
{
|
"nu": {
|
||||||
"nu": {
|
"committers": [
|
||||||
"committers": [
|
{"name": "Andrés N. Robalino"},
|
||||||
{"name": "Andrés N. Robalino"},
|
{"name": "JT Turner"},
|
||||||
{"name": "JT Turner"},
|
{"name": "Yehuda Katz"}
|
||||||
{"name": "Yehuda Katz"}
|
],
|
||||||
],
|
"releases": [
|
||||||
"releases": [
|
{"version": "0.2"}
|
||||||
{"version": "0.2"}
|
{"version": "0.8"},
|
||||||
{"version": "0.8"},
|
{"version": "0.9999999"}
|
||||||
{"version": "0.9999999"}
|
],
|
||||||
],
|
"0xATYKARNU": [
|
||||||
"0xATYKARNU": [
|
["Th", "e", " "],
|
||||||
["Th", "e", " "],
|
["BIG", " ", "UnO"],
|
||||||
["BIG", " ", "UnO"],
|
["punto", "cero"]
|
||||||
["punto", "cero"]
|
]
|
||||||
]
|
}
|
||||||
}
|
}
|
||||||
}
|
"#;
|
||||||
"#;
|
|
||||||
|
|
||||||
let actual = nu!(pipeline(&format!(
|
let actual = nu!(pipeline(&format!(
|
||||||
r#"
|
r#"'{sample}'
|
||||||
'{sample}'
|
| from json
|
||||||
| from json
|
| group-by {{|| get nu.releases.missing_column }}"#
|
||||||
| group-by {{|| get nu.releases.version }}
|
|
||||||
"#
|
|
||||||
)));
|
)));
|
||||||
|
assert!(actual.err.contains("cannot find column"));
|
||||||
assert!(actual.err.contains("can't convert list<string> to string"));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@@ -355,7 +355,6 @@ fn do_cases_where_result_differs_between_join_types_with_different_join_keys(joi
     }
 }

-#[ignore]
 #[test]
 fn test_alternative_table_syntax() {
     let join_type = "--inner";
@@ -1,4 +1,6 @@
+use nu_test_support::fs::Stub::FileWithContent;
 use nu_test_support::nu;
+use nu_test_support::playground::Playground;

 #[test]
 fn length_columns_in_cal_table() {
@@ -20,3 +22,14 @@ fn length_fails_on_echo_record() {

     assert!(actual.err.contains("only_supports_this_input_type"));
 }
+
+#[test]
+fn length_byte_stream() {
+    Playground::setup("length_bytes", |dirs, sandbox| {
+        sandbox.mkdir("length_bytes");
+        sandbox.with_files(&[FileWithContent("data.txt", "😀")]);
+
+        let actual = nu!(cwd: dirs.test(), "open data.txt | length");
+        assert_eq!(actual.out, "4");
+    });
+}
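The new test pins down that `length` on a byte stream counts bytes rather than characters: the file holds a single emoji, which is four bytes in UTF-8. In shell terms:

    open data.txt | length    # => 4 for a file containing only "😀" (one character, four UTF-8 bytes)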
@@ -1,10 +1,11 @@
 use nu_test_support::nu;
+use rstest::rstest;

-#[test]
-fn let_name_builtin_var() {
-    let actual = nu!("let in = 3");
-    assert!(actual
+#[rstest]
+#[case("let in = 3")]
+#[case("let in: int = 3")]
+fn let_name_builtin_var(#[case] assignment: &str) {
+    assert!(nu!(assignment)
         .err
         .contains("'in' is the name of a builtin Nushell variable"));
 }
@@ -64,6 +64,12 @@ fn match_list_rest() {
     assert_eq!(actual.out, "single: 1 5");
 }

+#[test]
+fn match_list_rest_empty() {
+    let actual = nu!(r#"match [1] { [1 ..$rest] => { $rest == [] } }"#);
+    assert_eq!(actual.out, "true");
+}
+
 #[test]
 fn match_constant_1() {
     let actual = nu!(
@@ -268,6 +268,42 @@ fn modulo() {
     assert_eq!(actual.out, "1");
 }

+#[test]
+fn floor_div_mod() {
+    let actual = nu!("let q = 8 // -3; let r = 8 mod -3; 8 == $q * -3 + $r");
+    assert_eq!(actual.out, "true");
+
+    let actual = nu!("let q = -8 // 3; let r = -8 mod 3; -8 == $q * 3 + $r");
+    assert_eq!(actual.out, "true");
+}
+
+#[test]
+fn floor_div_mod_overflow() {
+    let actual = nu!(format!("{} // -1", i64::MIN));
+    assert!(actual.err.contains("overflow"));
+
+    let actual = nu!(format!("{} mod -1", i64::MIN));
+    assert!(actual.err.contains("overflow"));
+}
+
+#[test]
+fn floor_div_mod_zero() {
+    let actual = nu!("1 // 0");
+    assert!(actual.err.contains("zero"));
+
+    let actual = nu!("1 mod 0");
+    assert!(actual.err.contains("zero"));
+}
+
+#[test]
+fn floor_div_mod_large_num() {
+    let actual = nu!(format!("{} // {}", i64::MAX, i64::MAX / 2));
+    assert_eq!(actual.out, "2");
+
+    let actual = nu!(format!("{} mod {}", i64::MAX, i64::MAX / 2));
+    assert_eq!(actual.out, "1");
+}
+
 #[test]
 fn unit_multiplication_math() {
     let actual = nu!(pipeline(
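The identity checked by floor_div_mod is the usual floored-division relation, dividend == quotient * divisor + remainder, with the quotient rounded toward negative infinity. Worked through for one of the tested cases (the concrete results are inferred from that identity, so treat them as a sketch):

    8 // -3     # => -3, since 8 / -3 = -2.67 floors to -3
    8 mod -3    # => -1, since 8 - (-3) * -3 = -1, so 8 == $q * -3 + $r holds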
@@ -1,4 +1,5 @@
 use nu_test_support::nu;
+use rstest::rstest;

 #[test]
 fn mut_variable() {
@@ -7,11 +8,11 @@ fn mut_variable() {
     assert_eq!(actual.out, "4");
 }

-#[test]
-fn mut_name_builtin_var() {
-    let actual = nu!("mut in = 3");
-    assert!(actual
+#[rstest]
+#[case("mut in = 3")]
+#[case("mut in: int = 3")]
+fn mut_name_builtin_var(#[case] assignment: &str) {
+    assert!(nu!(assignment)
         .err
         .contains("'in' is the name of a builtin Nushell variable"));
 }
@@ -1,3 +1,5 @@
+use std::{thread, time::Duration};
+
 use mockito::Server;
 use nu_test_support::{nu, pipeline};

@@ -122,3 +124,21 @@ fn http_delete_redirect_mode_error() {
         "Redirect encountered when redirect handling mode was 'error' (301 Moved Permanently)"
     ));
 }
+
+#[test]
+fn http_delete_timeout() {
+    let mut server = Server::new();
+    let _mock = server
+        .mock("DELETE", "/")
+        .with_chunked_body(|w| {
+            thread::sleep(Duration::from_secs(1));
+            w.write_all(b"Delayed response!")
+        })
+        .create();
+
+    let actual = nu!(pipeline(
+        format!("http delete --max-time 500ms {url}", url = server.url()).as_str()
+    ));
+
+    assert!(&actual.err.contains("nu::shell::io_error"));
+}
@@ -1,3 +1,5 @@
+use std::{thread, time::Duration};
+
 use mockito::Server;
 use nu_test_support::{nu, pipeline};

@@ -316,3 +318,21 @@ fn http_get_with_unknown_mime_type() {

     assert_eq!(actual.out, "[1,2,3]");
 }
+
+#[test]
+fn http_get_timeout() {
+    let mut server = Server::new();
+    let _mock = server
+        .mock("GET", "/")
+        .with_chunked_body(|w| {
+            thread::sleep(Duration::from_secs(1));
+            w.write_all(b"Delayed response!")
+        })
+        .create();
+
+    let actual = nu!(pipeline(
+        format!("http get --max-time 500ms {url}", url = server.url()).as_str()
+    ));
+
+    assert!(&actual.err.contains("nu::shell::io_error"));
+}
@@ -1,3 +1,5 @@
+use std::{thread, time::Duration};
+
 use mockito::Server;
 use nu_test_support::{nu, pipeline};

@@ -41,3 +43,21 @@ fn http_options_failed_due_to_server_error() {

     assert!(actual.err.contains("Bad request (400)"))
 }
+
+#[test]
+fn http_options_timeout() {
+    let mut server = Server::new();
+    let _mock = server
+        .mock("OPTIONS", "/")
+        .with_chunked_body(|w| {
+            thread::sleep(Duration::from_secs(1));
+            w.write_all(b"Delayed response!")
+        })
+        .create();
+
+    let actual = nu!(pipeline(
+        format!("http options --max-time 500ms {url}", url = server.url()).as_str()
+    ));
+
+    assert!(&actual.err.contains("nu::shell::io_error"));
+}
@@ -1,3 +1,5 @@
+use std::{thread, time::Duration};
+
 use mockito::Server;
 use nu_test_support::{nu, pipeline};

@@ -162,3 +164,25 @@ fn http_patch_redirect_mode_error() {
         "Redirect encountered when redirect handling mode was 'error' (301 Moved Permanently)"
     ));
 }
+
+#[test]
+fn http_patch_timeout() {
+    let mut server = Server::new();
+    let _mock = server
+        .mock("PATCH", "/")
+        .with_chunked_body(|w| {
+            thread::sleep(Duration::from_secs(1));
+            w.write_all(b"Delayed response!")
+        })
+        .create();
+
+    let actual = nu!(pipeline(
+        format!(
+            "http patch --max-time 500ms {url} patchbody",
+            url = server.url()
+        )
+        .as_str()
+    ));
+
+    assert!(&actual.err.contains("nu::shell::io_error"));
+}
@@ -1,3 +1,5 @@
+use std::{thread, time::Duration};
+
 use mockito::{Matcher, Server, ServerOpts};
 use nu_test_support::{nu, pipeline};

@@ -276,3 +278,25 @@ fn http_post_multipart_is_success() {

     assert!(actual.out.is_empty())
 }
+
+#[test]
+fn http_post_timeout() {
+    let mut server = Server::new();
+    let _mock = server
+        .mock("POST", "/")
+        .with_chunked_body(|w| {
+            thread::sleep(Duration::from_secs(1));
+            w.write_all(b"Delayed response!")
+        })
+        .create();
+
+    let actual = nu!(pipeline(
+        format!(
+            "http post --max-time 500ms {url} postbody",
+            url = server.url()
+        )
+        .as_str()
+    ));
+
+    assert!(&actual.err.contains("nu::shell::io_error"));
+}
@@ -1,3 +1,5 @@
+use std::{thread, time::Duration};
+
 use mockito::Server;
 use nu_test_support::{nu, pipeline};

@@ -162,3 +164,25 @@ fn http_put_redirect_mode_error() {
         "Redirect encountered when redirect handling mode was 'error' (301 Moved Permanently)"
     ));
 }
+
+#[test]
+fn http_put_timeout() {
+    let mut server = Server::new();
+    let _mock = server
+        .mock("PUT", "/")
+        .with_chunked_body(|w| {
+            thread::sleep(Duration::from_secs(1));
+            w.write_all(b"Delayed response!")
+        })
+        .create();
+
+    let actual = nu!(pipeline(
+        format!(
+            "http put --max-time 500ms {url} putbody",
+            url = server.url()
+        )
+        .as_str()
+    ));
+
+    assert!(&actual.err.contains("nu::shell::io_error"));
+}
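All six new timeout tests exercise the same flag: `--max-time` takes a duration value, and exceeding it surfaces as an io_error shell error. A usage sketch (the URL is a placeholder):

    http get --max-time 500ms https://example.com    # fails with nu::shell::io_error if the response takes longer than 500ms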
@@ -394,7 +394,7 @@ fn test_content_types_with_open_raw() {
         let result = nu!(cwd: dirs.formats(), "open --raw sample_data.xlsx | metadata");
         assert!(result.out.contains("vnd.openxmlformats-officedocument"));
         let result = nu!(cwd: dirs.formats(), "open --raw sample_def.nu | metadata");
-        assert!(!result.out.contains("content_type"));
+        assert!(result.out.contains("application/x-nuscript"));
         let result = nu!(cwd: dirs.formats(), "open --raw sample.eml | metadata");
         assert!(result.out.contains("message/rfc822"));
         let result = nu!(cwd: dirs.formats(), "open --raw cargo_sample.toml | metadata");
@@ -11,5 +11,19 @@ fn test_ansi_shows_error_on_escape() {
 fn test_ansi_list_outputs_table() {
     let actual = nu!("ansi --list | length");

-    assert_eq!(actual.out, "424");
+    assert_eq!(actual.out, "425");
+}
+
+#[test]
+fn test_ansi_codes() {
+    let actual = nu!("$'(ansi clear_scrollback_buffer)'");
+    assert_eq!(actual.out, "\x1b[3J");
+
+    // Currently, bg is placed before fg in the results
+    // It's okay if something internally changes this, but
+    // if so, the test case will need to be updated to:
+    // assert_eq!(actual.out, "\x1b[31;48;2;0;255;0mHello\x1b[0m");
+
+    let actual = nu!("$'(ansi { fg: red, bg: \"#00ff00\" })Hello(ansi reset)'");
+    assert_eq!(actual.out, "\x1b[48;2;0;255;0;31mHello\x1b[0m");
 }
@@ -130,7 +130,10 @@ fn reject_optional_row() {
 #[test]
 fn reject_columns_with_list_spread() {
     let actual = nu!("let arg = [type size]; [[name type size];[Cargo.toml file 10mb] [Cargo.lock file 10mb] [src dir 100mb]] | reject ...$arg | to nuon");
-    assert_eq!(actual.out, "[[name]; [Cargo.toml], [Cargo.lock], [src]]");
+    assert_eq!(
+        actual.out,
+        r#"[[name]; ["Cargo.toml"], ["Cargo.lock"], [src]]"#
+    );
 }

 #[test]
@@ -138,7 +141,7 @@ fn reject_rows_with_list_spread() {
     let actual = nu!("let arg = [2 0]; [[name type size];[Cargo.toml file 10mb] [Cargo.lock file 10mb] [src dir 100mb]] | reject ...$arg | to nuon");
     assert_eq!(
         actual.out,
-        "[[name, type, size]; [Cargo.lock, file, 10000000b]]"
+        r#"[[name, type, size]; ["Cargo.lock", file, 10000000b]]"#
     );
 }

@@ -147,7 +150,7 @@ fn reject_mixed_with_list_spread() {
     let actual = nu!("let arg = [type 2]; [[name type size];[Cargp.toml file 10mb] [ Cargo.lock file 10mb] [src dir 100mb]] | reject ...$arg | to nuon");
     assert_eq!(
         actual.out,
-        "[[name, size]; [Cargp.toml, 10000000b], [Cargo.lock, 10000000b]]"
+        r#"[[name, size]; ["Cargp.toml", 10000000b], ["Cargo.lock", 10000000b]]"#
     );
 }

@@ -32,3 +32,9 @@ fn return_works_in_script_with_def_main() {
     );
     assert!(actual.err.is_empty());
 }
+
+#[test]
+fn return_does_not_set_last_exit_code() {
+    let actual = nu!("hide-env LAST_EXIT_CODE; do --env { return 42 }; $env.LAST_EXIT_CODE?");
+    assert!(matches!(actual.out.as_str(), ""));
+}
@@ -415,7 +415,7 @@ fn save_with_custom_converter() {

     nu!(cwd: dirs.test(), pipeline(
         r#"
-        def "to ndjson" []: any -> string { each { to json --raw } | to text } ;
+        def "to ndjson" []: any -> string { each { to json --raw } | to text --no-newline } ;
         {a: 1, b: 2} | save test.ndjson
         "#
     ));
|
|||||||
let actual = nu!("$env.config.table.header_on_separator = true; [['Llll oo Bbbbbbbb' 'Bbbbbbbb Aaaa' Nnnnnn Ggggg 'Xxxxx Llllllll #' Bbb 'Pppp Ccccc' 'Rrrrrrrr Dddd' Rrrrrr 'Rrrrrr Ccccc II' 'Rrrrrr Ccccc Ppppppp II' 'Pppppp Dddddddd Tttt' 'Pppppp Dddddddd Dddd' 'Rrrrrrrrr Trrrrrr' 'Pppppp Ppppp Dddd' 'Ppppp Dddd' Hhhh]; [RRRRRRR FFFFFFFF UUUU VV 202407160001 BBB 1 '7/16/2024' '' AAA-1111 AAA-1111-11 '7 YEARS' 2555 'RRRRRRRR DDDD' '7/16/2031' '7/16/2031' NN]] | table --width=87 --theme basic");
|
let actual = nu!("$env.config.table.header_on_separator = true; [['Llll oo Bbbbbbbb' 'Bbbbbbbb Aaaa' Nnnnnn Ggggg 'Xxxxx Llllllll #' Bbb 'Pppp Ccccc' 'Rrrrrrrr Dddd' Rrrrrr 'Rrrrrr Ccccc II' 'Rrrrrr Ccccc Ppppppp II' 'Pppppp Dddddddd Tttt' 'Pppppp Dddddddd Dddd' 'Rrrrrrrrr Trrrrrr' 'Pppppp Ppppp Dddd' 'Ppppp Dddd' Hhhh]; [RRRRRRR FFFFFFFF UUUU VV 202407160001 BBB 1 '7/16/2024' '' AAA-1111 AAA-1111-11 '7 YEARS' 2555 'RRRRRRRR DDDD' '7/16/2031' '7/16/2031' NN]] | table --width=87 --theme basic");
|
||||||
assert_eq!(actual.out, "+-#-+-Llll oo Bbbbbbbb-+-Bbbbbbbb Aaaa-+-Nnnnnn-+-Ggggg-+-Xxxxx Llllllll #-+-...-+| 0 | RRRRRRR | FFFFFFFF | UUUU | VV | 202407160001 | ... |+---+------------------+---------------+--------+-------+------------------+-----+");
|
assert_eq!(actual.out, "+-#-+-Llll oo Bbbbbbbb-+-Bbbbbbbb Aaaa-+-Nnnnnn-+-Ggggg-+-Xxxxx Llllllll #-+-...-+| 0 | RRRRRRR | FFFFFFFF | UUUU | VV | 202407160001 | ... |+---+------------------+---------------+--------+-------+------------------+-----+");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn table_footer_inheritance() {
|
||||||
|
let table1 = format!(
|
||||||
|
"[ [ head1, head2, head3 ]; {} ]",
|
||||||
|
(0..212)
|
||||||
|
.map(|_| "[ 79 79 79 ]")
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(" ")
|
||||||
|
);
|
||||||
|
|
||||||
|
let structure = format!(
|
||||||
|
"{{\
|
||||||
|
field0: [ [ y1, y2, y3 ]; [ 1 2 3 ] [ 79 79 79 ] [ {{ f1: 'a string', f2: 1000 }}, 1, 2 ] ],\
|
||||||
|
field1: [ a, b, c ],\
|
||||||
|
field2: [ 123, 234, 345 ],\
|
||||||
|
field3: {},\
|
||||||
|
field4: {{ f1: 1, f2: 3, f3: {{ f1: f1, f2: f2, f3: f3 }} }},\
|
||||||
|
field5: [ [ x1, x2, x3 ]; [ 1 2 3 ] [ 79 79 79 ] [ {{ f1: 'a string', f2: 1000 }}, 1, 2 ] ],\
|
||||||
|
}}",
|
||||||
|
table1
|
||||||
|
);
|
||||||
|
let actual = nu!(format!(
|
||||||
|
"$env.config.table.footer_inheritance = true; {structure} | table --width=80 --expand"
|
||||||
|
));
|
||||||
|
|
||||||
|
assert_eq!(actual.out.match_indices("head1").count(), 2);
|
||||||
|
assert_eq!(actual.out.match_indices("head2").count(), 2);
|
||||||
|
assert_eq!(actual.out.match_indices("head3").count(), 2);
|
||||||
|
assert_eq!(actual.out.match_indices("y1").count(), 1);
|
||||||
|
assert_eq!(actual.out.match_indices("y2").count(), 1);
|
||||||
|
assert_eq!(actual.out.match_indices("y3").count(), 1);
|
||||||
|
assert_eq!(actual.out.match_indices("x1").count(), 1);
|
||||||
|
assert_eq!(actual.out.match_indices("x2").count(), 1);
|
||||||
|
assert_eq!(actual.out.match_indices("x3").count(), 1);
|
||||||
|
}
|
||||||
|
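Read from the assertions: with the option enabled, the outer table's header row shows up twice (once as header, once as footer on the long table), while the nested tables' headers appear only once. Turning it on is a one-liner; the `ls` pipeline below is purely illustrative:

    $env.config.table.footer_inheritance = true
    ls | table --width=80 --expand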

@@ -1,19 +1,54 @@
 use nu_test_support::nu;
 
+const LINE_LEN: usize = if cfg!(target_os = "windows") { 2 } else { 1 };
+
 #[test]
-fn list_to_text() {
-    let actual = nu!(r#"["foo" "bar" "baz"] | to text"#);
-
-    // these actually have newlines between them in the real world but nu! strips newlines, grr
-    assert_eq!(actual.out, "foobarbaz");
+fn list() {
+    // Using `str length` since nu! strips newlines, grr
+    let actual = nu!(r#"[] | to text | str length"#);
+    assert_eq!(actual.out, "0");
+
+    let actual = nu!(r#"[a] | to text | str length"#);
+    assert_eq!(actual.out, (1 + LINE_LEN).to_string());
+
+    let actual = nu!(r#"[a b] | to text | str length"#);
+    assert_eq!(actual.out, (2 * (1 + LINE_LEN)).to_string());
 }
 
-// the output should be the same when `to text` gets a ListStream instead of a Value::List
+// The output should be the same when `to text` gets a ListStream instead of a Value::List.
 #[test]
-fn list_stream_to_text() {
-    // use `each` to convert the list to a ListStream
-    let actual = nu!(r#"["foo" "bar" "baz"] | each {|i| $i} | to text"#);
-
-    // these actually have newlines between them in the real world but nu! strips newlines, grr
-    assert_eq!(actual.out, "foobarbaz");
+fn list_stream() {
+    let actual = nu!(r#"[] | each {} | to text | str length"#);
+    assert_eq!(actual.out, "0");
+
+    let actual = nu!(r#"[a] | each {} | to text | str length"#);
+    assert_eq!(actual.out, (1 + LINE_LEN).to_string());
+
+    let actual = nu!(r#"[a b] | each {} | to text | str length"#);
+    assert_eq!(actual.out, (2 * (1 + LINE_LEN)).to_string());
 }
+
+#[test]
+fn list_no_newline() {
+    let actual = nu!(r#"[] | to text -n | str length"#);
+    assert_eq!(actual.out, "0");
+
+    let actual = nu!(r#"[a] | to text -n | str length"#);
+    assert_eq!(actual.out, "1");
+
+    let actual = nu!(r#"[a b] | to text -n | str length"#);
+    assert_eq!(actual.out, (2 + LINE_LEN).to_string());
+}
+
+// The output should be the same when `to text` gets a ListStream instead of a Value::List.
+#[test]
+fn list_stream_no_newline() {
+    let actual = nu!(r#"[] | each {} | to text -n | str length"#);
+    assert_eq!(actual.out, "0");
+
+    let actual = nu!(r#"[a] | each {} | to text -n | str length"#);
+    assert_eq!(actual.out, "1");
+
+    let actual = nu!(r#"[a b] | each {} | to text -n | str length"#);
+    assert_eq!(actual.out, (2 + LINE_LEN).to_string());
+}
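
Note (illustrative, not part of the diff): the expected lengths above follow from `to text` terminating each list item with the platform line ending — CRLF (2 bytes) on Windows, LF (1 byte) elsewhere — which is what LINE_LEN encodes, while `to text -n` drops the trailing line ending. A minimal sketch of the same arithmetic; `render` and `render_no_trailing` are hypothetical stand-ins for the behaviour the tests assume:

// Platform line-ending length, mirroring the constant in the test file.
const LINE_LEN: usize = if cfg!(target_os = "windows") { 2 } else { 1 };
const EOL: &str = if cfg!(target_os = "windows") { "\r\n" } else { "\n" };

// Hypothetical stand-in for `to text`: every item is terminated with the line ending.
fn render(items: &[&str]) -> String {
    items.iter().map(|item| format!("{item}{EOL}")).collect()
}

// Hypothetical stand-in for `to text -n`: items are separated, no trailing line ending.
fn render_no_trailing(items: &[&str]) -> String {
    items.join(EOL)
}

fn main() {
    assert_eq!(render(&[]).len(), 0);
    assert_eq!(render(&["a"]).len(), 1 + LINE_LEN);
    assert_eq!(render(&["a", "b"]).len(), 2 * (1 + LINE_LEN));

    assert_eq!(render_no_trailing(&["a"]).len(), 1);
    assert_eq!(render_no_trailing(&["a", "b"]).len(), 2 + LINE_LEN);
}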

@@ -1,7 +1,7 @@
 use chrono::{DateTime, Local};
 use nu_test_support::fs::{files_exist_at, Stub};
 use nu_test_support::nu;
-use nu_test_support::playground::Playground;
+use nu_test_support::playground::{Dirs, Playground};
 
 // Use 1 instead of 0 because 0 has a special meaning in Windows
 const TIME_ONE: filetime::FileTime = filetime::FileTime::from_unix_time(1, 0);

@@ -527,3 +527,128 @@ fn reference_respects_cwd() {
        assert!(path.exists());
    })
}

fn setup_symlink_fs(dirs: &Dirs, sandbox: &mut Playground<'_>) {
    sandbox.mkdir("d");
    sandbox.with_files(&[Stub::EmptyFile("f"), Stub::EmptyFile("d/f")]);
    sandbox.symlink("f", "fs");
    sandbox.symlink("d", "ds");
    sandbox.symlink("d/f", "fds");

    // sandbox.symlink does not handle symlinks to missing files well. It panics
    // But they are useful, and they should be tested.
    #[cfg(unix)]
    {
        std::os::unix::fs::symlink(dirs.test().join("m"), dirs.test().join("fms")).unwrap();
    }

    #[cfg(windows)]
    {
        std::os::windows::fs::symlink_file(dirs.test().join("m"), dirs.test().join("fms")).unwrap();
    }

    // Change the file times to a known "old" value for comparison
    filetime::set_symlink_file_times(dirs.test().join("f"), TIME_ONE, TIME_ONE).unwrap();
    filetime::set_symlink_file_times(dirs.test().join("d"), TIME_ONE, TIME_ONE).unwrap();
    filetime::set_symlink_file_times(dirs.test().join("d/f"), TIME_ONE, TIME_ONE).unwrap();
    filetime::set_symlink_file_times(dirs.test().join("ds"), TIME_ONE, TIME_ONE).unwrap();
    filetime::set_symlink_file_times(dirs.test().join("fs"), TIME_ONE, TIME_ONE).unwrap();
    filetime::set_symlink_file_times(dirs.test().join("fds"), TIME_ONE, TIME_ONE).unwrap();
    filetime::set_symlink_file_times(dirs.test().join("fms"), TIME_ONE, TIME_ONE).unwrap();
}

fn get_times(path: &nu_path::AbsolutePath) -> (filetime::FileTime, filetime::FileTime) {
    let metadata = path.symlink_metadata().unwrap();

    (
        filetime::FileTime::from_system_time(metadata.accessed().unwrap()),
        filetime::FileTime::from_system_time(metadata.modified().unwrap()),
    )
}

#[test]
fn follow_symlinks() {
    Playground::setup("touch_follows_symlinks", |dirs, sandbox| {
        setup_symlink_fs(&dirs, sandbox);

        let missing = dirs.test().join("m");
        assert!(!missing.exists());

        nu!(
            cwd: dirs.test(),
            "
            touch fds
            touch ds
            touch fs
            touch fms
            "
        );

        // We created the missing symlink target
        assert!(missing.exists());

        // The timestamps for files and directories were changed from TIME_ONE
        let file_times = get_times(&dirs.test().join("f"));
        let dir_times = get_times(&dirs.test().join("d"));
        let dir_file_times = get_times(&dirs.test().join("d/f"));

        assert_ne!(file_times, (TIME_ONE, TIME_ONE));
        assert_ne!(dir_times, (TIME_ONE, TIME_ONE));
        assert_ne!(dir_file_times, (TIME_ONE, TIME_ONE));

        // For symlinks, they remain (mostly) the same
        // We can't test accessed times, since to reach the target file, the symlink must be accessed!
        let file_symlink_times = get_times(&dirs.test().join("fs"));
        let dir_symlink_times = get_times(&dirs.test().join("ds"));
        let dir_file_symlink_times = get_times(&dirs.test().join("fds"));
        let file_missing_symlink_times = get_times(&dirs.test().join("fms"));

        assert_eq!(file_symlink_times.1, TIME_ONE);
        assert_eq!(dir_symlink_times.1, TIME_ONE);
        assert_eq!(dir_file_symlink_times.1, TIME_ONE);
        assert_eq!(file_missing_symlink_times.1, TIME_ONE);
    })
}

#[test]
fn no_follow_symlinks() {
    Playground::setup("touch_touches_symlinks", |dirs, sandbox| {
        setup_symlink_fs(&dirs, sandbox);

        let missing = dirs.test().join("m");
        assert!(!missing.exists());

        nu!(
            cwd: dirs.test(),
            "
            touch fds -s
            touch ds -s
            touch fs -s
            touch fms -s
            "
        );

        // We did not create the missing symlink target
        assert!(!missing.exists());

        // The timestamps for files and directories remain the same
        let file_times = get_times(&dirs.test().join("f"));
        let dir_times = get_times(&dirs.test().join("d"));
        let dir_file_times = get_times(&dirs.test().join("d/f"));

        assert_eq!(file_times, (TIME_ONE, TIME_ONE));
        assert_eq!(dir_times, (TIME_ONE, TIME_ONE));
        assert_eq!(dir_file_times, (TIME_ONE, TIME_ONE));

        // For symlinks, everything changed. (except their targets, and paths, and personality)
        let file_symlink_times = get_times(&dirs.test().join("fs"));
        let dir_symlink_times = get_times(&dirs.test().join("ds"));
        let dir_file_symlink_times = get_times(&dirs.test().join("fds"));
        let file_missing_symlink_times = get_times(&dirs.test().join("fms"));

        assert_ne!(file_symlink_times, (TIME_ONE, TIME_ONE));
        assert_ne!(dir_symlink_times, (TIME_ONE, TIME_ONE));
        assert_ne!(dir_file_symlink_times, (TIME_ONE, TIME_ONE));
        assert_ne!(file_missing_symlink_times, (TIME_ONE, TIME_ONE));
    })
}
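
Note (illustrative, not part of the diff): the two tests above come down to whether a timestamp update goes through the symlink to its target or lands on the link itself. A minimal sketch of that distinction with the same `filetime` crate the tests use — `set_file_times` follows the link, `set_symlink_file_times` does not; the `stamp` helper and the `fs` path are hypothetical:

use std::path::Path;

use filetime::FileTime;

// Hypothetical helper: write both timestamps either through the symlink
// (as the plain `touch` calls above do) or onto the symlink itself (as `touch -s` does).
fn stamp(path: &Path, time: FileTime, follow: bool) -> std::io::Result<()> {
    if follow {
        // Resolves the link and updates the target's access/modification times.
        filetime::set_file_times(path, time, time)
    } else {
        // Updates the link's own times and leaves the target untouched.
        filetime::set_symlink_file_times(path, time, time)
    }
}

fn main() -> std::io::Result<()> {
    let one = FileTime::from_unix_time(1, 0);
    let link = Path::new("fs"); // hypothetical symlink, like the one the sandbox creates
    if link.symlink_metadata().is_ok() {
        stamp(link, one, false)?;
    }
    Ok(())
}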
Some files were not shown because too many files have changed in this diff.