Compare commits

3 Commits
main ... 0.87.1

Author SHA1 Message Date
3d631490bc Bump version to 0.87.1 (#11056) 2023-11-18 18:46:36 +01:00
68211dea3e Send only absolute paths to uu_cp (#11080)
# Description
Fixes https://github.com/nushell/nushell/issues/10832

Replaces: https://github.com/nushell/nushell/pull/10843
2023-11-18 17:57:49 +01:00
b8e9293c45 Fix rm path handling (#11064)
# Description
Fixes issue #11061 where `rm` fails to find a file after a `cd`. It
looks like the new glob functions do not return absolute file paths,
which we forgot to account for.
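
A minimal sketch of the idea behind the fix (illustrative names, not the actual nushell code): relative glob matches are anchored to the current working directory before being handed to the removal logic.

```rust
use std::path::{Path, PathBuf};

// Hypothetical helper: glob matches may come back relative to the
// working directory, so join them onto `cwd` before deleting.
fn absolutize(cwd: &Path, matched: PathBuf) -> PathBuf {
    if matched.is_absolute() {
        matched
    } else {
        cwd.join(matched)
    }
}

fn main() {
    let cwd = Path::new("/home/user/project");
    assert_eq!(
        absolutize(cwd, PathBuf::from("notes.txt")),
        PathBuf::from("/home/user/project/notes.txt")
    );
}
```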

# Tests
Added a test (fails on current main, but passes with this PR).

---------

Co-authored-by: Jakub Žádník <kubouch@gmail.com>
2023-11-18 17:57:49 +01:00
589 changed files with 11563 additions and 16237 deletions

.github/.typos.toml vendored
View File

@ -12,4 +12,3 @@ IIF = "IIF"
numer = "numer"
ratatui = "ratatui"
doas = "doas"
wheres = "wheres"

View File

@ -41,7 +41,7 @@ jobs:
- uses: actions/checkout@v4
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
with:
rustflags: ""
@ -85,24 +85,13 @@ jobs:
- uses: actions/checkout@v4
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
with:
rustflags: ""
- name: Tests
run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}
- name: Check for clean repo
shell: bash
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "there are changes";
git status --porcelain
exit 1
else
echo "no changes in working directory";
fi
std-lib-and-python-virtualenv:
strategy:
fail-fast: true
@ -117,7 +106,7 @@ jobs:
- uses: actions/checkout@v4
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
with:
rustflags: ""
@ -128,7 +117,7 @@ jobs:
run: nu -c 'use std testing; testing run-tests --path crates/nu-std'
- name: Setup Python
uses: actions/setup-python@v5
uses: actions/setup-python@v4
with:
python-version: "3.10"
@ -140,17 +129,6 @@ jobs:
run: nu scripts/test_virtualenv.nu
shell: bash
- name: Check for clean repo
shell: bash
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "there are changes";
git status --porcelain
exit 1
else
echo "no changes in working directory";
fi
plugins:
strategy:
fail-fast: true
@ -163,7 +141,7 @@ jobs:
- uses: actions/checkout@v4
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
with:
rustflags: ""
@ -172,14 +150,3 @@ jobs:
- name: Tests
run: cargo test --profile ci --package nu_plugin_*
- name: Check for clean repo
shell: bash
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "there are changes";
git status --porcelain
exit 1
else
echo "no changes in working directory";
fi

View File

@ -119,7 +119,7 @@ jobs:
os: ubuntu-20.04
target_rustflags: ''
- target: riscv64gc-unknown-linux-gnu
os: ubuntu-latest
os: ubuntu-20.04
target_rustflags: ''
runs-on: ${{matrix.os}}
@ -135,7 +135,7 @@ jobs:
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with:
rustflags: ''
@ -249,7 +249,7 @@ jobs:
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with:
rustflags: ''

View File

@ -82,8 +82,8 @@ print $'Start building ($bin)...'; hr-line
# ----------------------------------------------------------------------------
# Build for Ubuntu and macOS
# ----------------------------------------------------------------------------
if $os in [$USE_UBUNTU, 'macos-latest', 'ubuntu-latest'] {
if $os starts-with ubuntu {
if $os in [$USE_UBUNTU, 'macos-latest'] {
if $os == $USE_UBUNTU {
sudo apt update
sudo apt-get install libxcb-composite0-dev -y
}
@ -106,7 +106,7 @@ if $os in [$USE_UBUNTU, 'macos-latest', 'ubuntu-latest'] {
_ => {
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
# Actually just for x86_64-unknown-linux-musl target
if $os starts-with ubuntu { sudo apt install musl-tools -y }
if $os == $USE_UBUNTU { sudo apt install musl-tools -y }
cargo-build-nu $flags
}
}
@ -138,6 +138,8 @@ print $'(char nl)Copying release files...'; hr-line
> register ./nu_plugin_query" | save $'($dist)/README.txt' -f
[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten
# Sleep a few seconds to make sure the cp process finished successfully
sleep 3sec
print $'(char nl)Check binary release version detail:'; hr-line
let ver = if $os == 'windows-latest' {
@ -146,14 +148,14 @@ let ver = if $os == 'windows-latest' {
(do -i { ./output/nu -c 'version' }) | str join
}
if ($ver | str trim | is-empty) {
print $'(ansi r)Incompatible Nu binary: The binary cross compiled is not runnable on current arch...(ansi reset)'
print $'(ansi r)Incompatible nu binary...(ansi reset)'
} else { print $ver }
# ----------------------------------------------------------------------------
# Create a release archive and send it to output for the following steps
# ----------------------------------------------------------------------------
cd $dist; print $'(char nl)Creating release archive...'; hr-line
if $os in [$USE_UBUNTU, 'macos-latest', 'ubuntu-latest'] {
if $os in [$USE_UBUNTU, 'macos-latest'] {
let files = (ls | get name)
let dest = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' }

View File

@ -66,7 +66,7 @@ jobs:
os: ubuntu-20.04
target_rustflags: ''
- target: riscv64gc-unknown-linux-gnu
os: ubuntu-latest
os: ubuntu-20.04
target_rustflags: ''
runs-on: ${{matrix.os}}
@ -79,7 +79,7 @@ jobs:
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with:
rustflags: ''
@ -170,7 +170,7 @@ jobs:
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
- name: Setup Rust toolchain and cache
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
with:
rustflags: ''

View File

@ -10,6 +10,6 @@ jobs:
uses: actions/checkout@v4
- name: Check spelling
uses: crate-ci/typos@v1.17.0
uses: crate-ci/typos@v1.16.23
with:
config: ./.github/.typos.toml

View File

@ -10,16 +10,11 @@ Welcome to Nushell and thank you for considering contributing!
- [Useful commands](#useful-commands)
- [Debugging tips](#debugging-tips)
- [Git etiquette](#git-etiquette)
- [Our Rust style](#our-rust-style)
- [Generally discouraged](#generally-discouraged)
- [Things we want to get better at](#things-we-want-to-get-better-at)
- [License](#license)
## Other helpful resources
More resources can be found in the nascent [developer documentation](devdocs/README.md) in this repo.
- [Developer FAQ](FAQ.md)
- [Platform support policy](PLATFORM_SUPPORT.md)
- [Our Rust style](devdocs/rust_style.md)
## Proposing design changes
First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it.
@ -68,74 +63,74 @@ Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/ref
- Build and run Nushell:
```nushell
```shell
cargo run
```
- Build and run with dataframe support.
```nushell
```shell
cargo run --features=dataframe
```
- Run Clippy on Nushell:
```nushell
```shell
cargo clippy --workspace -- -D warnings -D clippy::unwrap_used
```
or via the `toolkit.nu` command:
```nushell
```shell
use toolkit.nu clippy
clippy
```
- Run all tests:
```nushell
```shell
cargo test --workspace
```
along with dataframe tests
```nushell
```shell
cargo test --workspace --features=dataframe
```
or via the `toolkit.nu` command:
```nushell
```shell
use toolkit.nu test
test
```
- Run all tests for a specific command
```nushell
```shell
cargo test --package nu-cli --test main -- commands::<command_name_here>
```
- Check to see if there are code formatting issues
```nushell
```shell
cargo fmt --all -- --check
```
or via the `toolkit.nu` command:
```nushell
```shell
use toolkit.nu fmt
fmt --check
```
- Format the code in the project
```nushell
```shell
cargo fmt --all
```
or via the `toolkit.nu` command:
```nushell
```shell
use toolkit.nu fmt
fmt
```
- Set up `git` hooks to check formatting and run `clippy` before committing and pushing:
```nushell
```shell
use toolkit.nu setup-git-hooks
setup-git-hooks
```
@ -145,12 +140,12 @@ Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/ref
- To view verbose logs when developing, enable the `trace` log level.
```nushell
```shell
cargo run --release -- --log-level trace
```
- To redirect trace logs to a file, enable the `--log-target file` switch.
```nushell
```shell
cargo run --release -- --log-level trace --log-target file
open $"($nu.temp-path)/nu-($nu.pid).log"
```
@ -242,6 +237,51 @@ You can help us to make the review process a smooth experience:
- Feel free to notify your reviewers or affected PR authors if your change might cause larger conflicts with another change.
- During the rollup of multiple PRs, we may choose to resolve merge conflicts and CI failures ourselves. (Allow maintainers to push to your branch to enable us to do this quickly.)
## Our Rust style
To make collaboration on a project the scale of Nushell easy, we want to work towards a style of Rust code that can easily be understood by all of our contributors. We conservatively rely on most of [`clippy`'s suggestions](https://github.com/rust-lang/rust-clippy) to get to the holy grail of "idiomatic" code. Good code, in our eyes, is not the cleverest use of every available language feature or the most unique personal touch; it is readable and strikes a balance between being concise and being unsurprising and explicit in the places where it matters.
One example of this philosophy is that we generally avoid fighting the borrow-checker in our data model and instead try to get to a correct and simple solution first, then figure out where we should reuse data to achieve the necessary performance. As we are still pre-1.0, this has served us well, letting us quickly refactor or change larger parts of the code base.
### Generally discouraged
#### `+nightly` language features or things only available in the most recent `+stable`
To make life easier for the people who maintain Nushell packages in various distributions, each with their own release cycle for `rustc`, we typically rely on slightly older Rust versions. We do not make explicit guarantees about how far back in the past we live, but you can find out in our [`rust-toolchain.toml`](https://github.com/nushell/nushell/blob/main/rust-toolchain.toml).
(As a rule of thumb, this has typically been approximately 2 releases behind the newest stable compiler.)
The use of nightly features is prohibited.
#### Panicking
As Nushell aims to provide a reliable foundational way for folks to interact with their computer, we cannot carelessly crash the execution of their work by letting Nushell panic.
Thus panicking is not an allowed error handling strategy for anything that could be triggered by user input OR by the behavior of the outside system. If Nushell panics, this is either a bug or we are, against all odds, already in an unrecoverable state (the system stopped cooperating, we ran out of memory). The use of `.unwrap()` is thus outright banned, and any use of `.expect()` or related panicking macros like `unreachable!` should include a helpful description of which assumptions have been violated.
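A small illustrative sketch of this rule (not code from the nushell tree; the builtin-table lookup is a made-up example):

```rust
use std::collections::HashMap;

fn lookup_builtin(table: &HashMap<&str, u32>) -> u32 {
    // Banned: table.get("ls").unwrap()
    // Allowed: `.expect()` records which assumption we believe holds,
    // so a violation produces an actionable panic message.
    *table
        .get("ls")
        .expect("builtin table is populated at startup and always contains `ls`")
}

fn main() {
    let table = HashMap::from([("ls", 1u32)]);
    assert_eq!(lookup_builtin(&table), 1);
}
```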
#### `unsafe` code
Any use of `unsafe` code needs to meet even higher standards and requires additional review. If you add or alter `unsafe` blocks, you have to be familiar with the promises you need to uphold, as found in the [Rustonomicon](https://doc.rust-lang.org/nomicon/intro.html). All `unsafe` uses should include `// SAFETY:` comments explaining how the invariants are upheld, alerting you to what to watch out for when making a change.
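A minimal sketch of the expected annotation style (illustrative only, not from the nushell codebase):

```rust
/// Returns the first byte of a non-empty slice without a bounds check.
fn first_byte(bytes: &[u8]) -> u8 {
    assert!(!bytes.is_empty(), "caller must pass a non-empty slice");
    // SAFETY: the assert above guarantees that index 0 is in bounds,
    // so `get_unchecked(0)` cannot read past the end of the slice.
    unsafe { *bytes.get_unchecked(0) }
}

fn main() {
    assert_eq!(first_byte(b"nu"), b'n');
}
```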
##### FFI with system calls and the outside world
As a shell, Nushell needs to interact with system APIs in several places, for which FFI code with `unsafe` blocks may be necessary. In some cases this can be handled by safe API wrapper crates, but in others we may choose to make those calls directly.
If you do so, you need to document the system behavior on top of the Rust memory model guarantees that you uphold. This means documenting whether a particular system call is safe to use in a particular context and that all failure cases are properly recovered.
##### Implementing self-contained data structures
Another motivation for reaching for `unsafe` code might be to implement a particular data structure that is not expressible with safe `std` library APIs. Doing so in the Nushell code base would have to clear a high bar of need, backed by profiling results. You should also first survey the [crate ecosystem](https://crates.io) to confirm that there isn't already a usable, well-vetted crate providing safe APIs to the desired data structure.
##### Make things go faster by removing checks
If you feel tempted to do this, it is probably a bad idea. Don't.
#### Macros
Custom macros are another advanced feature that people feel tempted to use to work around perceived limitations of Rust's syntax, and we are not particularly fond of them.
They have clear downsides, and not only in terms of readability when they locally introduce a different syntax: most tooling apart from the compiler struggles with them, which limits, for example, consistent automatic formatting or automated refactors with `rust-analyzer`.
You are less likely to be able to read `macro_rules!` fluently than regular code. This can lead people to introduce funky behavior when using a macro, be it because the macro does not follow proper hygiene rules or because it introduces excessive work at compile time.
So we generally discourage the addition of macros. In many cases your macro may do something that could be expressed with functions or generics in a much more reusable fashion.
The only exceptions we may allow need to demonstrate that the macro fixes something that is otherwise extremely unreadable, error-prone, or consistently worse at compile time.
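As an illustrative (and deliberately trivial, non-project) comparison, a generic function usually covers the case a macro would otherwise be written for, while staying friendly to formatting, refactoring, and type checking:

```rust
// Macro version: expands to duplicated syntax that tooling understands
// less well than ordinary code.
macro_rules! max_of {
    ($a:expr, $b:expr) => {
        if $a > $b { $a } else { $b }
    };
}

// Generic function version: just as reusable, fully type checked.
fn max_of<T: PartialOrd>(a: T, b: T) -> T {
    if a > b { a } else { b }
}

fn main() {
    assert_eq!(max_of!(1, 2), 2);
    assert_eq!(max_of(1.5, 0.5), 1.5);
}
```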
### Things we want to get better at
These are things we did pretty liberally to get Nushell off the ground that now make it harder to deliver a high-quality, stable product. You may run across them, but you shouldn't take them as an endorsed example.
#### Liberal use of third-party dependencies
The amazing variety of crates on [crates.io](https://crates.io) allowed us to get Nushell into a feature-rich state quickly, but it left us with a bunch of baggage to clean up.
Each dependency introduces a compile-time cost, and duplicated code can add to the overall binary size. Vetting dependencies for correct and secure implementations also takes unreasonably more time, as it is a continuous process of reacting to updates and potential vulnerabilities.
Thus we only want to accept dependencies that are essential, well-tested implementations of a particular requirement of Nushell's codebase.
Also, as part of the move to 1.0 we will try to unify sets of dependencies that implement similar things in an area. We don't need three different crates, each a potentially perfect fit for one of three problems, but rather one reliable crate with maximal overlap between what it provides and what we need.
We will favor crates that are well tested, widely used, more stable, and still actively maintained.
#### Deeply nested code
As Nushell uses a lot of enums in its internal data representation, there are a lot of `match` expressions. Combined with the need to handle many edge cases and be defensive about any errors, this has led to some deeply nested code that is very hard to read (e.g. in the parser, but also in the implementation of several commands).
This shows up both as a "rightward drift", where the main part of the code is found after many levels of indentation, and as long function bodies with several layers of branching, with seemingly repeated branching inside the higher branch levels.
This can also be exacerbated by "quick" bugfixes/enhancements that just add a special case to catch a previously unexpected condition. The likelihood of introducing a bug in a sea of code duplication is high.
To combat this, consider using the early-`return` pattern to reject invalid data early, in one place, instead of building a tree through Rust's expression constructs with a lot of duplicated paths. Unpacking data into a type that expresses that the necessary things have already been checked, and using functions to properly separate common and distinct behavior, can also help; see the sketch below.
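A small sketch of the early-`return` style (illustrative types and checks, not code from the nushell tree):

```rust
struct Request {
    user: Option<String>,
    body: String,
}

// Nested style: the interesting work is buried two levels deep and the
// error paths are scattered across `else` branches.
fn handle_nested(req: &Request) -> Result<usize, String> {
    if let Some(user) = &req.user {
        if !req.body.is_empty() {
            Ok(user.len() + req.body.len())
        } else {
            Err("empty body".into())
        }
    } else {
        Err("missing user".into())
    }
}

// Early-return style: invalid data is rejected up front and the main
// path reads straight down at a single indentation level.
fn handle_early_return(req: &Request) -> Result<usize, String> {
    let Some(user) = &req.user else {
        return Err("missing user".into());
    };
    if req.body.is_empty() {
        return Err("empty body".into());
    }
    Ok(user.len() + req.body.len())
}

fn main() {
    let req = Request {
        user: Some("alice".into()),
        body: "ls".into(),
    };
    assert_eq!(handle_nested(&req), handle_early_return(&req));
}
```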
## License
We use the [MIT License](https://github.com/nushell/nushell/blob/main/LICENSE) in all of our Nushell projects. If you are including or referencing a crate that uses the [GPL License](https://www.gnu.org/licenses/gpl-3.0.en.html#license-text) unfortunately we will not be able to accept your PR.

Cargo.lock generated

File diff suppressed because it is too large

View File

@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
license = "MIT"
name = "nu"
repository = "https://github.com/nushell/nushell"
rust-version = "1.72.1"
version = "0.88.2"
rust-version = "1.60"
version = "0.87.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -33,11 +33,7 @@ members = [
"crates/nu-cmd-lang",
"crates/nu-cmd-dataframe",
"crates/nu-command",
"crates/nu-color-config",
"crates/nu-explore",
"crates/nu-json",
"crates/nu-lsp",
"crates/nu-pretty-hex",
"crates/nu-protocol",
"crates/nu-plugin",
"crates/nu_plugin_inc",
@ -47,36 +43,33 @@ members = [
"crates/nu_plugin_custom_values",
"crates/nu_plugin_formats",
"crates/nu-std",
"crates/nu-table",
"crates/nu-term-grid",
"crates/nu-utils",
]
[dependencies]
nu-cli = { path = "./crates/nu-cli", version = "0.88.2" }
nu-color-config = { path = "./crates/nu-color-config", version = "0.88.2" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.88.2" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.88.2" }
nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.88.2", features = ["dataframe"], optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.88.2", optional = true }
nu-command = { path = "./crates/nu-command", version = "0.88.2" }
nu-engine = { path = "./crates/nu-engine", version = "0.88.2" }
nu-explore = { path = "./crates/nu-explore", version = "0.88.2" }
nu-json = { path = "./crates/nu-json", version = "0.88.2" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.88.2" }
nu-parser = { path = "./crates/nu-parser", version = "0.88.2" }
nu-path = { path = "./crates/nu-path", version = "0.88.2" }
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.88.2" }
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.88.2" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.88.2" }
nu-system = { path = "./crates/nu-system", version = "0.88.2" }
nu-table = { path = "./crates/nu-table", version = "0.88.2" }
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.88.2" }
nu-std = { path = "./crates/nu-std", version = "0.88.2" }
nu-utils = { path = "./crates/nu-utils", version = "0.88.2" }
nu-cli = { path = "./crates/nu-cli", version = "0.87.1" }
nu-color-config = { path = "./crates/nu-color-config", version = "0.87.1" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.87.1" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.87.1" }
nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.87.1", features = ["dataframe"], optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.87.1", optional = true }
nu-command = { path = "./crates/nu-command", version = "0.87.1" }
nu-engine = { path = "./crates/nu-engine", version = "0.87.1" }
nu-explore = { path = "./crates/nu-explore", version = "0.87.1" }
nu-json = { path = "./crates/nu-json", version = "0.87.1" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.87.1" }
nu-parser = { path = "./crates/nu-parser", version = "0.87.1" }
nu-path = { path = "./crates/nu-path", version = "0.87.1" }
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.87.1" }
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.87.1" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.87.1" }
nu-system = { path = "./crates/nu-system", version = "0.87.1" }
nu-table = { path = "./crates/nu-table", version = "0.87.1" }
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.87.1" }
nu-std = { path = "./crates/nu-std", version = "0.87.1" }
nu-utils = { path = "./crates/nu-utils", version = "0.87.1" }
nu-ansi-term = "0.49.0"
reedline = { version = "0.27.0", features = ["bashisms", "sqlite"] }
reedline = { version = "0.26.0", features = ["bashisms", "sqlite"] }
crossterm = "0.27"
ctrlc = "3.4"
@ -104,7 +97,7 @@ nix = { version = "0.27", default-features = false, features = [
] }
[dev-dependencies]
nu-test-support = { path = "./crates/nu-test-support", version = "0.88.2" }
nu-test-support = { path = "./crates/nu-test-support", version = "0.87.1" }
assert_cmd = "2.0"
criterion = "0.5"
pretty_assertions = "1.4"
@ -173,7 +166,7 @@ bench = false
# To use a development version of a dependency please use a global override here
# changing versions in each sub-crate of the workspace is tedious
[patch.crates-io]
reedline = { git = "https://github.com/nushell/reedline.git", branch = "main" }
# reedline = { git = "https://github.com/nushell/reedline.git", branch = "main" }
# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}
# uu_cp = { git = "https://github.com/uutils/coreutils.git", branch = "main" }

View File

@ -54,7 +54,7 @@ Detailed installation instructions can be found in the [installation chapter of
[![Packaging status](https://repology.org/badge/vertical-allrepos/nushell.svg)](https://repology.org/project/nushell/versions)
For details about which platforms the Nushell team actively supports, see [our platform support policy](devdocs/PLATFORM_SUPPORT.md).
For details about which platforms the Nushell team actively supports, see [our platform support policy](PLATFORM_SUPPORT.md).
## Configuration
@ -199,7 +199,7 @@ topics that have been presented.
Nu adheres closely to a set of goals that make up its design philosophy. As features are added, they are checked against these goals.
- First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has [first-class support for Windows, macOS, and Linux](devdocs/PLATFORM_SUPPORT.md).
- First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has [first-class support for Windows, macOS, and Linux](PLATFORM_SUPPORT.md).
- Nu ensures compatibility with existing platform-specific executables.

View File

@ -4,31 +4,10 @@ use nu_parser::parse;
use nu_plugin::{EncodingType, PluginResponse};
use nu_protocol::{engine::EngineState, PipelineData, Span, Value};
use nu_utils::{get_default_config, get_default_env};
use std::path::{Path, PathBuf};
fn load_bench_commands() -> EngineState {
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
}
fn canonicalize_path(engine_state: &EngineState, path: &Path) -> PathBuf {
let cwd = engine_state.current_work_dir();
if path.exists() {
match nu_path::canonicalize_with(path, cwd) {
Ok(canon_path) => canon_path,
Err(_) => path.to_owned(),
}
} else {
path.to_owned()
}
}
fn get_home_path(engine_state: &EngineState) -> PathBuf {
nu_path::home_dir()
.map(|path| canonicalize_path(engine_state, &path))
.unwrap_or_default()
}
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
// When the *_benchmarks functions were in different files, `cargo bench` would build
// an executable for every single one - incredibly slowly. Would be nice to figure out
@ -36,12 +15,10 @@ fn get_home_path(engine_state: &EngineState) -> PathBuf {
fn parser_benchmarks(c: &mut Criterion) {
let mut engine_state = load_bench_commands();
let home_path = get_home_path(&engine_state);
// parsing config.nu breaks without PWD set, so set a valid path
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string(home_path.to_string_lossy(), Span::test_data()),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let default_env = get_default_env().as_bytes();
@ -64,6 +41,7 @@ fn parser_benchmarks(c: &mut Criterion) {
c.bench_function("eval default_env.nu", |b| {
b.iter(|| {
let mut engine_state = load_bench_commands();
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
@ -78,6 +56,12 @@ fn parser_benchmarks(c: &mut Criterion) {
c.bench_function("eval default_config.nu", |b| {
b.iter(|| {
let mut engine_state = load_bench_commands();
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
@ -92,17 +76,9 @@ fn parser_benchmarks(c: &mut Criterion) {
}
fn eval_benchmarks(c: &mut Criterion) {
let mut engine_state = load_bench_commands();
let home_path = get_home_path(&engine_state);
// parsing config.nu breaks without PWD set, so set a valid path
engine_state.add_env_var(
"PWD".into(),
Value::string(home_path.to_string_lossy(), Span::test_data()),
);
c.bench_function("eval default_env.nu", |b| {
b.iter(|| {
let mut engine_state = load_bench_commands();
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
@ -117,6 +93,12 @@ fn eval_benchmarks(c: &mut Criterion) {
c.bench_function("eval default_config.nu", |b| {
b.iter(|| {
let mut engine_state = load_bench_commands();
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,

View File

@ -5,31 +5,31 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
edition = "2021"
license = "MIT"
name = "nu-cli"
version = "0.88.2"
version = "0.87.1"
[lib]
bench = false
[dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.88.2" }
nu-command = { path = "../nu-command", version = "0.88.2" }
nu-test-support = { path = "../nu-test-support", version = "0.88.2" }
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.87.1" }
nu-command = { path = "../nu-command", version = "0.87.1" }
nu-test-support = { path = "../nu-test-support", version = "0.87.1" }
rstest = { version = "0.18.1", default-features = false }
[dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.88.2" }
nu-engine = { path = "../nu-engine", version = "0.88.2" }
nu-path = { path = "../nu-path", version = "0.88.2" }
nu-parser = { path = "../nu-parser", version = "0.88.2" }
nu-protocol = { path = "../nu-protocol", version = "0.88.2" }
nu-utils = { path = "../nu-utils", version = "0.88.2" }
nu-color-config = { path = "../nu-color-config", version = "0.88.2" }
nu-cmd-base = { path = "../nu-cmd-base", version = "0.87.1" }
nu-engine = { path = "../nu-engine", version = "0.87.1" }
nu-path = { path = "../nu-path", version = "0.87.1" }
nu-parser = { path = "../nu-parser", version = "0.87.1" }
nu-protocol = { path = "../nu-protocol", version = "0.87.1" }
nu-utils = { path = "../nu-utils", version = "0.87.1" }
nu-color-config = { path = "../nu-color-config", version = "0.87.1" }
nu-ansi-term = "0.49.0"
reedline = { version = "0.27.0", features = ["bashisms", "sqlite"] }
reedline = { version = "0.26.0", features = ["bashisms", "sqlite"] }
chrono = { default-features = false, features = ["std"], version = "0.4" }
crossterm = "0.27"
fancy-regex = "0.12"
fancy-regex = "0.11"
fuzzy-matcher = "0.3"
is_executable = "1.0"
log = "0.4"
@ -37,10 +37,9 @@ miette = { version = "5.10", features = ["fancy-no-backtrace"] }
once_cell = "1.18"
percent-encoding = "2"
pathdiff = "0.2"
sysinfo = "0.30"
sysinfo = "0.29"
unicode-segmentation = "1.10"
uuid = { version = "1.6.0", features = ["v4"] }
which = "5.0.0"
uuid = { version = "1.5.0", features = ["v4"] }
[features]
plugin = []

View File

@ -34,7 +34,7 @@ impl Command for History {
"Show long listing of entries for sqlite history",
Some('l'),
)
.category(Category::History)
.category(Category::Misc)
}
fn run(
@ -107,7 +107,7 @@ impl Command for History {
)
})
})
.ok_or(ShellError::FileNotFound { span: head })?
.ok_or(ShellError::FileNotFound(head))?
.into_pipeline_data(ctrlc)),
HistoryFileFormat::Sqlite => Ok(history_reader
.and_then(|h| {
@ -119,12 +119,12 @@ impl Command for History {
create_history_record(idx, entry, long, head)
})
})
.ok_or(ShellError::FileNotFound { span: head })?
.ok_or(ShellError::FileNotFound(head))?
.into_pipeline_data(ctrlc)),
}
}
} else {
Err(ShellError::FileNotFound { span: head })
Err(ShellError::FileNotFound(head))
}
}

View File

@ -1,5 +0,0 @@
mod history_;
mod history_session;
pub use history_::History;
pub use history_session::HistorySession;

View File

@ -18,7 +18,7 @@ impl Command for HistorySession {
fn signature(&self) -> nu_protocol::Signature {
Signature::build("history session")
.category(Category::History)
.category(Category::Misc)
.input_output_types(vec![(Type::Nothing, Type::Int)])
}

View File

@ -45,13 +45,13 @@ impl Command for KeybindingsListen {
Ok(v) => Ok(v.into_pipeline_data()),
Err(e) => {
terminal::disable_raw_mode()?;
Err(ShellError::GenericError {
error: "Error with input".into(),
msg: "".into(),
span: None,
help: Some(e.to_string()),
inner: vec![],
})
Err(ShellError::GenericError(
"Error with input".to_string(),
"".to_string(),
None,
Some(e.to_string()),
Vec::new(),
))
}
}
}

View File

@ -1,13 +1,15 @@
mod commandline;
mod default_context;
mod history;
mod history_session;
mod keybindings;
mod keybindings_default;
mod keybindings_list;
mod keybindings_listen;
pub use commandline::Commandline;
pub use history::{History, HistorySession};
pub use history::History;
pub use history_session::HistorySession;
pub use keybindings::Keybindings;
pub use keybindings_default::KeybindingsDefault;
pub use keybindings_list::KeybindingsList;

View File

@ -67,7 +67,7 @@ impl NuCompleter {
let mut callee_stack = stack.gather_captures(&self.engine_state, &block.captures);
// Line
if let Some(pos_arg) = block.signature.required_positional.first() {
if let Some(pos_arg) = block.signature.required_positional.get(0) {
if let Some(var_id) = pos_arg.var_id {
callee_stack.add_var(
var_id,
@ -122,13 +122,11 @@ impl NuCompleter {
for pipeline_element in pipeline.elements {
match pipeline_element {
PipelineElement::Expression(_, expr)
| PipelineElement::Redirection(_, _, expr, _)
| PipelineElement::Redirection(_, _, expr)
| PipelineElement::And(_, expr)
| PipelineElement::Or(_, expr)
| PipelineElement::SameTargetRedirection { cmd: (_, expr), .. }
| PipelineElement::SeparateRedirection {
out: (_, expr, _), ..
} => {
| PipelineElement::SeparateRedirection { out: (_, expr), .. } => {
let flattened: Vec<_> = flatten_expression(&working_set, &expr);
let mut spans: Vec<String> = vec![];
@ -143,24 +141,18 @@ impl NuCompleter {
let current_span = working_set.get_span_contents(flat.0).to_vec();
let current_span_str = String::from_utf8_lossy(&current_span);
let is_last_span = pos >= flat.0.start && pos < flat.0.end;
// Skip the last 'a' as span item
if is_last_span {
let offset = pos - flat.0.start;
if offset == 0 {
spans.push(String::new())
} else {
let mut current_span_str = current_span_str.to_string();
current_span_str.remove(offset);
spans.push(current_span_str);
}
if flat_idx == flattened.len() - 1 {
let mut chars = current_span_str.chars();
chars.next_back();
let current_span_str = chars.as_str().to_owned();
spans.push(current_span_str.to_string());
} else {
spans.push(current_span_str.to_string());
}
// Complete based on the last span
if is_last_span {
if pos >= flat.0.start && pos < flat.0.end {
// Context variables
let most_left_var =
most_left_variable(flat_idx, &working_set, flattened.clone());
@ -250,9 +242,7 @@ impl NuCompleter {
working_set.get_span_contents(previous_expr.0).to_vec();
// Completion for .nu files
if prev_expr_str == b"use"
|| prev_expr_str == b"overlay use"
|| prev_expr_str == b"source-env"
if prev_expr_str == b"use" || prev_expr_str == b"source-env"
{
let mut completer =
DotNuCompletion::new(self.engine_state.clone());
@ -354,7 +344,9 @@ impl NuCompleter {
if let Some(external_result) = self.external_completion(
block_id, &spans, offset, new_span,
) {
return external_result;
if !external_result.is_empty() {
return external_result;
}
}
}

View File

@ -22,10 +22,7 @@ fn complete_rec(
Some(base) if matches(base, &entry_name, options) => {
let partial = &partial[1..];
if !partial.is_empty() || isdir {
completions.extend(complete_rec(partial, &path, options, dir, isdir));
if entry_name.eq(base) {
break;
}
completions.extend(complete_rec(partial, &path, options, dir, isdir))
} else {
completions.push(path)
}

View File

@ -5,7 +5,7 @@ use nu_protocol::{
};
use reedline::Suggestion;
use std::{
path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
path::{is_separator, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
sync::Arc,
};
@ -91,21 +91,16 @@ impl Completer for DotNuCompletion {
// and transform them into suggestions
let output: Vec<Suggestion> = search_dirs
.into_iter()
.flat_map(|search_dir| {
let completions = file_path_completion(span, &partial, &search_dir, options);
completions
.flat_map(|it| {
file_path_completion(span, &partial, &it, options)
.into_iter()
.filter(move |it| {
.filter(|it| {
// Different base dir, so we list the .nu files or folders
if !is_current_folder {
it.1.ends_with(".nu") || it.1.ends_with(SEP)
} else {
// Lib dirs, so we filter only the .nu files or directory modules
if it.1.ends_with(SEP) {
Path::new(&search_dir).join(&it.1).join("mod.nu").exists()
} else {
it.1.ends_with(".nu")
}
// Lib dirs, so we filter only the .nu files
it.1.ends_with(".nu")
}
})
.map(move |x| Suggestion {

View File

@ -35,10 +35,6 @@ pub fn evaluate_commands(
let mut working_set = StateWorkingSet::new(engine_state);
let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
if let Some(warning) = working_set.parse_warnings.first() {
report_error(&working_set, warning);
}
if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err);

View File

@ -35,10 +35,10 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::FileNotFoundCustom {
msg: format!("Could not access file '{}': {:?}", path, e.to_string()),
span: Span::unknown(),
},
&ShellError::FileNotFoundCustom(
format!("Could not access file '{}': {:?}", path, e.to_string()),
Span::unknown(),
),
);
std::process::exit(1);
});
@ -47,13 +47,13 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::NonUtf8Custom {
msg: format!(
&ShellError::NonUtf8Custom(
format!(
"Input file name '{}' is not valid UTF8",
file_path.to_string_lossy()
),
span: Span::unknown(),
},
Span::unknown(),
),
);
std::process::exit(1);
});
@ -64,14 +64,14 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::FileNotFoundCustom {
msg: format!(
&ShellError::FileNotFoundCustom(
format!(
"Could not read file '{}': {:?}",
file_path_str,
e.to_string()
),
span: Span::unknown(),
},
Span::unknown(),
),
);
std::process::exit(1);
});
@ -82,10 +82,10 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::FileNotFoundCustom {
msg: format!("The file path '{file_path_str}' does not have a parent"),
span: Span::unknown(),
},
&ShellError::FileNotFoundCustom(
format!("The file path '{file_path_str}' does not have a parent"),
Span::unknown(),
),
);
std::process::exit(1);
});
@ -98,10 +98,6 @@ pub fn evaluate_file(
"CURRENT_FILE".to_string(),
Value::string(file_path.to_string_lossy(), Span::unknown()),
);
stack.add_env_var(
"PROCESS_PATH".to_string(),
Value::string(path, Span::unknown()),
);
let source_filename = file_path
.file_name()
@ -139,7 +135,7 @@ pub fn evaluate_file(
false,
);
let pipeline_data = match pipeline_data {
Err(ShellError::Return { .. }) => {
Err(ShellError::Return(_, _)) => {
// allows early exits before `main` is run.
return Ok(());
}

View File

@ -1,4 +1,3 @@
use crate::prompt_update::{POST_PROMPT_MARKER, PRE_PROMPT_MARKER};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
use reedline::DefaultPrompt;
@ -12,7 +11,6 @@ use {
/// Nushell prompt definition
#[derive(Clone)]
pub struct NushellPrompt {
shell_integration: bool,
left_prompt_string: Option<String>,
right_prompt_string: Option<String>,
default_prompt_indicator: Option<String>,
@ -22,10 +20,15 @@ pub struct NushellPrompt {
render_right_prompt_on_last_line: bool,
}
impl Default for NushellPrompt {
fn default() -> Self {
NushellPrompt::new()
}
}
impl NushellPrompt {
pub fn new(shell_integration: bool) -> NushellPrompt {
pub fn new() -> NushellPrompt {
NushellPrompt {
shell_integration,
left_prompt_string: None,
right_prompt_string: None,
default_prompt_indicator: None,
@ -108,11 +111,7 @@ impl Prompt for NushellPrompt {
.to_string()
.replace('\n', "\r\n");
if self.shell_integration {
format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
} else {
prompt.into()
}
prompt.into()
}
}

View File

@ -7,6 +7,8 @@ use nu_protocol::{
Config, PipelineData, Value,
};
use reedline::Prompt;
use std::borrow::Cow;
use std::sync::Arc;
// Name of environment variable where the prompt could be stored
pub(crate) const PROMPT_COMMAND: &str = "PROMPT_COMMAND";
@ -26,8 +28,8 @@ pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
"TRANSIENT_PROMPT_MULTILINE_INDICATOR";
// According to Daniel Imms @Tyriar, we need to do these this way:
// <133 A><prompt><133 B><command><133 C><command output>
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
fn get_prompt_string(
prompt: &str,
@ -96,12 +98,12 @@ fn get_prompt_string(
})
}
pub(crate) fn update_prompt(
pub(crate) fn update_prompt<'prompt>(
config: &Config,
engine_state: &EngineState,
stack: &Stack,
nu_prompt: &mut NushellPrompt,
) {
nu_prompt: &'prompt mut NushellPrompt,
) -> &'prompt dyn Prompt {
let mut stack = stack.clone();
let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, &mut stack);
@ -144,55 +146,125 @@ pub(crate) fn update_prompt(
(prompt_vi_insert_string, prompt_vi_normal_string),
config.render_right_prompt_on_last_line,
);
let ret_val = nu_prompt as &dyn Prompt;
trace!("update_prompt {}:{}:{}", file!(), line!(), column!());
ret_val
}
/// Construct the transient prompt based on the normal nu_prompt
pub(crate) fn make_transient_prompt(
struct TransientPrompt {
engine_state: Arc<EngineState>,
stack: Stack,
}
/// Try getting `$env.TRANSIENT_PROMPT_<X>`, and get `$env.PROMPT_<X>` if that fails
fn get_transient_prompt_string(
transient_prompt: &str,
prompt: &str,
config: &Config,
engine_state: &EngineState,
stack: &mut Stack,
nu_prompt: &NushellPrompt,
) -> Box<dyn Prompt> {
let mut nu_prompt = nu_prompt.clone();
if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_COMMAND, config, engine_state, stack) {
nu_prompt.update_prompt_left(Some(s))
}
if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_COMMAND_RIGHT, config, engine_state, stack)
{
nu_prompt.update_prompt_right(Some(s), config.render_right_prompt_on_last_line)
}
if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_INDICATOR, config, engine_state, stack) {
nu_prompt.update_prompt_indicator(Some(s))
}
if let Some(s) = get_prompt_string(
TRANSIENT_PROMPT_INDICATOR_VI_INSERT,
config,
engine_state,
stack,
) {
nu_prompt.update_prompt_vi_insert(Some(s))
}
if let Some(s) = get_prompt_string(
TRANSIENT_PROMPT_INDICATOR_VI_NORMAL,
config,
engine_state,
stack,
) {
nu_prompt.update_prompt_vi_normal(Some(s))
}
if let Some(s) = get_prompt_string(
TRANSIENT_PROMPT_MULTILINE_INDICATOR,
config,
engine_state,
stack,
) {
nu_prompt.update_prompt_multiline(Some(s))
}
Box::new(nu_prompt)
) -> Option<String> {
get_prompt_string(transient_prompt, config, engine_state, stack)
.or_else(|| get_prompt_string(prompt, config, engine_state, stack))
}
impl Prompt for TransientPrompt {
fn render_prompt_left(&self) -> Cow<str> {
let mut nu_prompt = NushellPrompt::new();
let config = &self.engine_state.get_config().clone();
let mut stack = self.stack.clone();
nu_prompt.update_prompt_left(get_transient_prompt_string(
TRANSIENT_PROMPT_COMMAND,
PROMPT_COMMAND,
config,
&self.engine_state,
&mut stack,
));
nu_prompt.render_prompt_left().to_string().into()
}
fn render_prompt_right(&self) -> Cow<str> {
let mut nu_prompt = NushellPrompt::new();
let config = &self.engine_state.get_config().clone();
let mut stack = self.stack.clone();
nu_prompt.update_prompt_right(
get_transient_prompt_string(
TRANSIENT_PROMPT_COMMAND_RIGHT,
PROMPT_COMMAND_RIGHT,
config,
&self.engine_state,
&mut stack,
),
config.render_right_prompt_on_last_line,
);
nu_prompt.render_prompt_right().to_string().into()
}
fn render_prompt_indicator(&self, prompt_mode: reedline::PromptEditMode) -> Cow<str> {
let mut nu_prompt = NushellPrompt::new();
let config = &self.engine_state.get_config().clone();
let mut stack = self.stack.clone();
nu_prompt.update_prompt_indicator(get_transient_prompt_string(
TRANSIENT_PROMPT_INDICATOR,
PROMPT_INDICATOR,
config,
&self.engine_state,
&mut stack,
));
nu_prompt.update_prompt_vi_insert(get_transient_prompt_string(
TRANSIENT_PROMPT_INDICATOR_VI_INSERT,
PROMPT_INDICATOR_VI_INSERT,
config,
&self.engine_state,
&mut stack,
));
nu_prompt.update_prompt_vi_normal(get_transient_prompt_string(
TRANSIENT_PROMPT_INDICATOR_VI_NORMAL,
PROMPT_INDICATOR_VI_NORMAL,
config,
&self.engine_state,
&mut stack,
));
nu_prompt
.render_prompt_indicator(prompt_mode)
.to_string()
.into()
}
fn render_prompt_multiline_indicator(&self) -> Cow<str> {
let mut nu_prompt = NushellPrompt::new();
let config = &self.engine_state.get_config().clone();
let mut stack = self.stack.clone();
nu_prompt.update_prompt_multiline(get_transient_prompt_string(
TRANSIENT_PROMPT_MULTILINE_INDICATOR,
PROMPT_MULTILINE_INDICATOR,
config,
&self.engine_state,
&mut stack,
));
nu_prompt
.render_prompt_multiline_indicator()
.to_string()
.into()
}
fn render_prompt_history_search_indicator(
&self,
history_search: reedline::PromptHistorySearch,
) -> Cow<str> {
NushellPrompt::new()
.render_prompt_history_search_indicator(history_search)
.to_string()
.into()
}
}
/// Construct the transient prompt
pub(crate) fn transient_prompt(engine_state: Arc<EngineState>, stack: &Stack) -> Box<dyn Prompt> {
Box::new(TransientPrompt {
engine_state,
stack: stack.clone(),
})
}

View File

@ -139,18 +139,18 @@ fn add_menu(
"columnar" => add_columnar_menu(line_editor, menu, engine_state, stack, config),
"list" => add_list_menu(line_editor, menu, engine_state, stack, config),
"description" => add_description_menu(line_editor, menu, engine_state, stack, config),
_ => Err(ShellError::UnsupportedConfigValue {
expected: "columnar, list or description".to_string(),
value: menu.menu_type.into_abbreviated_string(config),
span: menu.menu_type.span(),
}),
_ => Err(ShellError::UnsupportedConfigValue(
"columnar, list or description".to_string(),
menu.menu_type.into_abbreviated_string(config),
menu.menu_type.span(),
)),
}
} else {
Err(ShellError::UnsupportedConfigValue {
expected: "only record type".to_string(),
value: menu.menu_type.into_abbreviated_string(config),
span: menu.menu_type.span(),
})
Err(ShellError::UnsupportedConfigValue(
"only record type".to_string(),
menu.menu_type.into_abbreviated_string(config),
menu.menu_type.span(),
))
}
}
@ -261,11 +261,11 @@ pub(crate) fn add_columnar_menu(
completer: Box::new(menu_completer),
}))
}
_ => Err(ShellError::UnsupportedConfigValue {
expected: "block or omitted value".to_string(),
value: menu.source.into_abbreviated_string(config),
_ => Err(ShellError::UnsupportedConfigValue(
"block or omitted value".to_string(),
menu.source.into_abbreviated_string(config),
span,
}),
)),
}
}
@ -343,11 +343,11 @@ pub(crate) fn add_list_menu(
completer: Box::new(menu_completer),
}))
}
_ => Err(ShellError::UnsupportedConfigValue {
expected: "block or omitted value".to_string(),
value: menu.source.into_abbreviated_string(config),
span: menu.source.span(),
}),
_ => Err(ShellError::UnsupportedConfigValue(
"block or omitted value".to_string(),
menu.source.into_abbreviated_string(config),
menu.source.span(),
)),
}
}
@ -461,11 +461,11 @@ pub(crate) fn add_description_menu(
completer: Box::new(menu_completer),
}))
}
_ => Err(ShellError::UnsupportedConfigValue {
expected: "closure or omitted value".to_string(),
value: menu.source.into_abbreviated_string(config),
span: menu.source.span(),
}),
_ => Err(ShellError::UnsupportedConfigValue(
"closure or omitted value".to_string(),
menu.source.into_abbreviated_string(config),
menu.source.span(),
)),
}
}
@ -580,11 +580,11 @@ fn add_keybinding(
"emacs" => add_parsed_keybinding(emacs_keybindings, keybinding, config),
"vi_insert" => add_parsed_keybinding(insert_keybindings, keybinding, config),
"vi_normal" => add_parsed_keybinding(normal_keybindings, keybinding, config),
m => Err(ShellError::UnsupportedConfigValue {
expected: "emacs, vi_insert or vi_normal".to_string(),
value: m.to_string(),
m => Err(ShellError::UnsupportedConfigValue(
"emacs, vi_insert or vi_normal".to_string(),
m.to_string(),
span,
}),
)),
},
Value::List { vals, .. } => {
for inner_mode in vals {
@ -600,11 +600,11 @@ fn add_keybinding(
Ok(())
}
v => Err(ShellError::UnsupportedConfigValue {
expected: "string or list of strings".to_string(),
value: v.into_abbreviated_string(config),
span: v.span(),
}),
v => Err(ShellError::UnsupportedConfigValue(
"string or list of strings".to_string(),
v.into_abbreviated_string(config),
v.span(),
)),
}
}
@ -630,11 +630,11 @@ fn add_parsed_keybinding(
KeyModifiers::CONTROL | KeyModifiers::ALT | KeyModifiers::SHIFT
}
_ => {
return Err(ShellError::UnsupportedConfigValue {
expected: "CONTROL, SHIFT, ALT or NONE".to_string(),
value: keybinding.modifier.into_abbreviated_string(config),
span: keybinding.modifier.span(),
})
return Err(ShellError::UnsupportedConfigValue(
"CONTROL, SHIFT, ALT or NONE".to_string(),
keybinding.modifier.into_abbreviated_string(config),
keybinding.modifier.span(),
))
}
};
@ -654,11 +654,11 @@ fn add_parsed_keybinding(
let char = if let (Some(char), None) = (pos1, pos2) {
char
} else {
return Err(ShellError::UnsupportedConfigValue {
expected: "char_<CHAR: unicode codepoint>".to_string(),
value: c.to_string(),
span: keybinding.keycode.span(),
});
return Err(ShellError::UnsupportedConfigValue(
"char_<CHAR: unicode codepoint>".to_string(),
c.to_string(),
keybinding.keycode.span(),
));
};
KeyCode::Char(char)
@ -681,21 +681,21 @@ fn add_parsed_keybinding(
.parse()
.ok()
.filter(|num| matches!(num, 1..=20))
.ok_or(ShellError::UnsupportedConfigValue {
expected: "(f1|f2|...|f20)".to_string(),
value: format!("unknown function key: {c}"),
span: keybinding.keycode.span(),
})?;
.ok_or(ShellError::UnsupportedConfigValue(
"(f1|f2|...|f20)".to_string(),
format!("unknown function key: {c}"),
keybinding.keycode.span(),
))?;
KeyCode::F(fn_num)
}
"null" => KeyCode::Null,
"esc" | "escape" => KeyCode::Esc,
_ => {
return Err(ShellError::UnsupportedConfigValue {
expected: "crossterm KeyCode".to_string(),
value: keybinding.keycode.into_abbreviated_string(config),
span: keybinding.keycode.span(),
})
return Err(ShellError::UnsupportedConfigValue(
"crossterm KeyCode".to_string(),
keybinding.keycode.into_abbreviated_string(config),
keybinding.keycode.span(),
))
}
};
if let Some(event) = parse_event(&keybinding.event, config)? {
@ -719,10 +719,7 @@ impl<'config> EventType<'config> {
.map(Self::Send)
.or_else(|_| extract_value("edit", record, span).map(Self::Edit))
.or_else(|_| extract_value("until", record, span).map(Self::Until))
.map_err(|_| ShellError::MissingConfigValue {
missing_value: "send, edit or until".to_string(),
span,
})
.map_err(|_| ShellError::MissingConfigValue("send, edit or until".to_string(), span))
}
}
@ -752,11 +749,11 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
.iter()
.map(|value| match parse_event(value, config) {
Ok(inner) => match inner {
None => Err(ShellError::UnsupportedConfigValue {
expected: "List containing valid events".to_string(),
value: "Nothing value (null)".to_string(),
span: value.span(),
}),
None => Err(ShellError::UnsupportedConfigValue(
"List containing valid events".to_string(),
"Nothing value (null)".to_string(),
value.span(),
)),
Some(event) => Ok(event),
},
Err(e) => Err(e),
@ -765,11 +762,11 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
Ok(Some(ReedlineEvent::UntilFound(events)))
}
v => Err(ShellError::UnsupportedConfigValue {
expected: "list of events".to_string(),
value: v.into_abbreviated_string(config),
span: v.span(),
}),
v => Err(ShellError::UnsupportedConfigValue(
"list of events".to_string(),
v.into_abbreviated_string(config),
v.span(),
)),
},
},
Value::List { vals, .. } => {
@ -777,11 +774,11 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
.iter()
.map(|value| match parse_event(value, config) {
Ok(inner) => match inner {
None => Err(ShellError::UnsupportedConfigValue {
expected: "List containing valid events".to_string(),
value: "Nothing value (null)".to_string(),
span: value.span(),
}),
None => Err(ShellError::UnsupportedConfigValue(
"List containing valid events".to_string(),
"Nothing value (null)".to_string(),
value.span(),
)),
Some(event) => Ok(event),
},
Err(e) => Err(e),
@ -791,11 +788,11 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
Ok(Some(ReedlineEvent::Multiple(events)))
}
Value::Nothing { .. } => Ok(None),
v => Err(ShellError::UnsupportedConfigValue {
expected: "record or list of records, null to unbind key".to_string(),
value: v.into_abbreviated_string(config),
span: v.span(),
}),
v => Err(ShellError::UnsupportedConfigValue(
"record or list of records, null to unbind key".to_string(),
v.into_abbreviated_string(config),
v.span(),
)),
}
}
@ -843,11 +840,11 @@ fn event_from_record(
ReedlineEvent::ExecuteHostCommand(cmd.into_string("", config))
}
v => {
return Err(ShellError::UnsupportedConfigValue {
expected: "Reedline event".to_string(),
value: v.to_string(),
return Err(ShellError::UnsupportedConfigValue(
"Reedline event".to_string(),
v.to_string(),
span,
})
))
}
};
@ -957,11 +954,11 @@ fn edit_from_record(
}
"complete" => EditCommand::Complete,
e => {
return Err(ShellError::UnsupportedConfigValue {
expected: "reedline EditCommand".to_string(),
value: e.to_string(),
return Err(ShellError::UnsupportedConfigValue(
"reedline EditCommand".to_string(),
e.to_string(),
span,
})
))
}
};
@ -974,10 +971,7 @@ fn extract_char(value: &Value, config: &Config) -> Result<char, ShellError> {
.into_string("", config)
.chars()
.next()
.ok_or_else(|| ShellError::MissingConfigValue {
missing_value: "char to insert".to_string(),
span,
})
.ok_or_else(|| ShellError::MissingConfigValue("char to insert".to_string(), span))
}
#[cfg(test)]
@ -1107,6 +1101,6 @@ mod test {
let span = Span::test_data();
let b = EventType::try_from_record(&event, span);
assert!(matches!(b, Err(ShellError::MissingConfigValue { .. })));
assert!(matches!(b, Err(ShellError::MissingConfigValue(_, _))));
}
}

View File

@ -32,7 +32,7 @@ use std::{
sync::atomic::Ordering,
time::Instant,
};
use sysinfo::System;
use sysinfo::SystemExt;
// According to Daniel Imms @Tyriar, we need to do these this way:
// <133 A><prompt><133 B><command><133 C><command output>
@ -54,8 +54,7 @@ pub fn evaluate_repl(
) -> Result<()> {
use nu_cmd_base::hook;
use reedline::Signal;
let config = engine_state.get_config();
let use_color = config.use_ansi_coloring;
let use_color = engine_state.get_config().use_ansi_coloring;
// Guard against invocation without a connected terminal.
// reedline / crossterm event polling will fail without a connected tty
@ -69,7 +68,7 @@ pub fn evaluate_repl(
let mut entry_num = 0;
let mut nu_prompt = NushellPrompt::new(config.shell_integration);
let mut nu_prompt = NushellPrompt::new();
let start_time = std::time::Instant::now();
// Translate environment variables from Strings to Values
@ -135,6 +134,17 @@ pub fn evaluate_repl(
use_color,
);
start_time = std::time::Instant::now();
let sys = sysinfo::System::new();
perf(
"get sysinfo",
start_time,
file!(),
line!(),
column!(),
use_color,
);
if let Some(s) = prerun_command {
eval_source(
engine_state,
@ -202,6 +212,20 @@ pub fn evaluate_repl(
use_color,
);
start_time = std::time::Instant::now();
// Reset the SIGQUIT handler
if let Some(sig_quit) = engine_state.get_sig_quit() {
sig_quit.store(false, Ordering::SeqCst);
}
perf(
"reset sig_quit",
start_time,
file!(),
line!(),
column!(),
use_color,
);
start_time = std::time::Instant::now();
let config = engine_state.get_config();
@ -243,7 +267,11 @@ pub fn evaluate_repl(
.with_quick_completions(config.quick_completions)
.with_partial_completions(config.partial_completions)
.with_ansi_colors(config.use_ansi_coloring)
.with_cursor_config(cursor_config);
.with_cursor_config(cursor_config)
.with_transient_prompt(prompt_update::transient_prompt(
engine_reference.clone(),
stack,
));
perf(
"reedline builder",
start_time,
@ -396,9 +424,7 @@ pub fn evaluate_repl(
start_time = std::time::Instant::now();
let config = &engine_state.get_config().clone();
prompt_update::update_prompt(config, engine_state, stack, &mut nu_prompt);
let transient_prompt =
prompt_update::make_transient_prompt(config, engine_state, stack, &nu_prompt);
let prompt = prompt_update::update_prompt(config, engine_state, stack, &mut nu_prompt);
perf(
"update_prompt",
start_time,
@ -411,13 +437,12 @@ pub fn evaluate_repl(
entry_num += 1;
start_time = std::time::Instant::now();
line_editor = line_editor.with_transient_prompt(transient_prompt);
let input = line_editor.read_line(&nu_prompt);
let input = line_editor.read_line(prompt);
let shell_integration = config.shell_integration;
match input {
Ok(Signal::Success(s)) => {
let hostname = System::host_name();
let hostname = sys.host_name();
let history_supports_meta =
matches!(config.history_file_format, HistoryFileFormat::Sqlite);
if history_supports_meta && !s.is_empty() && line_editor.has_last_command_context()
@ -477,10 +502,10 @@ pub fn evaluate_repl(
report_error(
&working_set,
&ShellError::DirectoryNotFound {
dir: path.to_string_lossy().to_string(),
span: tokens.0[0].span,
},
&ShellError::DirectoryNotFound(
tokens.0[0].span,
path.to_string_lossy().to_string(),
),
);
}
let path = nu_path::canonicalize_with(path, &cwd)
@ -739,7 +764,7 @@ fn map_nucursorshape_to_cursorshape(shape: NuCursorShape) -> Option<SetCursorSty
}
}
fn get_command_finished_marker(stack: &Stack, engine_state: &EngineState) -> String {
pub fn get_command_finished_marker(stack: &Stack, engine_state: &EngineState) -> String {
let exit_code = stack
.get_env_var(engine_state, "LAST_EXIT_CODE")
.and_then(|e| e.as_i64().ok());
@ -748,21 +773,23 @@ fn get_command_finished_marker(stack: &Stack, engine_state: &EngineState) -> Str
}
fn run_ansi_sequence(seq: &str) -> Result<(), ShellError> {
io::stdout()
.write_all(seq.as_bytes())
.map_err(|e| ShellError::GenericError {
error: "Error writing ansi sequence".into(),
msg: e.to_string(),
span: Some(Span::unknown()),
help: None,
inner: vec![],
})?;
io::stdout().flush().map_err(|e| ShellError::GenericError {
error: "Error flushing stdio".into(),
msg: e.to_string(),
span: Some(Span::unknown()),
help: None,
inner: vec![],
io::stdout().write_all(seq.as_bytes()).map_err(|e| {
ShellError::GenericError(
"Error writing ansi sequence".into(),
e.to_string(),
Some(Span::unknown()),
None,
Vec::new(),
)
})?;
io::stdout().flush().map_err(|e| {
ShellError::GenericError(
"Error flushing stdio".into(),
e.to_string(),
Some(Span::unknown()),
None,
Vec::new(),
)
})
}
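
Most of the churn in this file, and in many of the files below, is ShellError::GenericError changing shape: the hunks replace the named-struct form used on main with the positional tuple form used on 0.87.1, carrying the same five pieces of data in the same order. A minimal, self-contained sketch of the two shapes, using hypothetical stand-ins (MockError, MockSpan) rather than the real nu_protocol types:

// Stand-ins for nu_protocol::ShellError and Span, named MockError/MockSpan here
// only to contrast the two variant shapes seen in the hunks above.
#[derive(Debug)]
struct MockSpan {
    start: usize,
    end: usize,
}

#[derive(Debug)]
enum MockError {
    // Named-struct form (the shape on main / 0.88.2).
    GenericStruct {
        error: String,
        msg: String,
        span: Option<MockSpan>,
        help: Option<String>,
        inner: Vec<MockError>,
    },
    // Positional tuple form (the shape on 0.87.1), same data in the same order.
    GenericTuple(String, String, Option<MockSpan>, Option<String>, Vec<MockError>),
}

fn main() {
    let struct_form = MockError::GenericStruct {
        error: "Error writing ansi sequence".into(),
        msg: "broken pipe".into(),
        span: Some(MockSpan { start: 0, end: 0 }),
        help: None,
        inner: vec![],
    };

    let tuple_form = MockError::GenericTuple(
        "Error writing ansi sequence".into(),
        "broken pipe".into(),
        Some(MockSpan { start: 0, end: 0 }),
        None,
        Vec::new(),
    );

    println!("{struct_form:?}");
    println!("{tuple_form:?}");
}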

View File

@ -2,7 +2,7 @@ use log::trace;
use nu_ansi_term::Style;
use nu_color_config::{get_matching_brackets_style, get_shape_color};
use nu_parser::{flatten_block, parse, FlatShape};
use nu_protocol::ast::{Argument, Block, Expr, Expression, PipelineElement, RecordItem};
use nu_protocol::ast::{Argument, Block, Expr, Expression, PipelineElement};
use nu_protocol::engine::{EngineState, StateWorkingSet};
use nu_protocol::{Config, Span};
use reedline::{Highlighter, StyledText};
@ -17,27 +17,10 @@ impl Highlighter for NuHighlighter {
fn highlight(&self, line: &str, _cursor: usize) -> StyledText {
trace!("highlighting: {}", line);
let highlight_resolved_externals =
self.engine_state.get_config().highlight_resolved_externals;
let mut working_set = StateWorkingSet::new(&self.engine_state);
let block = parse(&mut working_set, None, line.as_bytes(), false);
let (shapes, global_span_offset) = {
let mut shapes = flatten_block(&working_set, &block);
// Highlighting externals has a config point because of concerns that using which to resolve
// externals may slow down things too much.
if highlight_resolved_externals {
for (span, shape) in shapes.iter_mut() {
if *shape == FlatShape::External {
let str_contents =
working_set.get_span_contents(Span::new(span.start, span.end));
let str_word = String::from_utf8_lossy(str_contents).to_string();
if which::which(str_word).ok().is_some() {
*shape = FlatShape::ExternalResolved;
}
}
}
}
let shapes = flatten_block(&working_set, &block);
(shapes, self.engine_state.next_span_start())
};
@ -108,7 +91,6 @@ impl Highlighter for NuHighlighter {
FlatShape::InternalCall(_) => add_colored_token(&shape.1, next_token),
FlatShape::External => add_colored_token(&shape.1, next_token),
FlatShape::ExternalArg => add_colored_token(&shape.1, next_token),
FlatShape::ExternalResolved => add_colored_token(&shape.1, next_token),
FlatShape::Keyword => add_colored_token(&shape.1, next_token),
FlatShape::Literal => add_colored_token(&shape.1, next_token),
FlatShape::Operator => add_colored_token(&shape.1, next_token),
@ -252,11 +234,11 @@ fn find_matching_block_end_in_block(
for e in &p.elements {
match e {
PipelineElement::Expression(_, e)
| PipelineElement::Redirection(_, _, e, _)
| PipelineElement::Redirection(_, _, e)
| PipelineElement::And(_, e)
| PipelineElement::Or(_, e)
| PipelineElement::SameTargetRedirection { cmd: (_, e), .. }
| PipelineElement::SeparateRedirection { out: (_, e, _), .. } => {
| PipelineElement::SeparateRedirection { out: (_, e), .. } => {
if e.span.contains(global_cursor_offset) {
if let Some(pos) = find_matching_block_end_in_expr(
line,
@ -329,10 +311,10 @@ fn find_matching_block_end_in_expr(
Expr::ImportPattern(_) => None,
Expr::Overlay(_) => None,
Expr::Signature(_) => None,
Expr::MatchPattern(_) => None,
Expr::MatchBlock(_) => None,
Expr::Nothing => None,
Expr::Garbage => None,
Expr::Spread(_) => None,
Expr::Table(hdr, rows) => {
if expr_last == global_cursor_offset {
@ -364,16 +346,9 @@ fn find_matching_block_end_in_expr(
Some(expr_last)
} else {
// cursor is inside record
for expr in exprs {
match expr {
RecordItem::Pair(k, v) => {
find_in_expr_or_continue!(k);
find_in_expr_or_continue!(v);
}
RecordItem::Spread(_, record) => {
find_in_expr_or_continue!(record);
}
}
for (k, v) in exprs {
find_in_expr_or_continue!(k);
find_in_expr_or_continue!(v);
}
None
}
@ -385,7 +360,6 @@ fn find_matching_block_end_in_expr(
Argument::Named((_, _, opt_expr)) => opt_expr.as_ref(),
Argument::Positional(inner_expr) => Some(inner_expr),
Argument::Unknown(inner_expr) => Some(inner_expr),
Argument::Spread(inner_expr) => Some(inner_expr),
};
if let Some(inner_expr) = opt_expr {
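
The PipelineElement and RecordItem matches above lean on or-patterns that bind one name across several variants. A self-contained sketch of that idiom, with a hypothetical Element enum standing in for the real AST types:

// Hypothetical stand-in for an AST element enum, only to show or-patterns
// binding the same `e` across tuple and struct variants, as in the hunk above.
struct Expr {
    span: (usize, usize),
}

enum Element {
    Expression(Option<u8>, Expr),
    And(Option<u8>, Expr),
    Or(Option<u8>, Expr),
    SeparateRedirection { out: (Option<u8>, Expr) },
}

fn span_of(element: &Element) -> (usize, usize) {
    match element {
        // One arm covers every variant that carries an expression,
        // binding it to the same name `e`.
        Element::Expression(_, e)
        | Element::And(_, e)
        | Element::Or(_, e)
        | Element::SeparateRedirection { out: (_, e) } => e.span,
    }
}

fn main() {
    let el = Element::And(None, Expr { span: (3, 9) });
    assert_eq!(span_of(&el), (3, 9));
    println!("span: {:?}", span_of(&el));
}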

View File

@ -43,13 +43,13 @@ fn gather_env_vars(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::GenericError {
error: format!("Environment variable was not captured: {env_str}"),
msg: "".into(),
span: None,
help: Some(msg.into()),
inner: vec![],
},
&ShellError::GenericError(
format!("Environment variable was not captured: {env_str}"),
"".to_string(),
None,
Some(msg.into()),
Vec::new(),
),
);
}
@ -75,15 +75,15 @@ fn gather_env_vars(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::GenericError {
error: "Current directory is not a valid utf-8 path".into(),
msg: "".into(),
span: None,
help: Some(format!(
&ShellError::GenericError(
"Current directory is not a valid utf-8 path".to_string(),
"".to_string(),
None,
Some(format!(
"Retrieving current directory failed: {init_cwd:?} not a valid utf-8 path"
)),
inner: vec![],
},
Vec::new(),
),
);
}
}
@ -111,7 +111,7 @@ fn gather_env_vars(
let name = if let Some(Token {
contents: TokenContents::Item,
span,
}) = parts.first()
}) = parts.get(0)
{
let mut working_set = StateWorkingSet::new(engine_state);
let bytes = working_set.get_span_contents(*span);
@ -220,10 +220,6 @@ pub fn eval_source(
source,
false,
);
if let Some(warning) = working_set.parse_warnings.first() {
report_error(&working_set, warning);
}
if let Some(err) = working_set.parse_errors.first() {
set_last_exit_code(stack, 1);
report_error(&working_set, err);
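
One small pattern that repeats in this file and in the completion tests below: slice accesses written as .first() on main become .get(0) on 0.87.1. The two are interchangeable; .first() is simply the spelling clippy nudges toward. A tiny sketch:

fn main() {
    let parts = vec!["FOO", "=", "bar"];

    // Equivalent ways to look at the first element; both return Option<&&str>.
    let via_first = parts.first();
    let via_get = parts.get(0);

    assert_eq!(via_first, via_get);
    println!("{via_first:?}");
}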

View File

@ -59,29 +59,6 @@ fn extern_completer() -> NuCompleter {
NuCompleter::new(std::sync::Arc::new(engine), stack)
}
#[fixture]
fn custom_completer() -> NuCompleter {
// Create a new engine
let (dir, _, mut engine, mut stack) = new_engine();
// Add record value as example
let record = r#"
let external_completer = {|spans|
$spans
}
$env.config.completions.external = {
enable: true
max_results: 100
completer: $external_completer
}
"#;
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
// Instantiate a new completer
NuCompleter::new(std::sync::Arc::new(engine), stack)
}
#[test]
fn variables_dollar_sign_with_varialblecompletion() {
let (_, _, engine, stack) = new_engine();
@ -91,7 +68,7 @@ fn variables_dollar_sign_with_varialblecompletion() {
let target_dir = "$ ";
let suggestions = completer.complete(target_dir, target_dir.len());
assert_eq!(8, suggestions.len());
assert_eq!(7, suggestions.len());
}
#[rstest]
@ -144,34 +121,15 @@ fn dotnu_completions() {
let completion_str = "source-env ".to_string();
let suggestions = completer.complete(&completion_str, completion_str.len());
assert_eq!(2, suggestions.len());
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value);
#[cfg(windows)]
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
#[cfg(not(windows))]
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
assert_eq!(1, suggestions.len());
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
// Test use completion
let completion_str = "use ".to_string();
let suggestions = completer.complete(&completion_str, completion_str.len());
assert_eq!(2, suggestions.len());
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value);
#[cfg(windows)]
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
#[cfg(not(windows))]
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
// Test overlay use completion
let completion_str = "overlay use ".to_string();
let suggestions = completer.complete(&completion_str, completion_str.len());
assert_eq!(2, suggestions.len());
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value);
#[cfg(windows)]
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
#[cfg(not(windows))]
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
assert_eq!(1, suggestions.len());
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
}
#[test]
@ -183,7 +141,7 @@ fn external_completer_trailing_space() {
let suggestions = run_external_completion(block, &input);
assert_eq!(3, suggestions.len());
assert_eq!("gh", suggestions.first().unwrap().value);
assert_eq!("gh", suggestions.get(0).unwrap().value);
assert_eq!("alias", suggestions.get(1).unwrap().value);
assert_eq!("", suggestions.get(2).unwrap().value);
}
@ -195,7 +153,7 @@ fn external_completer_no_trailing_space() {
let suggestions = run_external_completion(block, &input);
assert_eq!(2, suggestions.len());
assert_eq!("gh", suggestions.first().unwrap().value);
assert_eq!("gh", suggestions.get(0).unwrap().value);
assert_eq!("alias", suggestions.get(1).unwrap().value);
}
@ -206,7 +164,7 @@ fn external_completer_pass_flags() {
let suggestions = run_external_completion(block, &input);
assert_eq!(3, suggestions.len());
assert_eq!("gh", suggestions.first().unwrap().value);
assert_eq!("gh", suggestions.get(0).unwrap().value);
assert_eq!("api", suggestions.get(1).unwrap().value);
assert_eq!("--", suggestions.get(2).unwrap().value);
}
@ -227,7 +185,6 @@ fn file_completions() {
let expected_paths: Vec<String> = vec![
folder(dir.join("another")),
file(dir.join("custom_completion.nu")),
folder(dir.join("directory_completion")),
file(dir.join("nushell")),
folder(dir.join("test_a")),
folder(dir.join("test_b")),
@ -343,7 +300,6 @@ fn command_ls_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -354,7 +310,6 @@ fn command_ls_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -377,7 +332,6 @@ fn command_open_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -388,7 +342,6 @@ fn command_open_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -412,7 +365,6 @@ fn command_rm_with_globcompletion() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -423,7 +375,6 @@ fn command_rm_with_globcompletion() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -447,7 +398,6 @@ fn command_cp_with_globcompletion() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -458,7 +408,6 @@ fn command_cp_with_globcompletion() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -482,7 +431,6 @@ fn command_save_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -493,7 +441,6 @@ fn command_save_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -517,7 +464,6 @@ fn command_touch_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -528,7 +474,6 @@ fn command_touch_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -552,7 +497,6 @@ fn command_watch_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -563,7 +507,6 @@ fn command_watch_with_filecompletion() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -659,7 +602,6 @@ fn folder_with_directorycompletions() {
// Create the expected values
let expected_paths: Vec<String> = vec![
folder(dir.join("another")),
folder(dir.join("directory_completion")),
folder(dir.join("test_a")),
folder(dir.join("test_b")),
folder(dir.join(".hidden_folder")),
@ -874,7 +816,6 @@ fn unknown_command_completion() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -885,7 +826,6 @@ fn unknown_command_completion() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -936,7 +876,6 @@ fn filecompletions_triggers_after_cursor() {
let expected_paths: Vec<String> = vec![
"another\\".to_string(),
"custom_completion.nu".to_string(),
"directory_completion\\".to_string(),
"nushell".to_string(),
"test_a\\".to_string(),
"test_b\\".to_string(),
@ -947,7 +886,6 @@ fn filecompletions_triggers_after_cursor() {
let expected_paths: Vec<String> = vec![
"another/".to_string(),
"custom_completion.nu".to_string(),
"directory_completion/".to_string(),
"nushell".to_string(),
"test_a/".to_string(),
"test_b/".to_string(),
@ -1000,34 +938,6 @@ fn extern_complete_flags(mut extern_completer: NuCompleter) {
match_suggestions(expected, suggestions);
}
#[rstest]
fn custom_completer_triggers_cursor_before_word(mut custom_completer: NuCompleter) {
let suggestions = custom_completer.complete("cmd foo bar", 8);
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "".into()];
match_suggestions(expected, suggestions);
}
#[rstest]
fn custom_completer_triggers_cursor_on_word_left_boundary(mut custom_completer: NuCompleter) {
let suggestions = custom_completer.complete("cmd foo bar", 8);
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "".into()];
match_suggestions(expected, suggestions);
}
#[rstest]
fn custom_completer_triggers_cursor_next_to_word(mut custom_completer: NuCompleter) {
let suggestions = custom_completer.complete("cmd foo bar", 11);
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "bar".into()];
match_suggestions(expected, suggestions);
}
#[rstest]
fn custom_completer_triggers_cursor_after_word(mut custom_completer: NuCompleter) {
let suggestions = custom_completer.complete("cmd foo bar ", 12);
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "bar".into(), "".into()];
match_suggestions(expected, suggestions);
}
#[ignore = "was reverted, still needs fixing"]
#[rstest]
fn alias_offset_bug_7648() {
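
These completer tests are wired together with rstest: a #[fixture] function builds the completer for each test, and #[rstest] tests receive it as a parameter named after the fixture. A minimal sketch of that wiring, assuming rstest (a dev-dependency of this workspace) and a hypothetical Completer type in place of NuCompleter:

// Minimal rstest fixture/test sketch; `Completer` is a hypothetical stand-in.
use rstest::{fixture, rstest};

struct Completer {
    commands: Vec<String>,
}

impl Completer {
    fn complete(&self, prefix: &str) -> Vec<String> {
        self.commands
            .iter()
            .filter(|c| c.starts_with(prefix))
            .cloned()
            .collect()
    }
}

#[fixture]
fn completer() -> Completer {
    // The fixture is resolved once per test invocation and injected by name.
    Completer {
        commands: vec!["cmd".into(), "cp".into(), "cd".into()],
    }
}

#[rstest]
fn completes_by_prefix(completer: Completer) {
    let suggestions = completer.complete("c");
    assert_eq!(3, suggestions.len());
}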

View File

@ -5,21 +5,21 @@ edition = "2021"
license = "MIT"
name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
version = "0.88.2"
version = "0.87.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
nu-engine = { path = "../nu-engine", version = "0.88.2" }
nu-glob = { path = "../nu-glob", version = "0.88.2" }
nu-parser = { path = "../nu-parser", version = "0.88.2" }
nu-path = { path = "../nu-path", version = "0.88.2" }
nu-protocol = { path = "../nu-protocol", version = "0.88.2" }
nu-utils = { path = "../nu-utils", version = "0.88.2" }
nu-engine = { path = "../nu-engine", version = "0.87.1" }
nu-glob = { path = "../nu-glob", version = "0.87.1" }
nu-parser = { path = "../nu-parser", version = "0.87.1" }
nu-path = { path = "../nu-path", version = "0.87.1" }
nu-protocol = { path = "../nu-protocol", version = "0.87.1" }
nu-utils = { path = "../nu-utils", version = "0.87.1" }
indexmap = "2.1"
miette = "5.10.0"
[dev-dependencies]
nu-test-support = { path = "../nu-test-support", version = "0.88.2" }
nu-test-support = { path = "../nu-test-support", version = "0.87.1" }
rstest = "0.18.2"

View File

@ -64,10 +64,12 @@ fn arg_glob_opt(
// user wasn't referring to a specific thing in filesystem, try to glob it.
match glob_with_parent(&pattern.item, options, cwd) {
Ok(p) => Ok(p),
Err(pat_err) => Err(ShellError::InvalidGlobPattern {
msg: pat_err.msg.into(),
span: pattern.span,
}),
Err(pat_err) => {
Err(ShellError::InvalidGlobPattern(
pat_err.msg.into(),
pattern.span, // improve specificity
))
}
}
}

View File

@ -90,11 +90,11 @@ pub fn eval_hook(
if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err);
return Err(ShellError::UnsupportedConfigValue {
expected: "valid source code".into(),
value: "source code with syntax errors".into(),
return Err(ShellError::UnsupportedConfigValue(
"valid source code".into(),
"source code with syntax errors".into(),
span,
});
));
}
(output, working_set.render(), vars)
@ -164,11 +164,11 @@ pub fn eval_hook(
{
val
} else {
return Err(ShellError::UnsupportedConfigValue {
expected: "boolean output".to_string(),
value: "other PipelineData variant".to_string(),
span: other_span,
});
return Err(ShellError::UnsupportedConfigValue(
"boolean output".to_string(),
"other PipelineData variant".to_string(),
other_span,
));
}
}
Err(err) => {
@ -176,11 +176,11 @@ pub fn eval_hook(
}
}
} else {
return Err(ShellError::UnsupportedConfigValue {
expected: "block".to_string(),
value: format!("{}", condition.get_type()),
span: other_span,
});
return Err(ShellError::UnsupportedConfigValue(
"block".to_string(),
format!("{}", condition.get_type()),
other_span,
));
}
} else {
// always run the hook
@ -222,11 +222,11 @@ pub fn eval_hook(
if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err);
return Err(ShellError::UnsupportedConfigValue {
expected: "valid source code".into(),
value: "source code with syntax errors".into(),
span: source_span,
});
return Err(ShellError::UnsupportedConfigValue(
"valid source code".into(),
"source code with syntax errors".into(),
source_span,
));
}
(output, working_set.render(), vars)
@ -277,11 +277,11 @@ pub fn eval_hook(
)?;
}
other => {
return Err(ShellError::UnsupportedConfigValue {
expected: "block or string".to_string(),
value: format!("{}", other.get_type()),
span: source_span,
});
return Err(ShellError::UnsupportedConfigValue(
"block or string".to_string(),
format!("{}", other.get_type()),
source_span,
));
}
}
}
@ -293,11 +293,11 @@ pub fn eval_hook(
output = run_hook_block(engine_state, stack, val.block_id, input, arguments, span)?;
}
other => {
return Err(ShellError::UnsupportedConfigValue {
expected: "string, block, record, or list of commands".into(),
value: format!("{}", other.get_type()),
span: other.span(),
});
return Err(ShellError::UnsupportedConfigValue(
"string, block, record, or list of commands".into(),
format!("{}", other.get_type()),
other.span(),
));
}
}

View File

@ -72,22 +72,22 @@ fn get_editor_commandline(
let params = editor_cmd.collect::<Result<_, ShellError>>()?;
Ok((editor, params))
}
_ => Err(ShellError::GenericError {
error: "Editor executable is missing".into(),
msg: "Set the first element to an executable".into(),
span: Some(value.span()),
help: Some(HELP_MSG.into()),
inner: vec![],
}),
_ => Err(ShellError::GenericError(
"Editor executable is missing".into(),
"Set the first element to an executable".into(),
Some(value.span()),
Some(HELP_MSG.into()),
vec![],
)),
}
}
Value::String { .. } | Value::List { .. } => Err(ShellError::GenericError {
error: format!("{var_name} should be a non-empty string or list<String>"),
msg: "Specify an executable here".into(),
span: Some(value.span()),
help: Some(HELP_MSG.into()),
inner: vec![],
}),
Value::String { .. } | Value::List { .. } => Err(ShellError::GenericError(
format!("{var_name} should be a non-empty string or list<String>"),
"Specify an executable here".into(),
Some(value.span()),
Some(HELP_MSG.into()),
vec![],
)),
x => Err(ShellError::CantConvert {
to_type: "string or list<string>".into(),
from_type: x.get_type().to_string(),
@ -114,14 +114,13 @@ pub fn get_editor(
} else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL")
} else {
Err(ShellError::GenericError {
error: "No editor configured".into(),
msg:
"Please specify one via `$env.config.buffer_editor` or `$env.EDITOR`/`$env.VISUAL`"
.into(),
span: Some(span),
help: Some(HELP_MSG.into()),
inner: vec![],
})
Err(ShellError::GenericError(
"No editor configured".into(),
"Please specify one via `$env.config.buffer_editor` or `$env.EDITOR`/`$env.VISUAL`"
.into(),
Some(span),
Some(HELP_MSG.into()),
vec![],
))
}
}

View File

@ -5,7 +5,7 @@ edition = "2021"
license = "MIT"
name = "nu-cmd-dataframe"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe"
version = "0.88.2"
version = "0.87.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -13,22 +13,19 @@ version = "0.88.2"
bench = false
[dependencies]
nu-engine = { path = "../nu-engine", version = "0.88.2" }
nu-parser = { path = "../nu-parser", version = "0.88.2" }
nu-protocol = { path = "../nu-protocol", version = "0.88.2" }
nu-engine = { path = "../nu-engine", version = "0.87.1" }
nu-parser = { path = "../nu-parser", version = "0.87.1" }
nu-protocol = { path = "../nu-protocol", version = "0.87.1" }
# Potential dependencies for extras
chrono = { version = "0.4", features = ["std", "unstable-locales"], default-features = false }
chrono-tz = "0.8"
fancy-regex = "0.12"
fancy-regex = "0.11"
indexmap = { version = "2.1" }
num = { version = "0.4", optional = true }
serde = { version = "1.0", features = ["derive"] }
sqlparser = { version = "0.39", optional = true }
polars-io = { version = "0.35", features = ["avro"], optional = true }
polars-arrow = { version = "0.35", optional = true }
polars-ops = { version = "0.35", optional = true }
polars-plan = { version = "0.35", optional = true }
sqlparser = { version = "0.36.1", optional = true }
polars-io = { version = "0.33", features = ["avro"], optional = true }
[dependencies.polars]
features = [
@ -62,12 +59,12 @@ features = [
"to_dummies",
]
optional = true
version = "0.35"
version = "0.33"
[features]
dataframe = ["num", "polars", "polars-io", "polars-arrow", "polars-ops", "polars-plan", "sqlparser"]
dataframe = ["num", "polars", "polars-io", "sqlparser"]
default = []
[dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.88.2" }
nu-test-support = { path = "../nu-test-support", version = "0.88.2" }
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.87.1" }
nu-test-support = { path = "../nu-test-support", version = "0.87.1" }

View File

@ -68,24 +68,26 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let new_df = col_string
.first()
.ok_or_else(|| ShellError::GenericError {
error: "Empty names list".into(),
msg: "No column names were found".into(),
span: Some(col_span),
help: None,
inner: vec![],
.get(0)
.ok_or_else(|| {
ShellError::GenericError(
"Empty names list".into(),
"No column names were found".into(),
Some(col_span),
None,
Vec::new(),
)
})
.and_then(|col| {
df.as_ref()
.drop(&col.item)
.map_err(|e| ShellError::GenericError {
error: "Error dropping column".into(),
msg: e.to_string(),
span: Some(col.span),
help: None,
inner: vec![],
})
df.as_ref().drop(&col.item).map_err(|e| {
ShellError::GenericError(
"Error dropping column".into(),
e.to_string(),
Some(col.span),
None,
Vec::new(),
)
})
})?;
// If there are more columns in the drop selection list, these
@ -94,15 +96,15 @@ fn command(
.iter()
.skip(1)
.try_fold(new_df, |new_df, col| {
new_df
.drop(&col.item)
.map_err(|e| ShellError::GenericError {
error: "Error dropping column".into(),
msg: e.to_string(),
span: Some(col.span),
help: None,
inner: vec![],
})
new_df.drop(&col.item).map_err(|e| {
ShellError::GenericError(
"Error dropping column".into(),
e.to_string(),
Some(col.span),
None,
Vec::new(),
)
})
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

View File

@ -100,12 +100,14 @@ fn command(
df.as_ref()
.unique(subset_slice, keep_strategy, None)
.map_err(|e| ShellError::GenericError {
error: "Error dropping duplicates".into(),
msg: e.to_string(),
span: Some(col_span),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error dropping duplicates".into(),
e.to_string(),
Some(col_span),
None,
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

View File

@ -115,12 +115,14 @@ fn command(
df.as_ref()
.drop_nulls(subset_slice)
.map_err(|e| ShellError::GenericError {
error: "Error dropping nulls".into(),
msg: e.to_string(),
span: Some(col_span),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error dropping nulls".into(),
e.to_string(),
Some(col_span),
None,
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

View File

@ -88,12 +88,14 @@ fn command(
df.as_ref()
.to_dummies(None, drop_first)
.map_err(|e| ShellError::GenericError {
error: "Error calculating dummies".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The only allowed column types for dummies are String or Int".into()),
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error calculating dummies".into(),
e.to_string(),
Some(call.head),
Some("The only allowed column types for dummies are String or Int".into()),
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

View File

@ -105,22 +105,26 @@ fn command_eager(
))
} else {
let mask = NuDataFrame::try_from_value(mask_value)?.as_series(mask_span)?;
let mask = mask.bool().map_err(|e| ShellError::GenericError {
error: "Error casting to bool".into(),
msg: e.to_string(),
span: Some(mask_span),
help: Some("Perhaps you want to use a series with booleans as mask".into()),
inner: vec![],
let mask = mask.bool().map_err(|e| {
ShellError::GenericError(
"Error casting to bool".into(),
e.to_string(),
Some(mask_span),
Some("Perhaps you want to use a series with booleans as mask".into()),
Vec::new(),
)
})?;
df.as_ref()
.filter(mask)
.map_err(|e| ShellError::GenericError {
error: "Error filtering dataframe".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The only allowed column types for dummies are String or Int".into()),
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error filtering dataframe".into(),
e.to_string(),
Some(call.head),
Some("The only allowed column types for dummies are String or Int".into()),
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

View File

@ -70,12 +70,14 @@ fn command(
df.as_ref()
.select(col_string)
.map_err(|e| ShellError::GenericError {
error: "Error selecting columns".into(),
msg: e.to_string(),
span: Some(col_span),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error selecting columns".into(),
e.to_string(),
Some(col_span),
None,
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

View File

@ -152,34 +152,38 @@ fn command(
let mut res = df
.as_ref()
.melt(&id_col_string, &val_col_string)
.map_err(|e| ShellError::GenericError {
error: "Error calculating melt".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error calculating melt".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
if let Some(name) = &variable_name {
res.rename("variable", &name.item)
.map_err(|e| ShellError::GenericError {
error: "Error renaming column".into(),
msg: e.to_string(),
span: Some(name.span),
help: None,
inner: vec![],
})?;
res.rename("variable", &name.item).map_err(|e| {
ShellError::GenericError(
"Error renaming column".into(),
e.to_string(),
Some(name.span),
None,
Vec::new(),
)
})?;
}
if let Some(name) = &value_name {
res.rename("value", &name.item)
.map_err(|e| ShellError::GenericError {
error: "Error renaming column".into(),
msg: e.to_string(),
span: Some(name.span),
help: None,
inner: vec![],
})?;
res.rename("value", &name.item).map_err(|e| {
ShellError::GenericError(
"Error renaming column".into(),
e.to_string(),
Some(name.span),
None,
Vec::new(),
)
})?;
}
Ok(PipelineData::Value(
@ -194,50 +198,50 @@ fn check_column_datatypes<T: AsRef<str>>(
col_span: Span,
) -> Result<(), ShellError> {
if cols.is_empty() {
return Err(ShellError::GenericError {
error: "Merge error".into(),
msg: "empty column list".into(),
span: Some(col_span),
help: None,
inner: vec![],
});
return Err(ShellError::GenericError(
"Merge error".into(),
"empty column list".into(),
Some(col_span),
None,
Vec::new(),
));
}
// Checking if they are same type
if cols.len() > 1 {
for w in cols.windows(2) {
let l_series = df
.column(w[0].as_ref())
.map_err(|e| ShellError::GenericError {
error: "Error selecting columns".into(),
msg: e.to_string(),
span: Some(col_span),
help: None,
inner: vec![],
})?;
let l_series = df.column(w[0].as_ref()).map_err(|e| {
ShellError::GenericError(
"Error selecting columns".into(),
e.to_string(),
Some(col_span),
None,
Vec::new(),
)
})?;
let r_series = df
.column(w[1].as_ref())
.map_err(|e| ShellError::GenericError {
error: "Error selecting columns".into(),
msg: e.to_string(),
span: Some(col_span),
help: None,
inner: vec![],
})?;
let r_series = df.column(w[1].as_ref()).map_err(|e| {
ShellError::GenericError(
"Error selecting columns".into(),
e.to_string(),
Some(col_span),
None,
Vec::new(),
)
})?;
if l_series.dtype() != r_series.dtype() {
return Err(ShellError::GenericError {
error: "Merge error".into(),
msg: "found different column types in list".into(),
span: Some(col_span),
help: Some(format!(
return Err(ShellError::GenericError(
"Merge error".into(),
"found different column types in list".into(),
Some(col_span),
Some(format!(
"datatypes {} and {} are incompatible",
l_series.dtype(),
r_series.dtype()
)),
inner: vec![],
});
Vec::new(),
));
}
}
}

View File

@ -121,15 +121,15 @@ fn command(
"json" => from_json(engine_state, stack, call),
"jsonl" => from_jsonl(engine_state, stack, call),
"avro" => from_avro(engine_state, stack, call),
_ => Err(ShellError::FileNotFoundCustom {
msg: format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"),
span: blamed,
}),
_ => Err(ShellError::FileNotFoundCustom(
format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"),
blamed,
)),
},
None => Err(ShellError::FileNotFoundCustom {
msg: "File without extension".into(),
span: file.span,
}),
None => Err(ShellError::FileNotFoundCustom(
"File without extension".into(),
file.span,
)),
}
.map(|value| PipelineData::Value(value, None))
}
@ -150,16 +150,17 @@ fn from_parquet(
low_memory: false,
cloud_options: None,
use_statistics: false,
hive_partitioning: false,
};
let df: NuLazyFrame = LazyFrame::scan_parquet(file, args)
.map_err(|e| ShellError::GenericError {
error: "Parquet reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Parquet reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
@ -168,12 +169,14 @@ fn from_parquet(
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
error: "Error opening file".into(),
msg: e.to_string(),
span: Some(file.span),
help: None,
inner: vec![],
let r = File::open(&file.item).map_err(|e| {
ShellError::GenericError(
"Error opening file".into(),
e.to_string(),
Some(file.span),
None,
Vec::new(),
)
})?;
let reader = ParquetReader::new(r);
@ -184,12 +187,14 @@ fn from_parquet(
let df: NuDataFrame = reader
.finish()
.map_err(|e| ShellError::GenericError {
error: "Parquet reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Parquet reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
@ -205,12 +210,14 @@ fn from_avro(
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
error: "Error opening file".into(),
msg: e.to_string(),
span: Some(file.span),
help: None,
inner: vec![],
let r = File::open(&file.item).map_err(|e| {
ShellError::GenericError(
"Error opening file".into(),
e.to_string(),
Some(file.span),
None,
Vec::new(),
)
})?;
let reader = AvroReader::new(r);
@ -221,12 +228,14 @@ fn from_avro(
let df: NuDataFrame = reader
.finish()
.map_err(|e| ShellError::GenericError {
error: "Avro reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Avro reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
@ -249,12 +258,14 @@ fn from_ipc(
};
let df: NuLazyFrame = LazyFrame::scan_ipc(file, args)
.map_err(|e| ShellError::GenericError {
error: "IPC reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"IPC reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
@ -263,12 +274,14 @@ fn from_ipc(
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
error: "Error opening file".into(),
msg: e.to_string(),
span: Some(file.span),
help: None,
inner: vec![],
let r = File::open(&file.item).map_err(|e| {
ShellError::GenericError(
"Error opening file".into(),
e.to_string(),
Some(file.span),
None,
Vec::new(),
)
})?;
let reader = IpcReader::new(r);
@ -279,12 +292,14 @@ fn from_ipc(
let df: NuDataFrame = reader
.finish()
.map_err(|e| ShellError::GenericError {
error: "IPC reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"IPC reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
@ -298,12 +313,14 @@ fn from_json(
call: &Call,
) -> Result<Value, ShellError> {
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let file = File::open(&file.item).map_err(|e| ShellError::GenericError {
error: "Error opening file".into(),
msg: e.to_string(),
span: Some(file.span),
help: None,
inner: vec![],
let file = File::open(&file.item).map_err(|e| {
ShellError::GenericError(
"Error opening file".into(),
e.to_string(),
Some(file.span),
None,
Vec::new(),
)
})?;
let buf_reader = BufReader::new(file);
@ -311,12 +328,14 @@ fn from_json(
let df: NuDataFrame = reader
.finish()
.map_err(|e| ShellError::GenericError {
error: "Json reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Json reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
@ -330,12 +349,14 @@ fn from_jsonl(
) -> Result<Value, ShellError> {
let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let file = File::open(&file.item).map_err(|e| ShellError::GenericError {
error: "Error opening file".into(),
msg: e.to_string(),
span: Some(file.span),
help: None,
inner: vec![],
let file = File::open(&file.item).map_err(|e| {
ShellError::GenericError(
"Error opening file".into(),
e.to_string(),
Some(file.span),
None,
Vec::new(),
)
})?;
let buf_reader = BufReader::new(file);
@ -345,12 +366,14 @@ fn from_jsonl(
let df: NuDataFrame = reader
.finish()
.map_err(|e| ShellError::GenericError {
error: "Json lines reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Json lines reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
@ -376,19 +399,19 @@ fn from_csv(
None => csv_reader,
Some(d) => {
if d.item.len() != 1 {
return Err(ShellError::GenericError {
error: "Incorrect delimiter".into(),
msg: "Delimiter has to be one character".into(),
span: Some(d.span),
help: None,
inner: vec![],
});
return Err(ShellError::GenericError(
"Incorrect delimiter".into(),
"Delimiter has to be one character".into(),
Some(d.span),
None,
Vec::new(),
));
} else {
let delimiter = match d.item.chars().next() {
Some(d) => d as u8,
None => unreachable!(),
};
csv_reader.with_separator(delimiter)
csv_reader.with_delimiter(delimiter)
}
}
};
@ -407,12 +430,14 @@ fn from_csv(
let df: NuLazyFrame = csv_reader
.finish()
.map_err(|e| ShellError::GenericError {
error: "Parquet reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Parquet reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
@ -420,12 +445,14 @@ fn from_csv(
} else {
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
let csv_reader = CsvReader::from_path(&file.item)
.map_err(|e| ShellError::GenericError {
error: "Error creating CSV reader".into(),
msg: e.to_string(),
span: Some(file.span),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error creating CSV reader".into(),
e.to_string(),
Some(file.span),
None,
Vec::new(),
)
})?
.with_encoding(CsvEncoding::LossyUtf8);
@ -433,19 +460,19 @@ fn from_csv(
None => csv_reader,
Some(d) => {
if d.item.len() != 1 {
return Err(ShellError::GenericError {
error: "Incorrect delimiter".into(),
msg: "Delimiter has to be one character".into(),
span: Some(d.span),
help: None,
inner: vec![],
});
return Err(ShellError::GenericError(
"Incorrect delimiter".into(),
"Delimiter has to be one character".into(),
Some(d.span),
None,
Vec::new(),
));
} else {
let delimiter = match d.item.chars().next() {
Some(d) => d as u8,
None => unreachable!(),
};
csv_reader.with_separator(delimiter)
csv_reader.with_delimiter(delimiter)
}
}
};
@ -469,12 +496,14 @@ fn from_csv(
let df: NuDataFrame = csv_reader
.finish()
.map_err(|e| ShellError::GenericError {
error: "Parquet reader error".into(),
msg: format!("{e:?}"),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Parquet reader error".into(),
format!("{e:?}"),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
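
Both the lazy and eager branches of from_csv above validate the optional delimiter the same way: anything other than a single character is rejected, and the single character is handed to the reader as one byte. A self-contained sketch of that check (parse_delimiter is a hypothetical helper, not code from this diff):

// Hypothetical helper mirroring the delimiter validation in from_csv above:
// the flag must be exactly one character long, and it is passed on as a byte.
fn parse_delimiter(flag: Option<&str>) -> Result<Option<u8>, String> {
    match flag {
        None => Ok(None),
        Some(d) if d.len() != 1 => Err("Delimiter has to be one character".to_string()),
        Some(d) => {
            // Safe: the guard above ensures there is exactly one (ASCII) character.
            let c = d.chars().next().expect("one character checked above");
            Ok(Some(c as u8))
        }
    }
}

fn main() {
    assert_eq!(parse_delimiter(None), Ok(None));
    assert_eq!(parse_delimiter(Some(";")), Ok(Some(b';')));
    assert!(parse_delimiter(Some("||")).is_err());
}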

View File

@ -76,15 +76,15 @@ fn command(
let mut ctx = SQLContext::new();
ctx.register("df", &df.df);
let df_sql = ctx
.execute(&sql_query)
.map_err(|e| ShellError::GenericError {
error: "Dataframe Error".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
let df_sql = ctx.execute(&sql_query).map_err(|e| {
ShellError::GenericError(
"Dataframe Error".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let lazy = NuLazyFrame::new(false, df_sql);
let eager = lazy.collect(call.head)?;

View File

@ -130,15 +130,15 @@ fn command_eager(
let new_names = extract_strings(new_names)?;
for (from, to) in columns.iter().zip(new_names.iter()) {
df.as_mut()
.rename(from, to)
.map_err(|e| ShellError::GenericError {
error: "Error renaming".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
df.as_mut().rename(from, to).map_err(|e| {
ShellError::GenericError(
"Error renaming".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
}
Ok(PipelineData::Value(df.into_value(call.head), None))

View File

@ -4,8 +4,6 @@ use nu_protocol::{
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type,
};
use polars::prelude::NamedFrom;
use polars::series::Series;
use super::super::values::NuDataFrame;
@ -83,7 +81,7 @@ fn command(
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let rows: Option<Spanned<i64>> = call.get_flag(engine_state, stack, "n-rows")?;
let rows: Option<Spanned<usize>> = call.get_flag(engine_state, stack, "n-rows")?;
let fraction: Option<Spanned<f64>> = call.get_flag(engine_state, stack, "fraction")?;
let seed: Option<u64> = call
.get_flag::<i64>(engine_state, stack, "seed")?
@ -96,38 +94,42 @@ fn command(
match (rows, fraction) {
(Some(rows), None) => df
.as_ref()
.sample_n(&Series::new("s", &[rows.item]), replace, shuffle, seed)
.map_err(|e| ShellError::GenericError {
error: "Error creating sample".into(),
msg: e.to_string(),
span: Some(rows.span),
help: None,
inner: vec![],
.sample_n(rows.item, replace, shuffle, seed)
.map_err(|e| {
ShellError::GenericError(
"Error creating sample".into(),
e.to_string(),
Some(rows.span),
None,
Vec::new(),
)
}),
(None, Some(frac)) => df
.as_ref()
.sample_frac(&Series::new("frac", &[frac.item]), replace, shuffle, seed)
.map_err(|e| ShellError::GenericError {
error: "Error creating sample".into(),
msg: e.to_string(),
span: Some(frac.span),
help: None,
inner: vec![],
.sample_frac(frac.item, replace, shuffle, seed)
.map_err(|e| {
ShellError::GenericError(
"Error creating sample".into(),
e.to_string(),
Some(frac.span),
None,
Vec::new(),
)
}),
(Some(_), Some(_)) => Err(ShellError::GenericError {
error: "Incompatible flags".into(),
msg: "Only one selection criterion allowed".into(),
span: Some(call.head),
help: None,
inner: vec![],
}),
(None, None) => Err(ShellError::GenericError {
error: "No selection".into(),
msg: "No selection criterion was found".into(),
span: Some(call.head),
help: Some("Perhaps you want to use the flag -n or -f".into()),
inner: vec![],
}),
(Some(_), Some(_)) => Err(ShellError::GenericError(
"Incompatible flags".into(),
"Only one selection criterion allowed".into(),
Some(call.head),
None,
Vec::new(),
)),
(None, None) => Err(ShellError::GenericError(
"No selection".into(),
"No selection criterion was found".into(),
Some(call.head),
Some("Perhaps you want to use the flag -n or -f".into()),
Vec::new(),
)),
}
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

View File

@ -2,8 +2,7 @@ use crate::dataframe::eager::sql_expr::parse_sql_expr;
use polars::error::{ErrString, PolarsError};
use polars::prelude::{col, DataFrame, DataType, IntoLazy, LazyFrame};
use sqlparser::ast::{
Expr as SqlExpr, GroupByExpr, Select, SelectItem, SetExpr, Statement, TableFactor,
Value as SQLValue,
Expr as SqlExpr, Select, SelectItem, SetExpr, Statement, TableFactor, Value as SQLValue,
};
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;
@ -30,7 +29,7 @@ impl SQLContext {
fn execute_select(&self, select_stmt: &Select) -> Result<LazyFrame, PolarsError> {
// Determine involved dataframe
// Implicit join require some more work in query parsers, Explicit join are preferred for now.
let tbl = select_stmt.from.first().ok_or_else(|| {
let tbl = select_stmt.from.get(0).ok_or_else(|| {
PolarsError::ComputeError(ErrString::from("No table found in select statement"))
})?;
let mut alias_map = HashMap::new();
@ -38,7 +37,7 @@ impl SQLContext {
TableFactor::Table { name, alias, .. } => {
let tbl_name = name
.0
.first()
.get(0)
.ok_or_else(|| {
PolarsError::ComputeError(ErrString::from(
"No table found in select statement",
@ -97,13 +96,8 @@ impl SQLContext {
.collect::<Result<Vec<_>, PolarsError>>()?;
// Check for group by
// After projection since there might be number.
let group_by = match &select_stmt.group_by {
GroupByExpr::All =>
Err(
PolarsError::ComputeError("Group-By Error: Only positive number or expression are supported, not all".into())
)?,
GroupByExpr::Expressions(expressions) => expressions
}
let group_by = select_stmt
.group_by
.iter()
.map(
|e|match e {
@ -188,7 +182,7 @@ impl SQLContext {
))
} else {
let ast = ast
.first()
.get(0)
.ok_or_else(|| PolarsError::ComputeError(ErrString::from("No statement found")))?;
Ok(match ast {
Statement::Query(query) => {

View File

@ -2,8 +2,8 @@ use polars::error::PolarsError;
use polars::prelude::{col, lit, DataType, Expr, LiteralValue, PolarsResult as Result, TimeUnit};
use sqlparser::ast::{
ArrayElemTypeDef, BinaryOperator as SQLBinaryOperator, DataType as SQLDataType,
Expr as SqlExpr, Function as SQLFunction, Value as SqlValue, WindowType,
BinaryOperator as SQLBinaryOperator, DataType as SQLDataType, Expr as SqlExpr,
Function as SQLFunction, Value as SqlValue, WindowType,
};
fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
@ -13,7 +13,7 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
| SQLDataType::Uuid
| SQLDataType::Clob(_)
| SQLDataType::Text
| SQLDataType::String(_) => DataType::Utf8,
| SQLDataType::String => DataType::Utf8,
SQLDataType::Float(_) => DataType::Float32,
SQLDataType::Real => DataType::Float32,
SQLDataType::Double => DataType::Float64,
@ -31,12 +31,9 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
SQLDataType::Time(_, _) => DataType::Time,
SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Microseconds, None),
SQLDataType::Interval => DataType::Duration(TimeUnit::Microseconds),
SQLDataType::Array(array_type_def) => match array_type_def {
ArrayElemTypeDef::AngleBracket(inner_type)
| ArrayElemTypeDef::SquareBracket(inner_type) => {
DataType::List(Box::new(map_sql_polars_datatype(inner_type)?))
}
_ => {
SQLDataType::Array(inner_type) => match inner_type {
Some(inner_type) => DataType::List(Box::new(map_sql_polars_datatype(inner_type)?)),
None => {
return Err(PolarsError::ComputeError(
"SQL Datatype Array(None) was not supported in polars-sql yet!".into(),
))
@ -117,11 +114,7 @@ pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
binary_op_(left, right, op)?
}
SqlExpr::Function(sql_function) => parse_sql_function(sql_function)?,
SqlExpr::Cast {
expr,
data_type,
format: _,
} => cast_(parse_sql_expr(expr)?, data_type)?,
SqlExpr::Cast { expr, data_type } => cast_(parse_sql_expr(expr)?, data_type)?,
SqlExpr::Nested(expr) => parse_sql_expr(expr)?,
SqlExpr::Value(value) => literal_expr(value)?,
_ => {

View File

@ -127,23 +127,23 @@ fn command(
if (&0.0..=&1.0).contains(&val) {
Ok(*val)
} else {
Err(ShellError::GenericError {
error: "Incorrect value for quantile".into(),
msg: "value should be between 0 and 1".into(),
span: Some(span),
help: None,
inner: vec![],
})
Err(ShellError::GenericError(
"Incorrect value for quantile".to_string(),
"value should be between 0 and 1".to_string(),
Some(span),
None,
Vec::new(),
))
}
}
Value::Error { error, .. } => Err(*error.clone()),
_ => Err(ShellError::GenericError {
error: "Incorrect value for quantile".into(),
msg: "value should be a float".into(),
span: Some(span),
help: None,
inner: vec![],
}),
_ => Err(ShellError::GenericError(
"Incorrect value for quantile".to_string(),
"value should be a float".to_string(),
Some(span),
None,
Vec::new(),
)),
}
})
.collect::<Result<Vec<f64>, ShellError>>()
@ -250,12 +250,14 @@ fn command(
let res = head.chain(tail).collect::<Vec<Series>>();
DataFrame::new(res)
.map_err(|e| ShellError::GenericError {
error: "Dataframe Error".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Dataframe Error".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}
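
The quantile arguments above are checked against the inclusive range 0.0..=1.0 before the summary is computed. A tiny sketch of that bounds check (validate_quantiles is a hypothetical helper):

// Hypothetical helper mirroring the 0.0..=1.0 bounds check used for
// quantile arguments in the hunk above.
fn validate_quantiles(quantiles: &[f64]) -> Result<(), String> {
    for &q in quantiles {
        if !(0.0..=1.0).contains(&q) {
            return Err(format!("quantile {q} should be between 0 and 1"));
        }
    }
    Ok(())
}

fn main() {
    assert!(validate_quantiles(&[0.25, 0.5, 0.75]).is_ok());
    assert!(validate_quantiles(&[1.5]).is_err());
}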

View File

@ -97,41 +97,47 @@ fn command(
let index = NuDataFrame::try_from_value(index_value)?.as_series(index_span)?;
let casted = match index.dtype() {
DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => index
.cast(&DataType::UInt32)
.map_err(|e| ShellError::GenericError {
error: "Error casting index list".into(),
msg: e.to_string(),
span: Some(index_span),
help: None,
inner: vec![],
}),
_ => Err(ShellError::GenericError {
error: "Incorrect type".into(),
msg: "Series with incorrect type".into(),
span: Some(call.head),
help: Some("Consider using a Series with type int type".into()),
inner: vec![],
}),
DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => {
index.cast(&DataType::UInt32).map_err(|e| {
ShellError::GenericError(
"Error casting index list".into(),
e.to_string(),
Some(index_span),
None,
Vec::new(),
)
})
}
_ => Err(ShellError::GenericError(
"Incorrect type".into(),
"Series with incorrect type".into(),
Some(call.head),
Some("Consider using a Series with type int type".into()),
Vec::new(),
)),
}?;
let indices = casted.u32().map_err(|e| ShellError::GenericError {
error: "Error casting index list".into(),
msg: e.to_string(),
span: Some(index_span),
help: None,
inner: vec![],
let indices = casted.u32().map_err(|e| {
ShellError::GenericError(
"Error casting index list".into(),
e.to_string(),
Some(index_span),
None,
Vec::new(),
)
})?;
NuDataFrame::try_from_pipeline(input, call.head).and_then(|df| {
df.as_ref()
.take(indices)
.map_err(|e| ShellError::GenericError {
error: "Error taking values".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error taking values".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
})

View File

@ -58,23 +58,25 @@ fn command(
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let mut file = File::create(&file_name.item).map_err(|e| ShellError::GenericError {
error: "Error with file name".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
let mut file = File::create(&file_name.item).map_err(|e| {
ShellError::GenericError(
"Error with file name".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
IpcWriter::new(&mut file)
.finish(df.as_mut())
.map_err(|e| ShellError::GenericError {
error: "Error saving file".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
})?;
IpcWriter::new(&mut file).finish(df.as_mut()).map_err(|e| {
ShellError::GenericError(
"Error saving file".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

View File

@ -85,23 +85,27 @@ fn command(
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let file = File::create(&file_name.item).map_err(|e| ShellError::GenericError {
error: "Error with file name".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
let file = File::create(&file_name.item).map_err(|e| {
ShellError::GenericError(
"Error with file name".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
AvroWriter::new(file)
.with_compression(compression)
.finish(df.as_mut())
.map_err(|e| ShellError::GenericError {
error: "Error saving file".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error saving file".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

View File

@ -74,53 +74,55 @@ fn command(
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let mut file = File::create(&file_name.item).map_err(|e| ShellError::GenericError {
error: "Error with file name".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
let mut file = File::create(&file_name.item).map_err(|e| {
ShellError::GenericError(
"Error with file name".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
let writer = CsvWriter::new(&mut file);
let writer = if no_header {
writer.include_header(false)
writer.has_header(false)
} else {
writer.include_header(true)
writer.has_header(true)
};
let mut writer = match delimiter {
None => writer,
Some(d) => {
if d.item.len() != 1 {
return Err(ShellError::GenericError {
error: "Incorrect delimiter".into(),
msg: "Delimiter has to be one char".into(),
span: Some(d.span),
help: None,
inner: vec![],
});
return Err(ShellError::GenericError(
"Incorrect delimiter".into(),
"Delimiter has to be one char".into(),
Some(d.span),
None,
Vec::new(),
));
} else {
let delimiter = match d.item.chars().next() {
Some(d) => d as u8,
None => unreachable!(),
};
writer.with_separator(delimiter)
writer.with_delimiter(delimiter)
}
}
};
writer
.finish(df.as_mut())
.map_err(|e| ShellError::GenericError {
error: "Error writing to file".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
})?;
writer.finish(df.as_mut()).map_err(|e| {
ShellError::GenericError(
"Error writing to file".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

View File

@ -58,23 +58,27 @@ fn command(
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let file = File::create(&file_name.item).map_err(|e| ShellError::GenericError {
error: "Error with file name".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
let file = File::create(&file_name.item).map_err(|e| {
ShellError::GenericError(
"Error with file name".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
let buf_writer = BufWriter::new(file);
JsonWriter::new(buf_writer)
.finish(df.as_mut())
.map_err(|e| ShellError::GenericError {
error: "Error saving file".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error saving file".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

View File

@ -58,23 +58,25 @@ fn command(
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
let file = File::create(&file_name.item).map_err(|e| ShellError::GenericError {
error: "Error with file name".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
let file = File::create(&file_name.item).map_err(|e| {
ShellError::GenericError(
"Error with file name".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
ParquetWriter::new(file)
.finish(df.as_mut())
.map_err(|e| ShellError::GenericError {
error: "Error saving file".into(),
msg: e.to_string(),
span: Some(file_name.span),
help: None,
inner: vec![],
})?;
ParquetWriter::new(file).finish(df.as_mut()).map_err(|e| {
ShellError::GenericError(
"Error saving file".into(),
e.to_string(),
Some(file_name.span),
None,
Vec::new(),
)
})?;
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

View File

@ -151,12 +151,14 @@ fn command_eager(
df.as_mut()
.with_column(series)
.map_err(|e| ShellError::GenericError {
error: "Error adding column to dataframe".into(),
msg: e.to_string(),
span: Some(column_span),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error adding column to dataframe".into(),
e.to_string(),
Some(column_span),
None,
Vec::new(),
)
})
.map(|df| {
PipelineData::Value(

View File

@ -319,7 +319,7 @@ macro_rules! lazy_expr_command {
expr_command!(
ExprList,
"dfr implode",
"Aggregates a group to a Series.",
"Aggregates a group to a Series",
vec![Example {
description: "",
example: "",
@ -334,7 +334,7 @@ expr_command!(
expr_command!(
ExprAggGroups,
"dfr agg-groups",
"Creates an agg_groups expression.",
"creates an agg_groups expression",
vec![Example {
description: "",
example: "",
@ -349,7 +349,7 @@ expr_command!(
expr_command!(
ExprCount,
"dfr count",
"Creates a count expression.",
"creates a count expression",
vec![Example {
description: "",
example: "",
@ -364,7 +364,7 @@ expr_command!(
expr_command!(
ExprNot,
"dfr expr-not",
"Creates a not expression.",
"creates a not expression",
vec![Example {
description: "Creates a not expression",
example: "(dfr col a) > 2) | dfr expr-not",
@ -379,7 +379,7 @@ expr_command!(
lazy_expr_command!(
ExprMax,
"dfr max",
"Creates a max expression or aggregates columns to their max value.",
"Creates a max expression or aggregates columns to their max value",
vec![
Example {
description: "Max value from columns in a dataframe",
@ -424,7 +424,7 @@ lazy_expr_command!(
lazy_expr_command!(
ExprMin,
"dfr min",
"Creates a min expression or aggregates columns to their min value.",
"Creates a min expression or aggregates columns to their min value",
vec![
Example {
description: "Min value from columns in a dataframe",
@ -469,7 +469,7 @@ lazy_expr_command!(
lazy_expr_command!(
ExprSum,
"dfr sum",
"Creates a sum expression for an aggregation or aggregates columns to their sum value.",
"Creates a sum expression for an aggregation or aggregates columns to their sum value",
vec![
Example {
description: "Sums all columns in a dataframe",
@ -514,7 +514,7 @@ lazy_expr_command!(
lazy_expr_command!(
ExprMean,
"dfr mean",
"Creates a mean expression for an aggregation or aggregates columns to their mean value.",
"Creates a mean expression for an aggregation or aggregates columns to their mean value",
vec![
Example {
description: "Mean value from columns in a dataframe",
@ -559,7 +559,7 @@ lazy_expr_command!(
expr_command!(
ExprMedian,
"dfr median",
"Creates a median expression for an aggregation.",
"Creates a median expression for an aggregation",
vec![Example {
description: "Median aggregation for a group-by",
example: r#"[[a b]; [one 2] [one 4] [two 1]]
@ -590,7 +590,7 @@ expr_command!(
lazy_expr_command!(
ExprStd,
"dfr std",
"Creates a std expression for an aggregation of std value from columns in a dataframe.",
"Creates a std expression for an aggregation of std value from columns in a dataframe",
vec![
Example {
description: "Std value from columns in a dataframe",
@ -636,7 +636,7 @@ lazy_expr_command!(
lazy_expr_command!(
ExprVar,
"dfr var",
"Create a var expression for an aggregation.",
"Create a var expression for an aggregation",
vec![
Example {
description:

View File

@ -15,7 +15,7 @@ impl Command for ExprOtherwise {
}
fn usage(&self) -> &str {
"Completes a when expression."
"completes a when expression."
}
fn signature(&self) -> Signature {

View File

@ -125,13 +125,13 @@ impl Command for LazyAggregate {
let dtype = schema.get(name.as_str());
if matches!(dtype, Some(DataType::Object(..))) {
return Err(ShellError::GenericError {
error: "Object type column not supported for aggregation".into(),
msg: format!("Column '{name}' is type Object"),
span: Some(call.head),
help: Some("Aggregations cannot be performed on Object type columns. Use dtype command to check column types".into()),
inner: vec![],
});
return Err(ShellError::GenericError(
"Object type column not supported for aggregation".into(),
format!("Column '{name}' is type Object"),
Some(call.head),
Some("Aggregations cannot be performed on Object type columns. Use dtype command to check column types".into()),
Vec::new(),
));
}
}
}
@ -171,7 +171,7 @@ fn get_col_name(expr: &Expr) -> Option<String> {
| Expr::Slice { input: expr, .. }
| Expr::Cast { expr, .. }
| Expr::Sort { expr, .. }
| Expr::Gather { expr, .. }
| Expr::Take { expr, .. }
| Expr::SortBy { expr, .. }
| Expr::Exclude(expr, _)
| Expr::Alias(expr, _)
@ -189,7 +189,6 @@ fn get_col_name(expr: &Expr) -> Option<String> {
| Expr::RenameAlias { .. }
| Expr::Count
| Expr::Nth(_)
| Expr::SubPlan(_, _)
| Expr::Selector(_) => None,
}
}

View File

@ -16,7 +16,7 @@ impl Command for LazyFetch {
}
fn usage(&self) -> &str {
"Collects the lazyframe to the selected rows."
"collects the lazyframe to the selected rows."
}
fn signature(&self) -> Signature {
@ -67,12 +67,14 @@ impl Command for LazyFetch {
let eager: NuDataFrame = lazy
.into_polars()
.fetch(rows as usize)
.map_err(|e| ShellError::GenericError {
error: "Error fetching rows".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error fetching rows".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?
.into();
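
Not part of the diff: the dfr fetch hunk above boils down to polars' LazyFrame::fetch, which collects the plan using only the first n rows. A minimal sketch under that assumption; the helper name fetch_rows is hypothetical:

use polars::prelude::*;

// Hypothetical helper: collect only the first `rows` rows of a lazy plan.
fn fetch_rows(lazy: LazyFrame, rows: i64) -> PolarsResult<DataFrame> {
    // fetch() caps every scan in the plan at `rows` rows before collecting,
    // unlike collect() followed by head().
    lazy.fetch(rows as usize)
}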

View File

@ -17,7 +17,7 @@ impl Command for LazyFlatten {
}
fn usage(&self) -> &str {
"An alias for dfr explode."
"An alias for dfr explode"
}
fn signature(&self) -> Signature {

View File

@ -121,7 +121,7 @@ lazy_command!(
"dfr reverse",
"Reverses the LazyFrame",
vec![Example {
description: "Reverses the dataframe.",
description: "Reverses the dataframe",
example: "[[a b]; [6 2] [4 2] [2 2]] | dfr into-df | dfr reverse",
result: Some(
NuDataFrame::try_from_columns(vec![
@ -147,7 +147,7 @@ lazy_command!(
lazy_command!(
LazyCache,
"dfr cache",
"Caches operations in a new LazyFrame.",
"Caches operations in a new LazyFrame",
vec![Example {
description: "Caches the result into a new LazyFrame",
example: "[[a b]; [6 2] [4 2] [2 2]] | dfr into-df | dfr reverse | dfr cache",

View File

@ -16,7 +16,7 @@ impl Command for LazySortBy {
}
fn usage(&self) -> &str {
"Sorts a lazy dataframe based on expression(s)."
"sorts a lazy dataframe based on expression(s)."
}
fn signature(&self) -> Signature {
@ -118,13 +118,13 @@ impl Command for LazySortBy {
.get_flag::<Value>(engine_state, stack, "reverse")?
.expect("already checked and it exists")
.span();
return Err(ShellError::GenericError {
error: "Incorrect list size".into(),
msg: "Size doesn't match expression list".into(),
span: Some(span),
help: None,
inner: vec![],
});
return Err(ShellError::GenericError(
"Incorrect list size".into(),
"Size doesn't match expression list".into(),
Some(span),
None,
Vec::new(),
));
} else {
list
}

View File

@ -78,12 +78,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let bool = series.bool().map_err(|_| ShellError::GenericError {
error: "Error converting to bool".into(),
msg: "all-false only works with series of type bool".into(),
span: Some(call.head),
help: None,
inner: vec![],
let bool = series.bool().map_err(|_| {
ShellError::GenericError(
"Error converting to bool".into(),
"all-false only works with series of type bool".into(),
Some(call.head),
None,
Vec::new(),
)
})?;
let value = Value::bool(!bool.any(), call.head);

View File

@ -78,12 +78,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let bool = series.bool().map_err(|_| ShellError::GenericError {
error: "Error converting to bool".into(),
msg: "all-false only works with series of type bool".into(),
span: Some(call.head),
help: None,
inner: vec![],
let bool = series.bool().map_err(|_| {
ShellError::GenericError(
"Error converting to bool".into(),
"all-false only works with series of type bool".into(),
Some(call.head),
None,
Vec::new(),
)
})?;
let value = Value::bool(bool.all(), call.head);

View File

@ -8,7 +8,6 @@ use nu_protocol::{
Value,
};
use polars::prelude::{DataType, IntoSeries};
use polars_ops::prelude::{cum_max, cum_min, cum_sum};
enum CumType {
Min,
@ -22,13 +21,13 @@ impl CumType {
"min" => Ok(Self::Min),
"max" => Ok(Self::Max),
"sum" => Ok(Self::Sum),
_ => Err(ShellError::GenericError {
error: "Wrong operation".into(),
msg: "Operation not valid for cumulative".into(),
span: Some(span),
help: Some("Allowed values: max, min, sum".into()),
inner: vec![],
}),
_ => Err(ShellError::GenericError(
"Wrong operation".into(),
"Operation not valid for cumulative".into(),
Some(span),
Some("Allowed values: max, min, sum".into()),
Vec::new(),
)),
}
}
@ -109,28 +108,21 @@ fn command(
let series = df.as_series(call.head)?;
if let DataType::Object(_) = series.dtype() {
return Err(ShellError::GenericError {
error: "Found object series".into(),
msg: "Series of type object cannot be used for cumulative operation".into(),
span: Some(call.head),
help: None,
inner: vec![],
});
return Err(ShellError::GenericError(
"Found object series".into(),
"Series of type object cannot be used for cumulative operation".into(),
Some(call.head),
None,
Vec::new(),
));
}
let cum_type = CumType::from_str(&cum_type.item, cum_type.span)?;
let mut res = match cum_type {
CumType::Max => cum_max(&series, reverse),
CumType::Min => cum_min(&series, reverse),
CumType::Sum => cum_sum(&series, reverse),
}
.map_err(|e| ShellError::GenericError {
error: "Error creating cumulative".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
CumType::Max => series.cummax(reverse),
CumType::Min => series.cummin(reverse),
CumType::Sum => series.cumsum(reverse),
};
let name = format!("{}_{}", series.name(), cum_type.to_str());
res.rename(&name);
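
Not part of the diff: the cumulative hunk above is one of the few genuine API differences rather than an error-style rewrite. A minimal sketch of the 0.87.1 side, assuming a polars release (with the cum_agg feature) where Series still exposes cummax/cummin/cumsum and returns a Series directly, which is why the 0.87.1 side has no map_err; the helper name cumulative_op is hypothetical:

use polars::prelude::*;

// Hypothetical helper mirroring the CumType dispatch in the hunk above.
fn cumulative_op(series: &Series, op: &str, reverse: bool) -> Option<Series> {
    let res = match op {
        "max" => series.cummax(reverse), // running maximum
        "min" => series.cummin(reverse), // running minimum
        "sum" => series.cumsum(reverse), // running sum
        _ => return None,                // allowed values: max, min, sum
    };
    Some(res)
}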

View File

@ -68,12 +68,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error casting to string".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error casting to string".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = if not_exact {
@ -83,12 +85,14 @@ fn command(
};
let mut res = res
.map_err(|e| ShellError::GenericError {
error: "Error creating datetime".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error creating datetime".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?
.into_series();

View File

@ -132,12 +132,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error casting to string".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error casting to string".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = if not_exact {
@ -160,12 +162,14 @@ fn command(
};
let mut res = res
.map_err(|e| ShellError::GenericError {
error: "Error creating datetime".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error creating datetime".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?
.into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.day().into_series();
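
Not part of the diff: the following hunks (hour, minute, month, nanosecond, ordinal, second, week, weekday, year) repeat this same pattern with a different accessor. A minimal sketch, assuming polars' DatetimeChunked accessors with the temporal feature enabled; the helper name datetime_day is hypothetical:

use polars::prelude::*;

// Hypothetical helper: extract the day-of-month from a datetime column.
fn datetime_day(series: &Series) -> PolarsResult<Series> {
    let casted = series.datetime()?;   // errors out for non-datetime columns
    Ok(casted.day().into_series())     // one u32 value per row
}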

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.hour().into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.minute().into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.month().into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.nanosecond().into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.ordinal().into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.second().into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.week().into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.weekday().into_series();

View File

@ -65,12 +65,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to datetime type".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to datetime type".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = casted.year().into_series();

View File

@ -67,13 +67,13 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let columns = df.as_ref().get_column_names();
if columns.len() > 1 {
return Err(ShellError::GenericError {
error: "Error using as series".into(),
msg: "dataframe has more than one column".into(),
span: Some(call.head),
help: None,
inner: vec![],
});
return Err(ShellError::GenericError(
"Error using as series".into(),
"dataframe has more than one column".into(),
Some(call.head),
None,
Vec::new(),
));
}
match columns.first() {
@ -85,12 +85,14 @@ fn command(
.lazy()
.select(&[expression])
.collect()
.map_err(|err| ShellError::GenericError {
error: "Error creating index column".into(),
msg: err.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|err| {
ShellError::GenericError(
"Error creating index column".into(),
err.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let value = NuDataFrame::dataframe_into_value(res, call.head);

View File

@ -69,12 +69,14 @@ fn command(
let mut res = df
.as_series(call.head)?
.arg_unique()
.map_err(|e| ShellError::GenericError {
error: "Error extracting unique values".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error extracting unique values".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?
.into_series();
res.rename("arg_unique");

View File

@ -86,33 +86,36 @@ fn command(
let indices = NuDataFrame::try_from_value(indices_value)?.as_series(indices_span)?;
let casted = match indices.dtype() {
DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => indices
.as_ref()
.cast(&DataType::UInt32)
.map_err(|e| ShellError::GenericError {
error: "Error casting indices".into(),
msg: e.to_string(),
span: Some(indices_span),
help: None,
inner: vec![],
}),
_ => Err(ShellError::GenericError {
error: "Incorrect type".into(),
msg: "Series with incorrect type".into(),
span: Some(indices_span),
help: Some("Consider using a Series with type int type".into()),
inner: vec![],
}),
DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => {
indices.as_ref().cast(&DataType::UInt32).map_err(|e| {
ShellError::GenericError(
"Error casting indices".into(),
e.to_string(),
Some(indices_span),
None,
Vec::new(),
)
})
}
_ => Err(ShellError::GenericError(
"Incorrect type".into(),
"Series with incorrect type".into(),
Some(indices_span),
Some("Consider using a Series with type int type".into()),
Vec::new(),
)),
}?;
let indices = casted
.u32()
.map_err(|e| ShellError::GenericError {
error: "Error casting indices".into(),
msg: e.to_string(),
span: Some(indices_span),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error casting indices".into(),
e.to_string(),
Some(indices_span),
None,
Vec::new(),
)
})?
.into_iter()
.flatten();
@ -121,85 +124,92 @@ fn command(
let series = df.as_series(call.head)?;
let span = value.span();
let res =
match value {
Value::Int { val, .. } => {
let chunked = series.i64().map_err(|e| ShellError::GenericError {
error: "Error casting to i64".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
let res = match value {
Value::Int { val, .. } => {
let chunked = series.i64().map_err(|e| {
ShellError::GenericError(
"Error casting to i64".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let res = chunked.set_at_idx(indices, Some(val)).map_err(|e| {
ShellError::GenericError(
"Error setting value".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
Value::Float { val, .. } => {
let chunked = series.f64().map_err(|e| {
ShellError::GenericError(
"Error casting to f64".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let res = chunked.set_at_idx(indices, Some(val)).map_err(|e| {
ShellError::GenericError(
"Error setting value".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
Value::String { val, .. } => {
let chunked = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error casting to string".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let res = chunked
.set_at_idx(indices, Some(val.as_ref()))
.map_err(|e| {
ShellError::GenericError(
"Error setting value".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let res = chunked.set_at_idx(indices, Some(val)).map_err(|e| {
ShellError::GenericError {
error: "Error setting value".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
}
})?;
let mut res = res.into_series();
res.rename("string");
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
Value::Float { val, .. } => {
let chunked = series.f64().map_err(|e| ShellError::GenericError {
error: "Error casting to f64".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
})?;
let res = chunked.set_at_idx(indices, Some(val)).map_err(|e| {
ShellError::GenericError {
error: "Error setting value".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
}
})?;
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
Value::String { val, .. } => {
let chunked = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error casting to string".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
})?;
let res = chunked
.set_at_idx(indices, Some(val.as_ref()))
.map_err(|e| ShellError::GenericError {
error: "Error setting value".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
})?;
let mut res = res.into_series();
res.rename("string");
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
_ => Err(ShellError::GenericError {
error: "Incorrect value type".into(),
msg: format!(
"this value cannot be set in a series of type '{}'",
series.dtype()
),
span: Some(span),
help: None,
inner: vec![],
}),
};
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
_ => Err(ShellError::GenericError(
"Incorrect value type".into(),
format!(
"this value cannot be set in a series of type '{}'",
series.dtype()
),
Some(span),
None,
Vec::new(),
)),
};
res.map(|df| PipelineData::Value(NuDataFrame::into_value(df, call.head), None))
}
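
Not part of the diff: the hunk above (setting values at given row indices) centres on ChunkedArray::set_at_idx. A minimal sketch assuming the older polars name set_at_idx (later renamed scatter) and an Int64 column; the helper name set_ints_at is hypothetical:

use polars::prelude::*;

// Hypothetical helper: overwrite the values at the given row indices of an i64 column.
fn set_ints_at(series: &Series, indices: Vec<IdxSize>, val: i64) -> PolarsResult<Series> {
    let chunked = series.i64()?;                        // must be an Int64 column
    let updated = chunked.set_at_idx(indices, Some(val))?;
    Ok(updated.into_series())
}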

View File

@ -94,12 +94,14 @@ fn command(
let mut res = df
.as_ref()
.is_duplicated()
.map_err(|e| ShellError::GenericError {
error: "Error finding duplicates".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error finding duplicates".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?
.into_series();

View File

@ -79,12 +79,14 @@ fn command(
let other = other_df.as_series(other_span)?;
let mut res = is_in(&df, &other)
.map_err(|e| ShellError::GenericError {
error: "Error finding in other".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error finding in other".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?
.into_series();

View File

@ -93,12 +93,14 @@ fn command(
let mut res = df
.as_ref()
.is_unique()
.map_err(|e| ShellError::GenericError {
error: "Error finding unique values".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error finding unique values".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?
.into_series();

View File

@ -68,12 +68,14 @@ fn command(
) -> Result<PipelineData, ShellError> {
let series = df.as_series(call.head)?;
let bool = series.bool().map_err(|e| ShellError::GenericError {
error: "Error inverting mask".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let bool = series.bool().map_err(|e| {
ShellError::GenericError(
"Error inverting mask".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = bool.not();

View File

@ -85,20 +85,22 @@ fn command(
let mask = NuDataFrame::try_from_value(mask_value)?.as_series(mask_span)?;
let bool_mask = match mask.dtype() {
DataType::Boolean => mask.bool().map_err(|e| ShellError::GenericError {
error: "Error casting to bool".into(),
msg: e.to_string(),
span: Some(mask_span),
help: None,
inner: vec![],
}),
_ => Err(ShellError::GenericError {
error: "Incorrect type".into(),
msg: "can only use bool series as mask".into(),
span: Some(mask_span),
help: None,
inner: vec![],
DataType::Boolean => mask.bool().map_err(|e| {
ShellError::GenericError(
"Error casting to bool".into(),
e.to_string(),
Some(mask_span),
None,
Vec::new(),
)
}),
_ => Err(ShellError::GenericError(
"Incorrect type".into(),
"can only use bool series as mask".into(),
Some(mask_span),
None,
Vec::new(),
)),
}?;
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
@ -106,64 +108,70 @@ fn command(
let span = value.span();
let res = match value {
Value::Int { val, .. } => {
let chunked = series.i64().map_err(|e| ShellError::GenericError {
error: "Error casting to i64".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
let chunked = series.i64().map_err(|e| {
ShellError::GenericError(
"Error casting to i64".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let res = chunked
.set(bool_mask, Some(val))
.map_err(|e| ShellError::GenericError {
error: "Error setting value".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
})?;
let res = chunked.set(bool_mask, Some(val)).map_err(|e| {
ShellError::GenericError(
"Error setting value".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
Value::Float { val, .. } => {
let chunked = series.f64().map_err(|e| ShellError::GenericError {
error: "Error casting to f64".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
let chunked = series.f64().map_err(|e| {
ShellError::GenericError(
"Error casting to f64".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let res = chunked
.set(bool_mask, Some(val))
.map_err(|e| ShellError::GenericError {
error: "Error setting value".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
})?;
let res = chunked.set(bool_mask, Some(val)).map_err(|e| {
ShellError::GenericError(
"Error setting value".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
Value::String { val, .. } => {
let chunked = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error casting to string".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
let chunked = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error casting to string".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let res = chunked.set(bool_mask, Some(val.as_ref())).map_err(|e| {
ShellError::GenericError {
error: "Error setting value".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
}
ShellError::GenericError(
"Error setting value".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let mut res = res.into_series();
@ -171,16 +179,16 @@ fn command(
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
}
_ => Err(ShellError::GenericError {
error: "Incorrect value type".into(),
msg: format!(
_ => Err(ShellError::GenericError(
"Incorrect value type".into(),
format!(
"this value cannot be set in a series of type '{}'",
series.dtype()
),
span: Some(span),
help: None,
inner: vec![],
}),
Some(span),
None,
Vec::new(),
)),
};
res.map(|df| PipelineData::Value(NuDataFrame::into_value(df, call.head), None))

View File

@ -83,16 +83,15 @@ fn command(
call: &Call,
df: NuDataFrame,
) -> Result<PipelineData, ShellError> {
let res = df
.as_series(call.head)?
.n_unique()
.map_err(|e| ShellError::GenericError {
error: "Error counting unique values".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
let res = df.as_series(call.head)?.n_unique().map_err(|e| {
ShellError::GenericError(
"Error counting unique values".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let value = Value::int(res as i64, call.head);

View File

@ -23,13 +23,13 @@ impl RollType {
"max" => Ok(Self::Max),
"sum" => Ok(Self::Sum),
"mean" => Ok(Self::Mean),
_ => Err(ShellError::GenericError {
error: "Wrong operation".into(),
msg: "Operation not valid for cumulative".into(),
span: Some(span),
help: Some("Allowed values: min, max, sum, mean".into()),
inner: vec![],
}),
_ => Err(ShellError::GenericError(
"Wrong operation".into(),
"Operation not valid for cumulative".into(),
Some(span),
Some("Allowed values: min, max, sum, mean".into()),
Vec::new(),
)),
}
}
@ -129,13 +129,13 @@ fn command(
let series = df.as_series(call.head)?;
if let DataType::Object(_) = series.dtype() {
return Err(ShellError::GenericError {
error: "Found object series".into(),
msg: "Series of type object cannot be used for rolling operation".into(),
span: Some(call.head),
help: None,
inner: vec![],
});
return Err(ShellError::GenericError(
"Found object series".into(),
"Series of type object cannot be used for rolling operation".into(),
Some(call.head),
None,
Vec::new(),
));
}
let roll_type = RollType::from_str(&roll_type.item, roll_type.span)?;
@ -158,12 +158,14 @@ fn command(
RollType::Mean => series.rolling_mean(rolling_opts),
};
let mut res = res.map_err(|e| ShellError::GenericError {
error: "Error calculating rolling values".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let mut res = res.map_err(|e| {
ShellError::GenericError(
"Error calculating rolling values".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let name = format!("{}_{}", series.name(), roll_type.to_str());

View File

@ -9,8 +9,6 @@ use nu_protocol::{
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
};
use polars_plan::prelude::lit;
#[derive(Clone)]
pub struct Shift;
@ -100,7 +98,7 @@ fn command_lazy(
let lazy: NuLazyFrame = match fill {
Some(fill) => {
let expr = NuExpression::try_from_value(fill)?.into_polars();
lazy.shift_and_fill(lit(shift), expr).into()
lazy.shift_and_fill(shift, expr).into()
}
None => lazy.shift(shift).into(),
};

View File

@ -78,21 +78,25 @@ fn command(
let other_df = NuDataFrame::try_from_value(other)?;
let other_series = other_df.as_series(other_span)?;
let other_chunked = other_series.utf8().map_err(|e| ShellError::GenericError {
error: "The concatenate only with string columns".into(),
msg: e.to_string(),
span: Some(other_span),
help: None,
inner: vec![],
let other_chunked = other_series.utf8().map_err(|e| {
ShellError::GenericError(
"The concatenate only with string columns".into(),
e.to_string(),
Some(other_span),
None,
Vec::new(),
)
})?;
let series = df.as_series(call.head)?;
let chunked = series.utf8().map_err(|e| ShellError::GenericError {
error: "The concatenate only with string columns".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let chunked = series.utf8().map_err(|e| {
ShellError::GenericError(
"The concatenate only with string columns".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let mut res = chunked.concat(other_chunked);

View File

@ -74,23 +74,25 @@ fn command(
let pattern: String = call.req(engine_state, stack, 0)?;
let series = df.as_series(call.head)?;
let chunked = series.utf8().map_err(|e| ShellError::GenericError {
error: "The contains command only with string columns".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let chunked = series.utf8().map_err(|e| {
ShellError::GenericError(
"The contains command only with string columns".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let res = chunked
.contains(&pattern, false)
.map_err(|e| ShellError::GenericError {
error: "Error searching in series".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
let res = chunked.contains(&pattern, false).map_err(|e| {
ShellError::GenericError(
"Error searching in series".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
NuDataFrame::try_from_series(vec![res.into_series()], call.head)
.map(|df| PipelineData::Value(NuDataFrame::into_value(df, call.head), None))

View File

@ -86,23 +86,25 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let chunked = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error conversion to string".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let chunked = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error conversion to string".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let mut res = chunked
.replace(&pattern, &replace)
.map_err(|e| ShellError::GenericError {
error: "Error finding pattern other".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
let mut res = chunked.replace(&pattern, &replace).map_err(|e| {
ShellError::GenericError(
"Error finding pattern other".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
res.rename(series.name());

View File

@ -86,24 +86,25 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let chunked = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error conversion to string".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
let chunked = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error conversion to string".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
let mut res =
chunked
.replace_all(&pattern, &replace)
.map_err(|e| ShellError::GenericError {
error: "Error finding pattern other".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
let mut res = chunked.replace_all(&pattern, &replace).map_err(|e| {
ShellError::GenericError(
"Error finding pattern other".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
res.rename(series.name());

View File

@ -63,15 +63,17 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let chunked = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error casting to string".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The str-lengths command can only be used with string columns".into()),
inner: vec![],
let chunked = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error casting to string".into(),
e.to_string(),
Some(call.head),
Some("The str-lengths command can only be used with string columns".into()),
Vec::new(),
)
})?;
let res = chunked.as_ref().str_len_bytes().into_series();
let res = chunked.as_ref().str_lengths().into_series();
NuDataFrame::try_from_series(vec![res], call.head)
.map(|df| PipelineData::Value(NuDataFrame::into_value(df, call.head), None))
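
Not part of the diff: the string-length hunk above is another genuine API change, str_len_bytes on main versus str_lengths in the polars release pinned by 0.87.1. A minimal sketch of the older call, assuming polars is built with its strings feature; the helper name utf8_lengths is hypothetical:

use polars::prelude::*;

// Hypothetical helper: per-row byte length of a string column.
fn utf8_lengths(series: &Series) -> PolarsResult<Series> {
    let chunked = series.utf8()?;                   // fails for non-string columns
    Ok(chunked.str_lengths().into_series())         // u32 length per row
}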

View File

@ -75,15 +75,25 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let chunked = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error casting to string".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The str-slice command can only be used with string columns".into()),
inner: vec![],
let chunked = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error casting to string".into(),
e.to_string(),
Some(call.head),
Some("The str-slice command can only be used with string columns".into()),
Vec::new(),
)
})?;
let mut res = chunked.str_slice(start, length);
let mut res = chunked.str_slice(start, length).map_err(|e| {
ShellError::GenericError(
"Error slicing series".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?;
res.rename(series.name());
NuDataFrame::try_from_series(vec![res.into_series()], call.head)

View File

@ -72,22 +72,26 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.datetime().map_err(|e| ShellError::GenericError {
error: "Error casting to date".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The str-slice command can only be used with string columns".into()),
inner: vec![],
let casted = series.datetime().map_err(|e| {
ShellError::GenericError(
"Error casting to date".into(),
e.to_string(),
Some(call.head),
Some("The str-slice command can only be used with string columns".into()),
Vec::new(),
)
})?;
let res = casted
.strftime(&fmt)
.map_err(|e| ShellError::GenericError {
error: "Error formatting datetime".into(),
msg: e.to_string(),
span: Some(call.head),
help: None,
inner: vec![],
.map_err(|e| {
ShellError::GenericError(
"Error formatting datetime".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})?
.into_series();

View File

@ -67,12 +67,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error casting to string".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The str-slice command can only be used with string columns".into()),
inner: vec![],
let casted = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error casting to string".into(),
e.to_string(),
Some(call.head),
Some("The str-slice command can only be used with string columns".into()),
Vec::new(),
)
})?;
let mut res = casted.to_lowercase();

View File

@ -71,12 +71,14 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let casted = series.utf8().map_err(|e| ShellError::GenericError {
error: "Error casting to string".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The str-slice command can only be used with string columns".into()),
inner: vec![],
let casted = series.utf8().map_err(|e| {
ShellError::GenericError(
"Error casting to string".into(),
e.to_string(),
Some(call.head),
Some("The str-slice command can only be used with string columns".into()),
Vec::new(),
)
})?;
let mut res = casted.to_uppercase();

View File

@ -96,12 +96,14 @@ fn command_eager(
) -> Result<PipelineData, ShellError> {
let series = df.as_series(call.head)?;
let res = series.unique().map_err(|e| ShellError::GenericError {
error: "Error calculating unique values".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The str-slice command can only be used with string columns".into()),
inner: vec![],
let res = series.unique().map_err(|e| {
ShellError::GenericError(
"Error calculating unique values".into(),
e.to_string(),
Some(call.head),
Some("The str-slice command can only be used with string columns".into()),
Vec::new(),
)
})?;
NuDataFrame::try_from_series(vec![res.into_series()], call.head)

View File

@ -70,15 +70,15 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let series = df.as_series(call.head)?;
let res = series
.value_counts(false, false)
.map_err(|e| ShellError::GenericError {
error: "Error calculating value counts values".into(),
msg: e.to_string(),
span: Some(call.head),
help: Some("The str-slice command can only be used with string columns".into()),
inner: vec![],
})?;
let res = series.value_counts(false, false).map_err(|e| {
ShellError::GenericError(
"Error calculating value counts values".into(),
e.to_string(),
Some(call.head),
Some("The str-slice command can only be used with string columns".into()),
Vec::new(),
)
})?;
Ok(PipelineData::Value(
NuDataFrame::dataframe_into_value(res, call.head),

Some files were not shown because too many files have changed in this diff Show More