Compare commits

...

134 Commits

Author SHA1 Message Date
JT
da104050e6 Update release.yml 2022-05-04 09:50:33 +12:00
JT
d306b834ca Bump to 0.62 (#5422) 2022-05-04 09:01:27 +12:00
d4371438d1 Pin reedline to v0.5.0 for the next release (#5427)
Release notes: https://github.com/nushell/reedline/releases/tag/v0.5.0

Co-authored-by: JT <547158+jntrnr@users.noreply.github.com>
2022-05-04 08:11:31 +12:00
6a972312d4 added open editor event in config parsing (#5426) 2022-05-04 07:52:53 +12:00
ac48f5a318 Fix coloring when string has spaces (#5425)
* Replace ansi-cut with ansi-str

There are no issues with it; we just need to use it later.

Signed-off-by: Maxim Zhiburt <zhiburt@gmail.com>

* Fix color loss when splitting a string into sublines

Signed-off-by: Maxim Zhiburt <zhiburt@gmail.com>
2022-05-03 09:48:43 -05:00
JT
e36649f74b Update path completions to handle spaces (#5419) 2022-05-03 12:37:38 +12:00
1a52460695 Database commands (#5417)
* database access commands

* select expression

* select using expressions

* cargo fmt

* alias for database

* database where command

* expression operations

* `and` and `or` operators

* limit and sort by commands
2022-05-03 06:38:18 +12:00
ab98ecd55b Fix erroneous removal of "./" folder prefix (#5416) 2022-05-02 12:36:18 -05:00
9a8e939cbe remove ctrl-l from config.nu (#5415) 2022-05-02 08:31:52 -07:00
bb27b9f371 Don't resuggest accepted completions (#5369)
To avoid resuggesting the same completion, add a space after commands or flags that have been accepted via `Enter`. Don't do that for file paths or external completions.

* Add append_whitespace choice for suggestion

Signed-off-by: gipsyh <gipsyh.icu@gmail.com>

* Fixed `test <path>` appending space.

* Update reedline

Co-authored-by: sholderbach <sholderbach@users.noreply.github.com>
2022-05-02 11:35:37 +02:00
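
For the "Don't resuggest accepted completions" change above: reedline's `Suggestion` struct (also visible in the nu-cli diffs further down) carries an `append_whitespace` flag. A minimal sketch of how a completer can mark a command suggestion so that accepting it moves the cursor past the word; the helper itself is illustrative:

```
use reedline::{Span, Suggestion};

// Sketch only: a command or flag suggestion sets append_whitespace so that
// accepting it with Enter inserts a trailing space; a file path or external
// completion would leave the flag false.
fn command_suggestion(name: &str, start: usize, end: usize) -> Suggestion {
    Suggestion {
        value: name.to_string(),
        description: None,
        extra: None,
        span: Span { start, end },
        append_whitespace: true,
    }
}
```
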
1ca3063ac3 Fix CI to run doctests again (#5410)
The faster `cargo nextest` currently doesn't support running the doctests.

Thus, add an additional step for them with cargo's default test runner.

- Fix doctests for the `nu-pretty-hex` crate
2022-05-02 11:32:57 +02:00
7c9a78d922 Fixed ctrl-c in recursion loop bug #5362 (#5409) 2022-05-02 20:18:25 +12:00
49cbc30974 Add ends-with operator and fix dataframe operator behavior (#5395)
* add ends-with operator

* escape needles in dataframe operator regex patterns
2022-05-02 20:02:38 +12:00
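
The dataframe change itself is not part of this compare view, but escaping a user-supplied needle before embedding it in a regex pattern is conventionally done with `regex::escape`; a small sketch under that assumption (the `ends_with_pattern` helper is hypothetical):

```
use regex::Regex;

// Hypothetical helper: build an anchored ends-with pattern from a literal
// needle, escaping metacharacters such as '.' so ".txt" does not also
// match "_txt".
fn ends_with_pattern(needle: &str) -> Regex {
    Regex::new(&format!("{}$", regex::escape(needle))).expect("valid pattern")
}

fn main() {
    let re = ends_with_pattern(".txt");
    assert!(re.is_match("notes.txt"));
    assert!(!re.is_match("notes_txt"));
}
```
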
07a7bb14bf Fixed interrupting a for-loop over a list bug #5378 (#5408)
Signed-off-by: gipsyh <gipsyh.icu@gmail.com>
2022-05-02 19:56:37 +12:00
74f1c5b67b CI: Add plugins job (#5406) 2022-05-02 19:20:57 +12:00
3b0151aba6 event ClearScrollback is now working in reedline / update default config.nu (#5405) 2022-05-02 19:20:24 +12:00
JT
4a69819f9a Rename =^ to 'starts-with' (#5407) 2022-05-02 19:20:07 +12:00
1f7d3498cd Bump reedline (#5404)
- Fix to the `ClearScrollback` command
- Fix of vi mode `x` so it adds the character to the clipboard
- Vi mode shorthands `s` and `S`
2022-05-02 13:14:24 +12:00
f0b9dc9da1 CI: build virtualenv tests in dev profile for speed (#5396) 2022-05-02 10:01:36 +12:00
JT
96f8691c8d More escaping/unescaping fixes (#5403) 2022-05-02 09:49:31 +12:00
07255e576d Add Miette "fancy" feature to fix plugin builds (#5402) 2022-05-02 08:52:49 +12:00
260be40774 Update reedline to use partial completion changes (#5401) 2022-05-02 08:41:25 +12:00
JT
14c9bd44ef Adds error printing back in a couple places (#5400) 2022-05-02 08:40:46 +12:00
JT
92785ab92c Add unescaping to external command parsing (#5399) 2022-05-02 07:26:29 +12:00
JT
98ab31e15e Move uses of trim_quotes to unescape for filenames (#5398)
* Move uses of trim_quotes to unescape for filenames

* Fix Windows tests
2022-05-02 06:37:20 +12:00
80d57d70cd a little database cleanup (#5394) 2022-05-01 07:44:29 -05:00
8dc199d817 Fix PATH update example (#5393) 2022-05-01 14:53:59 +03:00
435693a8bb Line buffer keybinding (#5390)
* database access commands

* select expression

* select using expressions

* cargo fmt

* change keybinding
2022-05-01 08:59:49 +01:00
5077242892 Error printing changes for watch (#5389)
* Move CliError to nu-protocol

clean up comment

* Enable printing errors instead of just returning them

* Nicer Miette error printing in watch command
2022-05-01 19:33:41 +12:00
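
The watch command itself isn't part of this compare view; the usual pattern for nicer Miette output is to wrap the error in a `miette::Report` so that Debug formatting renders the graphical diagnostic (a sketch, not the exact nushell code; it relies on miette's `fancy` feature, which the Cargo.toml diff below enables):

```
fn main() {
    // Printing a miette::Report with {:?} produces the "fancy" graphical
    // diagnostic instead of a plain Display string.
    let report: miette::Report = miette::miette!("watch failed: could not read directory");
    eprintln!("{:?}", report);
}
```
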
7a7aa310aa Remove 'empty' block support reminders, for now. (#5214) 2022-04-30 22:32:30 -05:00
07893e01c1 Remove "./" prefix for file path completions (#5387) 2022-04-30 16:54:04 -05:00
JT
f16401152b Make if else more lazy (#5386) 2022-05-01 09:13:21 +12:00
3df03e2e6d nu-cli/completions: complete external args as filepath (#5385) 2022-05-01 08:07:09 +12:00
7c6f976d65 nu-cli/completions: apply correctly nesting for env vars (#5382) 2022-04-30 14:14:04 -05:00
ae9c0fc138 Fix quoting for command line args (#5384)
* Fix quoting for command line args

* Replace custom quoting with escape_quote_string

* Use raw string for now
2022-04-30 13:23:05 -05:00
9da2e142b2 Line buffer editor (#5381)
* allow line editing

* cargo fmt
2022-04-30 15:40:41 +01:00
5999506f87 allows for nushell to have tables without the index column (#5380) 2022-04-30 09:07:46 -05:00
1fc7abcc38 Faster CI (#5374)
* More-parallel CI

* Split all+default caches

* Rename ci job to build-clippy

* cargo nextest

* Remove fmt from tests
2022-04-29 22:48:04 +03:00
2659ea3dbd Revert "nu-cli/completions: better fix for files with special characters (#5254)" (#5372)
This reverts commit 3cf3329e49.
2022-04-29 13:11:41 -05:00
fa27110651 Avoid using time conversion methods that may panic (#5365) 2022-04-29 06:03:39 -05:00
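
Background for the panic fix above (not the exact diff): chrono's `TimeZone::timestamp` panics on out-of-range input, while the `_opt` variants return a `LocalResult` the caller can handle. A sketch of the safer pattern:

```
use chrono::{LocalResult, TimeZone, Utc};

// Sketch: convert a possibly out-of-range Unix timestamp without panicking.
fn to_datetime(secs: i64) -> Option<chrono::DateTime<Utc>> {
    match Utc.timestamp_opt(secs, 0) {
        LocalResult::Single(dt) => Some(dt),
        // Ambiguous or invalid instants are reported instead of panicking.
        _ => None,
    }
}

fn main() {
    assert!(to_datetime(0).is_some());
    assert!(to_datetime(i64::MAX).is_none()); // Utc.timestamp() would panic here
}
```
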
b4f8798a3a rust-cache fix (#5359)
* Enable CI on merges to main

* Re-enable rust-cache for virtualenv tests
2022-04-28 17:57:26 -05:00
7714956276 CI: remove rust-cache from virtualenv tests (#5358) 2022-04-28 15:27:18 -05:00
8e5cc655e9 cleanup version command and add in database feature (#5356)
* cleanup version command and add in database feature

* static-link-openssl
2022-04-28 15:25:04 -05:00
c78e28511d CI: make Clippy reuse build artifacts, other cleanup (#5357)
* CI: move clippy after build so it can reuse build artifacts

* CI: Remove unused rustfmt+clippy from venv
2022-04-28 14:39:21 -05:00
f189369fd7 Change description of sort (#5355) 2022-04-28 14:33:26 -05:00
2516305fa8 CI: enable rust-cache, remove minimal (#5354)
* Enable rust-cache

Add cache buster key

Add rust-cache to python venv

* Remove minimal CI
2022-04-28 13:18:27 -05:00
f2d7454330 Add watch command (#5331) 2022-04-28 09:26:34 -05:00
3cf3329e49 nu-cli/completions: better fix for files with special characters (#5254)
* nu-cli/completions: fix paths with special chars

* add backticks

* fix replace

* added single quotes to check list

* check escape using fold

* fix clippy errors

* fix comment line

* fix conflicts

* change to vec

* skip sort checking

* removed invalid windows path

* remove comment

* added tests for escape function

* fix fn import

* fix fn import error

* test windows issue fix

* fix windows backslash path in the tests

* show expected path on error

* skip test for windows
2022-04-28 08:36:32 -05:00
d2bc2dcbb2 Openssl feature (#5352)
* Move statically linked OpenSSL behind a feature

* Re-add README.txt for releases
2022-04-28 06:33:17 -05:00
4ec4649903 mute false import warning for nu-command test where_ (#5350) 2022-04-27 22:45:39 -07:00
55e5106695 Statically link OpenSSL (#5349) 2022-04-28 12:25:09 +12:00
5f35e4ad1e improve inc plugin docs (#5346)
This is a convenience for anyone using GitHub features to copy and paste directly into their local shell.
2022-04-27 18:56:32 -05:00
e7831d38ae fixes an issue with an empty selector panic (#5345)
* fixes an issue with an empty selector panic

* missed web_tables

* oops, missed a test
2022-04-27 07:38:36 -05:00
5c9fe85ec4 Database commands (#5343)
* database access commands

* select expression

* select using expressions

* cargo fmt
2022-04-27 11:52:31 +01:00
cd5199de31 db info tweaks (#5338)
* Rename db info to db schema

* Change db schema to take db as input
2022-04-26 18:16:46 -05:00
5319544481 db info command (#5335)
* db info WIP

* working now

* clippy
2022-04-26 14:20:59 -05:00
JT
be3f0edc97 Fix 'range' range exclusive (#5334) 2022-04-26 13:39:38 -05:00
fb8f7b114e Fix use of export/alias --help bug (#5332)
* fix alias --help bug

Signed-off-by: SuYuheng <yuheng.su@motiong.com>

* fix export --help bug

Signed-off-by: SuYuheng <yuheng.su@motiong.com>

Co-authored-by: SuYuheng <yuheng.su@motiong.com>
2022-04-26 11:51:49 -05:00
187f2454c8 Move print_pipeline_data to nu-protocol (#5328) 2022-04-26 11:44:57 +12:00
JT
3492d4015d Allow bare words to interpolate (#5327)
* Allow bare words to interpolate

* fix highlighting
2022-04-26 11:44:44 +12:00
190f379ff3 activates optional trim in 'from csv' and 'from tsv' (#5326) 2022-04-25 12:54:14 -05:00
5c2bc73d7b Allows cd (and other commands that depend on current working directory) to use path of type '~user' (#5323)
* Added search terms to math commands

* Attempts to add ~user.

From: // Extend this to work with "~user" style of home paths

* Clippy recommendation

* clippy suggestions, again.

* fixing non-compilation on windows and macos

* fmt apparently does not like my imports

* even more clippy issues.

* less expect(), single conversion, match. Should work for MacOS too.

* Attempted to add functionality for windows: all it does is take the home path of current user, and replace the username.

* silly mistake in Windows version of user_home_dir()

* Update tilde.rs

* user_home_dir now returns a path instead of a string - should be smoother with no conversions to string

* clippy warnings

* clippy warnings 2

* Changed user_home_dir to return PathBuf now.

* Changed user_home_dir to return PathBuf now.

* forgot to fmt

* fixed windows build errors from modifying pathbuf but not returning it

* fixed windows clippy errors from returning () instead of pathbuf

* forgot to fmt

* borrowed path did not live long enough.

* previously, path.push did not work because rest_of_path started with "/" - it was not relative. Removing the / makes it a relative path again.

* Issue fixed.

* Update tilde.rs

* fmt.

* There is now a zero chance of panic. All expect()s have been removed.

* Patched join_path_relative to accommodate ~user paths. Previously, /some/path/~user might have been passed on; now, ~user is taken as absolute.

* fmt

* clippy errors
2022-04-25 06:01:48 -05:00
aeed8670f1 add database feature to extra (#5322) 2022-04-24 18:26:56 -05:00
b38f90d4c7 Adding ~user tilde recognition in file paths (#5251)
* Added search terms to math commands

* Attempts to add ~user.

From: // Extend this to work with "~user" style of home paths

* Clippy recommendation

* clippy suggestions, again.

* fixing non-compilation on windows and macos

* fmt apparently does not like my imports

* even more clippy issues.

* less expect(), single conversion, match. Should work for MacOS too.

* Attempted to add functionality for windows: all it does is take the home path of current user, and replace the username.

* silly mistake in Windows version of user_home_dir()

* Update tilde.rs

* user_home_dir now returns a path instead of a string - should be smoother with no conversions to string

* clippy warnings

* clippy warnings 2

* Changed user_home_dir to return PathBuf now.

* Changed user_home_dir to return PathBuf now.

* forgot to fmt

* fixed windows build errors from modifying pathbuf but not returning it

* fixed windows clippy errors from returning () instead of pathbuf

* forgot to fmt

* borrowed path did not live long enough.

* previously, path.push did not work because rest_of_path started with "/" - it was not relative. Removing the / makes it a relative path again.

* Issue fixed.

* Update tilde.rs

* fmt.

* There is now a zero chance of panic. All expect()s have been removed.
2022-04-24 17:12:57 -05:00
9771270b38 Fuzzy completion matching (#5320)
* Implement fuzzy match algorithm for suggestions

* Use MatchingAlgorithm for custom completions
2022-04-24 16:43:18 -05:00
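
The matcher behind the fuzzy completion change above comes from the `fuzzy-matcher` crate added to nu-cli's dependencies below; a standalone illustration of the behavior the new `MatchAlgorithm::Fuzzy` variant relies on:

```
use fuzzy_matcher::skim::SkimMatcherV2;
use fuzzy_matcher::FuzzyMatcher;

fn main() {
    let matcher = SkimMatcherV2::default();
    // Characters only need to appear in order, not contiguously.
    assert!(matcher.fuzzy_match("git checkout", "gco").is_some());
    assert!(matcher.fuzzy_match("example text", "mplxt").is_some());
    // No character of the needle appears in the haystack, so no match.
    assert!(matcher.fuzzy_match("example text", "zzz").is_none());
}
```
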
f6b99b2d8f update build status badge (#5321) 2022-04-24 16:28:54 -05:00
JT
ec611526ac Warn if we see let config = ../.. (#5318) 2022-04-25 08:40:55 +12:00
cd2df83ddc nu-command/filesystem: clean whitespaces from paths in cd and open (#5310) 2022-04-25 07:15:33 +12:00
3eb447030b update contrib to max=500 (#5317) 2022-04-24 13:03:20 -05:00
f2a45b3eac Update ci.yml 2022-04-24 08:03:21 -05:00
e94d13da1b Database commands (#5307)
* database commands

* db commands

* filesystem opens sqlite file

* clippy error

* corrected error in ci file

* removes matrix flag from ci

* flax matrix for clippy

* add conditional compile for tests

* add conditional compile for tests

* correct order of command

* correct error msg

* correct typo
2022-04-24 10:29:21 +01:00
c20ba95885 fix: remove println!() from exec builtin (#5311) 2022-04-24 15:24:44 +12:00
8eab311565 consolidate shell integration behind config setting (#5302)
* consolidate shell integration behind config setting

* write output differently
2022-04-24 12:53:12 +12:00
e2b510b65e update sys with new items, add kernel version to os-info (#5308)
* update sys with new items, add kernel version to os-info

* clippy
2022-04-23 16:33:27 -05:00
e6a70f9846 Add MatchAlgorithm for completion suggestions (#5244)
* Pass completion options to each fetch() call

* Add MatchAlgorithm to CompletionOptions

* Add unit test for MatchAlgorithm

* Pass completion options to directory completer
2022-04-23 10:01:19 -05:00
667eb27d1b feat: add search terms to date (#5306)
* add search terms

* add search terms

* add search terms

* add search terms

* add search terms

* add search terms

* add search terms

* add search terms

* add search patterns

* run cargo fmt --all
2022-04-23 08:54:03 -05:00
b9eb213f36 nu-cli/completions: added completion for $nu (#5303) 2022-04-23 11:49:17 +12:00
JT
cc78446ffd Fix cd - (#5301) 2022-04-23 11:48:10 +12:00
5ff2ae628b nu-cli: directory syntax shape + completions (#5299) 2022-04-22 15:18:51 -05:00
661283c4d2 nu-cli/completions: support record for custom completions (#5298) 2022-04-22 15:17:08 -05:00
JT
ee29a15119 Add 'and' and 'or' operators (#5297) 2022-04-23 07:14:31 +12:00
2a18206771 add virtualenv to integrations (#5280) 2022-04-21 06:50:32 -05:00
a26272b44b Clean up tests and unused documentation code (#5273)
* Delete unused documentation code+test

* Fix up test to account for new select behavior
2022-04-21 06:13:58 -05:00
7e730e28bb Delete obsolete+unused files (#5272) 2022-04-21 17:56:56 +12:00
JT
96253c69fb Use better quoting for commandline args (#5271) 2022-04-21 15:31:52 +12:00
JT
ded9d1cedb Some cleanups for clippy (#5266) 2022-04-21 12:08:12 +12:00
d1cc70fc4a update os-info os to name (#5265) 2022-04-21 10:36:39 +12:00
18c9b62b00 git completion: 'git fetch' for remotes (#5253) 2022-04-21 07:52:44 +12:00
1295495758 typo: seach -> search (#5264) 2022-04-21 07:38:24 +12:00
e97ba9b74c feat: add search terms for conversions (#5259) 2022-04-20 11:48:32 -05:00
09b972f1dc add newlines to end of the default configs (#5256) 2022-04-20 07:56:15 -07:00
0fb6f8f93c refactor html module (#5246)
* refactor around html module

* Update html.rs

fix clippy warning

* minify json
2022-04-20 08:50:14 -05:00
995d8db1fe Set to reedline main branch for development cycle (#5249)
Changes to reedline since `v0.4.0`:

- vi normal mode `I` for inserting at line beginning
- `InsertNewline` edit command that can be bound to `Alt-Enter` if
desired to have line breaks without relying on the `Validator`
- `ClearScreen` will directly clear the visible screen. `Signal::CtrlL` has been
removed.
- `ClearScrollback` will clear the screen and scrollback. Can be used to
mimic macOS `Cmd-K` screen clearing. Helps with #5089
2022-04-20 21:10:33 +12:00
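
A rough sketch of wiring these reedline additions up from Rust, assuming reedline's `default_emacs_keybindings` entry point and crossterm's key types; the exact nu-cli integration may differ:

```
use crossterm::event::{KeyCode, KeyModifiers};
use reedline::{default_emacs_keybindings, EditCommand, ReedlineEvent};

fn main() {
    let mut keybindings = default_emacs_keybindings();
    // Alt-Enter inserts a literal newline instead of submitting the line.
    keybindings.add_binding(
        KeyModifiers::ALT,
        KeyCode::Enter,
        ReedlineEvent::Edit(vec![EditCommand::InsertNewline]),
    );
    // ClearScreen clears only what is visible; ClearScrollback also wipes the
    // scrollback, similar to Cmd-K on macOS.
    keybindings.add_binding(
        KeyModifiers::CONTROL,
        KeyCode::Char('l'),
        ReedlineEvent::ClearScreen,
    );
}
```
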
7e97be1dd4 Handle custom values in describe command (#5248) 2022-04-20 16:59:53 +12:00
b501db673a SQLite overhaul: custom value, query db command (#5247)
Clean up query errors
2022-04-20 16:58:21 +12:00
c0ce1e9057 nu-cli/completions: fix file completions with quotes (#5242)
* nu-cli/completions: fix file completions with quotes

* wrap with backticks
2022-04-20 16:54:37 +12:00
4d7b86f278 nu-cli: added tests for file completions (#5232)
* nu-cli: added tests for file completions

* test adding extra sort

* Feature/refactor completion options (#5228)

* Copy completion filter to custom completions

* Remove filter function from completer

This function was a no-op for FileCompletion and CommandCompletion.
Flag- and VariableCompletion just filter with `starts_with`, which
happens in both completers anyway and should therefore also be a no-op.
The remaining use case in CustomCompletion was moved into the
CustomCompletion source file.

Filtering should probably happen immediately while fetching completions
to avoid unnecessary memory allocations.

* Add get_sort_by() to Completer trait

* Remove CompletionOptions from Completer::fetch()

* Fix clippy lints

* Apply Completer changes to DotNuCompletion

* add os to $nu based on rust's understanding (#5243)

* add os to $nu based on rust's understanding

* add a few more constants

Co-authored-by: Richard <Tropid@users.noreply.github.com>
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2022-04-20 16:54:00 +12:00
f2d47f97da add os to $nu based on rust's understanding (#5243)
* add os to $nu based on rust's understanding

* add a few more constants
2022-04-19 14:11:58 -05:00
0de289f6b7 Feature/refactor completion options (#5228)
* Copy completion filter to custom completions

* Remove filter function from completer

This function was a no-op for FileCompletion and CommandCompletion.
Flag- and VariableCompletion just filter with `starts_with`, which
happens in both completers anyway and should therefore also be a no-op.
The remaining use case in CustomCompletion was moved into the
CustomCompletion source file.

Filtering should probably happen immediately while fetching completions
to avoid unnecessary memory allocations.

* Add get_sort_by() to Completer trait

* Remove CompletionOptions from Completer::fetch()

* Fix clippy lints

* Apply Completer changes to DotNuCompletion
2022-04-19 13:59:10 -05:00
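
Under this refactor, `fetch` borrows the options and returns only the suggestions, while sort preferences move into `get_sort_by()`. A skeletal implementation against the trait as it appears in the nu-cli diff below (the `ExampleCompletion` type is illustrative, and the `use` paths assume code living inside the nu-cli crate):

```
use crate::completions::{Completer, CompletionOptions, SortBy};
use nu_protocol::{engine::StateWorkingSet, Span};
use reedline::Suggestion;

struct ExampleCompletion;

impl Completer for ExampleCompletion {
    fn fetch(
        &mut self,
        _working_set: &StateWorkingSet,
        _prefix: Vec<u8>,
        _span: Span,
        _offset: usize,
        _pos: usize,
        _options: &CompletionOptions,
    ) -> Vec<Suggestion> {
        // Filter while fetching (e.g. via _options.match_algorithm) so that
        // discarded suggestions are never allocated in the first place.
        vec![]
    }

    fn get_sort_by(&self) -> SortBy {
        SortBy::LevenshteinDistance
    }
}
```
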
JT
ae674bfaec move config back to config.nu (#5237) 2022-04-19 20:54:25 +12:00
JT
76079d5183 Move config to be an env var (#5230)
* Move config to be an env var

* fix fmt and tests
2022-04-19 10:28:01 +12:00
409f1480f5 allow par-each to receive $in (#5229)
Co-authored-by: Yethal <nosuchemail@email.com>
2022-04-19 10:02:58 +12:00
e206555d9d add custom_completion field to .scope command (#5227) 2022-04-19 10:02:03 +12:00
88ec4186ec Added search terms to math commands (#5224) 2022-04-19 09:33:32 +12:00
dd1d9b7623 nu-cli/completions: completion for use and source (#5210)
* nu-cli/completions: completion for use and source

* handle subfolders for different base dirs

* fix clippy errors
2022-04-19 00:59:13 +12:00
1314a87cb0 update miette and switch to GenericErrors (#5222) 2022-04-19 00:34:10 +12:00
cf65f77b02 Simplify known external tests (#5219)
* Simplify known external tests

* Cargo fmt
2022-04-17 14:31:03 -05:00
c9f05f074a nth -> select command (#5217) 2022-04-17 09:54:24 -05:00
7710317224 Add known external tests (#5216)
* Add known external tests

* Add some documentation to the tests

* Document test_hello example

* Set PWD in run_test
2022-04-17 05:39:56 -05:00
0a990ed105 Simplify known external name recovery (#5213)
Prior to this change we would recover the names for known
externals by looking up the span in the engine state. This would fail
when using an alias for two reasons:

1. In cases where we don't have a subcommand, like this:

```
>>> extern bat [filename: string]
>>> alias b = bat
>>> bat some_file
'b' is not recognized as an internal or external command,
operable program or batch file.
```

The problem is that after alias expansion, we replace the span of the
expanded name with the original alias (this is done to alleviate
unrelated issues). The span contents we look up therefore contain `b`,
the alias, instead of the expanded command name.

2. In cases where there's a subcommand:
```
>>> alias g = git
>>> g push
thread 'main' panicked at 'internal error: span missing in file contents cache', crates\nu-protocol\src\engine\engine_state.rs:474:9
note: run with `RUST_BACKTRACE=1` environment variable to display a
backtrace
```

In this case, the span in the call starts where the expansion for the `g`
alias is defined and ends after `push` on the last command entered. This
is not a proper span and causes a panic when we try to look it up. Note
that this is the case for all expanded aliases that involve a
subcommand, but we never actually try to retrieve the contents for that
span in other cases.

Anyway, the new way of looking up the name is arguably cleaner
regardless of the issues mentioned above. But it's nice that it fixes
them too.

Co-authored-by: Hristo Filaretov <h.filaretov@protonmail.com>
2022-04-16 22:07:38 -05:00
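
The new lookup itself isn't quoted in the message; conceptually it resolves the known external through its declaration id instead of re-reading the source span. A rough sketch under that assumption (`get_decl` and `name` are my reading of nu-protocol's accessors, not code from the PR):

```
use nu_protocol::ast::Call;
use nu_protocol::engine::EngineState;

// Sketch: recover a known external's name from its declaration rather than
// from the span text, which after alias expansion may point at the alias
// (`b`) or at a span that is not in the file contents cache at all.
fn known_external_name(engine_state: &EngineState, call: &Call) -> String {
    engine_state.get_decl(call.decl_id).name().to_string()
}
```
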
a35b975d84 Shell Integration (#5162)
This commit renders ANSI chars in order to provide shell integrations,
such as Kitty's opening feature, which captures the output of the last
command in a pager such as less.

Fixes #5138
2022-04-16 22:03:02 -05:00
6e85b04923 [ls, path relative-to] Fix use of ls ~ | path relative-to ~ (#5212)
* [ls] implement 1b.

> `ls ~` does not return paths relative to the current directory.

We now return `/Users/blah` instead of `../../blah`

* expand lhs and rhs on `path relative-to`

/Users/nimazzuc/projects/nushell〉'~' | path relative-to '~'
/Users/nimazzuc/projects/nushell〉'~/foo' | path relative-to '~'
foo
/Users/nimazzuc/projects/nushell〉'/Users/nimazzuc/foo' | path relative-to '~'
foo
/Users/nimazzuc/projects/nushell〉'~/foo' | path relative-to '/Users/nimazzuc'
foo

* format
2022-04-16 15:05:42 -05:00
4d31139a44 add hex color parsing to ansi (#5209) 2022-04-16 10:44:04 -05:00
1bad40726d cleanup nu-command, remove redundant code (#5208) 2022-04-16 18:16:46 +12:00
cb3276fb3b nu-cli/completions: removed unnecessary bool (#5207) 2022-04-16 13:34:38 +12:00
c17129a92a Fix env capture (#5205)
* Fix env capture

* Add test for env capture
2022-04-16 10:38:27 +12:00
JT
5bf1c98a39 Move to dev version 0.61.1 (#5206) 2022-04-16 09:29:30 +12:00
13b371ab58 nu-cli/completions: add completion for record vars (#5204) 2022-04-16 08:24:41 +12:00
2a3991cfdb nu-cli/completions: add completion for $env. (#5199)
* nu-cli/completions: add completion for $env.

* use stack to avoid showing hidden env vars
2022-04-15 16:17:53 +03:00
583b7b1821 fix: reduce command have not redirected block's evaluation output (#5193)
fixes https://github.com/nushell/nushell/issues/5190
2022-04-15 07:03:16 -05:00
581afc9023 updated cargo.lock with cargo update (#5201) 2022-04-15 06:04:15 -05:00
8e2847431e Avoid duplicating post headers (#5200)
* Avoid duplicating post headers

This should fix #5194

* Update post.rs

Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2022-04-15 06:02:22 -05:00
6a1378c1bb Update README.md 2022-04-15 05:27:42 -05:00
2fe14a7a5a fix timestamp parsing on 32-bit platforms (#5192)
Fixes #5191
2022-04-14 08:52:32 -05:00
7490392eb9 Add char -i for chars from integers (#5183)
* Revert "Allow integer to `char -u` (#5174)"

This reverts commit cfefb65d55.

* Add `char -i`

* Reword example
2022-04-14 08:34:02 -05:00
9844e6125b Fix completions for git push and git checkout close: #5021 and #4599 (#5188) 2022-04-14 08:17:58 -05:00
56af7e8d5f tweak badge (#5187)
added `?branch=main?event=push` to see if it makes any difference.
2022-04-14 06:48:17 -05:00
dc612e7ffb documented ShellError errors. (#5172)
* documented ShellError errors.

* just a few touch-ups

Co-authored-by: JT <547158+jntrnr@users.noreply.github.com>
2022-04-14 17:08:46 +12:00
1d1dbfd04c update crate chrono-tz to its latest version (#5184) 2022-04-13 21:16:08 -07:00
c150e11cb4 Initial SQLite functionality (#5182)
* Add SQLite functionality to open

* Add in-memory SQLite tests

* clippy fixes

* Fix up old SQLite-related tests
2022-04-13 20:15:02 -07:00
87c684c7da don't join paths to cwd ever in calls to external functions (#5180)
This is a follow-up to #5131, since I don't personally like the way it worked.
2022-04-13 21:42:57 +03:00
10792a29f7 allow default color shortcut names (#5177)
* allow default color shortcut names

* clippy
2022-04-13 07:02:15 -05:00
257290acc2 Add a dockerfile example based on debian bullseye-slim (#5176)
* feat: add nu dockerfile, based on debian bullseye

* use aria2 instead of wget for bad network

* some small fix
2022-04-13 14:48:54 +03:00
cfefb65d55 Allow integer to char -u (#5174) 2022-04-13 13:33:08 +03:00
361 changed files with 9484 additions and 2934 deletions


@ -1,32 +1,30 @@
on: [pull_request]
on:
pull_request:
push: # Run CI on the main branch after every merge. This is important to fill the GitHub Actions cache in a way that pull requests can see it
branches:
- main
name: Continuous integration
name: continuous-integration
jobs:
ci:
build-clippy:
strategy:
fail-fast: false
matrix:
platform: [windows-latest, macos-latest, ubuntu-latest]
style: [all, default, minimal]
style: [all, default]
rust:
- stable
include:
- style: all
flags: '--all-features'
flags: "--all-features"
- style: default
flags: ''
- style: minimal
flags: '--no-default-features'
flags: ""
exclude:
- platform: windows-latest
style: default
- platform: windows-latest
style: minimal
- platform: macos-latest
style: default
- platform: macos-latest
style: minimal
runs-on: ${{ matrix.platform }}
@ -41,30 +39,77 @@ jobs:
override: true
components: rustfmt, clippy
- uses: Swatinem/rust-cache@v1
with:
key: ${{ matrix.style }}v1 # increment this to bust the cache if needed
- name: Rustfmt
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- name: Clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
- name: Build Nushell
uses: actions-rs/cargo@v1
with:
command: build
args: ${{ matrix.flags }}
args: --workspace ${{ matrix.flags }}
- name: Clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: --workspace ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
test:
strategy:
fail-fast: false
matrix:
platform: [windows-latest, macos-latest, ubuntu-latest]
style: [all, default]
rust:
- stable
include:
- style: all
flags: "--all-features"
- style: default
flags: ""
exclude:
- platform: windows-latest
style: default
- platform: macos-latest
style: default
runs-on: ${{ matrix.platform }}
steps:
- uses: actions/checkout@v2
- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.rust }}
override: true
components: rustfmt, clippy
- uses: Swatinem/rust-cache@v1
with:
key: ${{ matrix.style }}v1 # increment this to bust the cache if needed
- uses: taiki-e/install-action@nextest
- name: Tests
uses: actions-rs/cargo@v1
with:
command: test
args: --all ${{ matrix.flags }}
command: nextest
args: run --all ${{ matrix.flags }}
- name: Doctests
uses: actions-rs/cargo@v1
with:
command: test
args: --workspace --doc ${{ matrix.flags }}
python-virtualenv:
strategy:
@ -87,13 +132,16 @@ jobs:
profile: minimal
toolchain: ${{ matrix.rust }}
override: true
components: rustfmt, clippy
- uses: Swatinem/rust-cache@v1
with:
key: "1" # increment this to bust the cache if needed
- name: Install Nushell
uses: actions-rs/cargo@v1
with:
command: install
args: --path=. --no-default-features
args: --path=. --no-default-features --debug
- name: Setup Python
uses: actions/setup-python@v2
@ -112,3 +160,50 @@ jobs:
- name: Test Nushell in virtualenv
run: cd virtualenv && tox -e ${{ matrix.py }} -- -k nushell
shell: bash
plugins:
strategy:
fail-fast: false
matrix:
platform: [windows-latest, macos-latest, ubuntu-latest]
rust:
- stable
runs-on: ${{ matrix.platform }}
steps:
- uses: actions/checkout@v2
- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.rust }}
override: true
# This job does not use rust-cache because 1) we have limited cache space, 2) even
# without caching, it's not the slowest job. Revisit if those facts change.
- name: Build nu_plugin_example
uses: actions-rs/cargo@v1
with:
command: build
args: --package nu_plugin_example
- name: Build nu_plugin_gstat
uses: actions-rs/cargo@v1
with:
command: build
args: --package nu_plugin_gstat
- name: Build nu_plugin_inc
uses: actions-rs/cargo@v1
with:
command: build
args: --package nu_plugin_inc
- name: Build nu_plugin_query
uses: actions-rs/cargo@v1
with:
command: build
args: --package nu_plugin_query


@ -27,7 +27,7 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: build
args: --release --all --features=extra
args: --release --all --features=extra,static-link-openssl
# - name: Strip binaries (nu)
# run: strip target/release/nu
@ -89,14 +89,10 @@ jobs:
- name: Copy files to output
run: |
cp target/release/nu target/release/nu_plugin_* output/
cp README.build.txt output/README.txt
cp README.release.txt output/README.txt
cp LICENSE output/LICENSE
rm output/*.d
# Note: If OpenSSL changes, this path will need to be updated
- name: Copy OpenSSL to output
run: cp /usr/lib/x86_64-linux-gnu/libssl.so.1.1 output/
- name: Upload artifact
uses: actions/upload-artifact@v2
with:
@ -121,7 +117,7 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: build
args: --release --all --features=extra
args: --release --all --features=extra,static-link-openssl
# - name: Strip binaries (nu)
# run: strip target/release/nu
@ -183,7 +179,7 @@ jobs:
- name: Copy files to output
run: |
cp target/release/nu target/release/nu_plugin_* output/
cp README.build.txt output/README.txt
cp README.release.txt output/README.txt
cp LICENSE output/LICENSE
rm output/*.d
@ -217,7 +213,7 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: build
args: --release --all --features=extra
args: --release --all --features=extra,static-link-openssl
# - name: Strip binaries (nu.exe)
# run: strip target/release/nu.exe
@ -288,7 +284,7 @@ jobs:
cp LICENSE output\
cp target\release\LICENSE-for-less.txt output\
cp target\release\nu_plugin_*.exe output\
cp README.build.txt output\README.txt
cp README.release.txt output\README.txt
cp target\release\less.exe output\
# Note: If the version of `less.exe` needs to be changed, update this URL
# Similarly, if `less.exe` is checked into the repo, copy from the local path here
@ -361,7 +357,6 @@ jobs:
- name: Restore Linux File Modes
run: |
chmod 755 ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}/nu*
chmod 755 ${{ steps.info.outputs.linuxdir }}/${{ steps.info.outputs.innerdir }}/libssl*
- name: Create Linux tarball
run: tar -zcvf ${{ steps.info.outputs.linuxdir }}.tar.gz ${{ steps.info.outputs.linuxdir }}

.gitignore

@ -21,3 +21,6 @@ debian/nu/
# VSCode's IDE items
.vscode/*
# Helix configuration folder
.helix

Cargo.lock (generated): file diff suppressed because it is too large.


@ -11,7 +11,7 @@ name = "nu"
readme = "README.md"
repository = "https://github.com/nushell/nushell"
rust-version = "1.59"
version = "0.61.0"
version = "0.62.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -36,28 +36,29 @@ chrono = "0.4.19"
crossterm = "0.23.0"
ctrlc = "3.2.1"
log = "0.4"
miette = "4.1.0"
miette = "4.5.0"
nu-ansi-term = "0.45.1"
nu-cli = { path="./crates/nu-cli", version = "0.61.0" }
nu-color-config = { path = "./crates/nu-color-config", version = "0.61.0" }
nu-command = { path="./crates/nu-command", version = "0.61.0" }
nu-engine = { path="./crates/nu-engine", version = "0.61.0" }
nu-json = { path="./crates/nu-json", version = "0.61.0" }
nu-parser = { path="./crates/nu-parser", version = "0.61.0" }
nu-path = { path="./crates/nu-path", version = "0.61.0" }
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.61.0" }
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.61.0" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.61.0" }
nu-system = { path = "./crates/nu-system", version = "0.61.0" }
nu-table = { path = "./crates/nu-table", version = "0.61.0" }
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.61.0" }
nu-cli = { path="./crates/nu-cli", version = "0.62.0" }
nu-color-config = { path = "./crates/nu-color-config", version = "0.62.0" }
nu-command = { path="./crates/nu-command", version = "0.62.0" }
nu-engine = { path="./crates/nu-engine", version = "0.62.0" }
nu-json = { path="./crates/nu-json", version = "0.62.0" }
nu-parser = { path="./crates/nu-parser", version = "0.62.0" }
nu-path = { path="./crates/nu-path", version = "0.62.0" }
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.62.0" }
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.62.0" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.62.0" }
nu-system = { path = "./crates/nu-system", version = "0.62.0" }
nu-table = { path = "./crates/nu-table", version = "0.62.0" }
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.62.0" }
openssl = { version = "0.10.38", features = ["vendored"], optional = true }
pretty_env_logger = "0.4.0"
rayon = "1.5.1"
reedline = { version = "0.4.0", features = ["bashisms"]}
reedline = { version = "0.5.0", features = ["bashisms"]}
is_executable = "1.0.1"
[dev-dependencies]
nu-test-support = { path="./crates/nu-test-support", version = "0.61.0" }
nu-test-support = { path="./crates/nu-test-support", version = "0.62.0" }
tempfile = "3.2.0"
assert_cmd = "2.0.2"
pretty_assertions = "1.0.0"
@ -71,14 +72,15 @@ embed-resource = "1"
[features]
plugin = ["nu-plugin", "nu-cli/plugin", "nu-parser/plugin", "nu-command/plugin", "nu-protocol/plugin", "nu-engine/plugin"]
default = ["plugin", "which-support", "zip-support", "trash-support"]
default = ["plugin", "which-support", "trash-support"]
stable = ["default"]
extra = ["default", "dataframe"]
extra = ["default", "dataframe", "database"]
wasi = []
# Enable to statically link OpenSSL; otherwise the system version will be used. Not enabled by default because it takes a while to build
static-link-openssl = ["dep:openssl"]
# Stable (Default)
which-support = ["nu-command/which-support"]
zip-support = ["nu-command/zip"]
trash-support = ["nu-command/trash-support"]
# Extra
@ -86,6 +88,9 @@ trash-support = ["nu-command/trash-support"]
# Dataframe feature for nushell
dataframe = ["nu-command/dataframe"]
# Database commands for nushell
database = ["nu-command/database"]
[profile.release]
opt-level = "s" # Optimize for size
strip = "debuginfo"


@ -1,24 +0,0 @@
[tasks.lalrpop]
install_crate = { crate_name = "lalrpop", binary = "lalrpop", test_arg = "--help" }
command = "lalrpop"
args = ["src/parser/parser.lalrpop"]
[tasks.baseline]
command = "cargo"
args = ["build", "--bins"]
[tasks.run]
command = "cargo"
args = ["run"]
dependencies = ["baseline"]
[tasks.test]
command = "cargo"
args = ["test"]
dependencies = ["baseline"]
[tasks.check]
command = "cargo"
args = ["check"]
dependencies = ["baseline"]


@ -1 +0,0 @@
Nu will look for the plugins in your PATH on startup. While nu will have some functionality without them, for full functionality you'll need to copy them into your path so they can be loaded.


@ -1,7 +1,7 @@
# README
[![Crates.io](https://img.shields.io/crates/v/nu.svg)](https://crates.io/crates/nu)
[![Build Status](https://github.com/nushell/nushell/actions/workflows/ci.yml/badge.svg)](https://github.com/nushell/nushell/actions)
![Build Status](https://img.shields.io/github/workflow/status/nushell/nushell/continuous-integration)
[![Discord](https://img.shields.io/discord/601130461678272522.svg?logo=discord)](https://discord.gg/NtAbbGn)
[![The Changelog #363](https://img.shields.io/badge/The%20Changelog-%23363-61c192.svg)](https://changelog.com/podcast/363)
[![@nu_shell](https://img.shields.io/badge/twitter-@nu_shell-1DA1F3?style=flat-square)](https://twitter.com/nu_shell)
@ -275,6 +275,7 @@ Please submit an issue or PR to be added to this list.
- [starship](https://github.com/starship/starship)
- [oh-my-posh](https://ohmyposh.dev)
- [Couchbase Shell](https://couchbase.sh)
- [virtualenv](https://github.com/pypa/virtualenv)
### Mentions
- [The Python Launcher for Unix](https://github.com/brettcannon/python-launcher#how-do-i-get-a-table-of-python-executables-in-nushell)
@ -285,7 +286,7 @@ See [Contributing](CONTRIBUTING.md) for details.
Thanks to all the people who already contributed!
<a href="https://github.com/nushell/nushell/graphs/contributors">
<img src="https://contributors-img.web.app/image?repo=nushell/nushell" />
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=500" />
</a>
## License

README.release.txt (new file)

@ -0,0 +1,3 @@
To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:
> register -e json ./nu_plugin_query


@ -4,21 +4,25 @@ description = "CLI-related functionality for Nushell"
edition = "2021"
license = "MIT"
name = "nu-cli"
version = "0.61.0"
version = "0.62.0"
[dev-dependencies]
nu-test-support = { path="../nu-test-support", version = "0.62.0" }
nu-command = { path = "../nu-command", version = "0.62.0" }
[dependencies]
nu-engine = { path = "../nu-engine", version = "0.61.0" }
nu-path = { path = "../nu-path", version = "0.61.0" }
nu-parser = { path = "../nu-parser", version = "0.61.0" }
nu-protocol = { path = "../nu-protocol", version = "0.61.0" }
nu-utils = { path = "../nu-utils", version = "0.61.0" }
nu-engine = { path = "../nu-engine", version = "0.62.0" }
nu-path = { path = "../nu-path", version = "0.62.0" }
nu-parser = { path = "../nu-parser", version = "0.62.0" }
nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
nu-utils = { path = "../nu-utils", version = "0.62.0" }
nu-ansi-term = "0.45.1"
nu-color-config = { path = "../nu-color-config", version = "0.61.0" }
nu-color-config = { path = "../nu-color-config", version = "0.62.0" }
reedline = { version = "0.5.0", features = ["bashisms"]}
crossterm = "0.23.0"
miette = { version = "4.4.0", features = ["fancy"] }
miette = { version = "4.5.0", features = ["fancy"] }
thiserror = "1.0.29"
reedline = { version = "0.4.0", features = ["bashisms"]}
fuzzy-matcher = "0.3.7"
log = "0.4"
is_executable = "1.0.1"


@ -2,11 +2,11 @@ use crate::util::report_error;
use log::info;
use miette::Result;
use nu_engine::{convert_env_values, eval_block};
use nu_parser::{parse, trim_quotes};
use nu_parser::parse;
use nu_protocol::engine::Stack;
use nu_protocol::{
engine::{EngineState, StateDelta, StateWorkingSet},
Config, PipelineData, Spanned,
PipelineData, Spanned,
};
use std::path::Path;
@ -22,16 +22,7 @@ pub fn evaluate_commands(
let (block, delta) = {
let mut working_set = StateWorkingSet::new(engine_state);
let (input, _) = if commands.item.starts_with('\'') || commands.item.starts_with('"') {
(
trim_quotes(commands.item.as_bytes()),
commands.span.start + 1,
)
} else {
(commands.item.as_bytes(), commands.span.start)
};
let (output, err) = parse(&mut working_set, None, input, false, &[]);
let (output, err) = parse(&mut working_set, None, commands.item.as_bytes(), false, &[]);
if let Some(err) = err {
report_error(&working_set, &err);
@ -46,15 +37,7 @@ pub fn evaluate_commands(
report_error(&working_set, &err);
}
let config = match stack.get_config() {
Ok(config) => config,
Err(e) => {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &e);
Config::default()
}
};
let config = engine_state.get_config().clone();
// Merge the delta in case env vars changed in the config
match nu_engine::env::current_dir(engine_state, stack) {


@ -12,50 +12,19 @@ pub trait Completer {
span: Span,
offset: usize,
pos: usize,
) -> (Vec<Suggestion>, CompletionOptions);
options: &CompletionOptions,
) -> Vec<Suggestion>;
// Filter results using the completion options
fn filter(
&self,
prefix: Vec<u8>,
items: Vec<Suggestion>,
options: CompletionOptions,
) -> Vec<Suggestion> {
items
.into_iter()
.filter(|it| {
// Minimise clones for new functionality
match (options.case_sensitive, options.positional) {
(true, true) => it.value.as_bytes().starts_with(&prefix),
(true, false) => it
.value
.contains(std::str::from_utf8(&prefix).unwrap_or("")),
(false, positional) => {
let value = it.value.to_lowercase();
let prefix = std::str::from_utf8(&prefix).unwrap_or("").to_lowercase();
if positional {
value.starts_with(&prefix)
} else {
value.contains(&prefix)
}
}
}
})
.collect()
fn get_sort_by(&self) -> SortBy {
SortBy::Ascending
}
// Sort results using the completion options
fn sort(
&self,
items: Vec<Suggestion>,
prefix: Vec<u8>,
options: CompletionOptions,
) -> Vec<Suggestion> {
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
let mut filtered_items = items;
// Sort items
match options.sort_by {
match self.get_sort_by() {
SortBy::LevenshteinDistance => {
filtered_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.value);


@ -1,7 +1,7 @@
use crate::completions::{
file_completions::file_path_completion, Completer, CompletionOptions, SortBy,
file_completions::file_path_completion, Completer, CompletionOptions, MatchAlgorithm, SortBy,
};
use nu_parser::{trim_quotes, FlatShape};
use nu_parser::{unescape_unquote_string, FlatShape};
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Span,
@ -32,7 +32,11 @@ impl CommandCompletion {
}
}
fn external_command_completion(&self, prefix: &str) -> Vec<String> {
fn external_command_completion(
&self,
prefix: &str,
match_algorithm: MatchAlgorithm,
) -> Vec<String> {
let mut executables = vec![];
let paths = self.engine_state.env_vars.get("PATH");
@ -53,7 +57,8 @@ impl CommandCompletion {
) && matches!(
item.path()
.file_name()
.map(|x| x.to_string_lossy().starts_with(prefix)),
.map(|x| match_algorithm
.matches_str(&x.to_string_lossy(), prefix)),
Some(true)
) && is_executable::is_executable(&item.path())
{
@ -76,11 +81,14 @@ impl CommandCompletion {
span: Span,
offset: usize,
find_externals: bool,
match_algorithm: MatchAlgorithm,
) -> Vec<Suggestion> {
let prefix = working_set.get_span_contents(span);
let partial = working_set.get_span_contents(span);
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
let results = working_set
.find_commands_by_prefix(prefix)
.find_commands_by_predicate(filter_predicate)
.into_iter()
.map(move |x| Suggestion {
value: String::from_utf8_lossy(&x.0).to_string(),
@ -90,11 +98,11 @@ impl CommandCompletion {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
});
let results_aliases =
working_set
.find_aliases_by_prefix(prefix)
let results_aliases = working_set
.find_aliases_by_predicate(filter_predicate)
.into_iter()
.map(move |x| Suggestion {
value: String::from_utf8_lossy(&x).to_string(),
@ -104,15 +112,16 @@ impl CommandCompletion {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
});
let mut results = results.chain(results_aliases).collect::<Vec<_>>();
let prefix = working_set.get_span_contents(span);
let prefix = String::from_utf8_lossy(prefix).to_string();
let partial = working_set.get_span_contents(span);
let partial = String::from_utf8_lossy(partial).to_string();
let results = if find_externals {
let results_external =
self.external_command_completion(&prefix)
let results_external = self
.external_command_completion(&partial, match_algorithm)
.into_iter()
.map(move |x| Suggestion {
value: x,
@ -122,6 +131,7 @@ impl CommandCompletion {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
});
for external in results_external {
@ -131,6 +141,7 @@ impl CommandCompletion {
description: None,
extra: None,
span: external.span,
append_whitespace: true,
})
} else {
results.push(external)
@ -154,7 +165,8 @@ impl Completer for CommandCompletion {
span: Span,
offset: usize,
pos: usize,
) -> (Vec<Suggestion>, CompletionOptions) {
options: &CompletionOptions,
) -> Vec<Suggestion> {
let last = self
.flattened
.iter()
@ -172,9 +184,6 @@ impl Completer for CommandCompletion {
})
.last();
// Options
let options = CompletionOptions::new(true, true, SortBy::LevenshteinDistance);
// The last item here would be the earliest shape that could possible by part of this subcommand
let subcommands = if let Some(last) = last {
self.complete_commands(
@ -185,13 +194,14 @@ impl Completer for CommandCompletion {
},
offset,
false,
options.match_algorithm,
)
} else {
vec![]
};
if !subcommands.is_empty() {
return (subcommands, options);
return subcommands;
}
let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
@ -199,7 +209,7 @@ impl Completer for CommandCompletion {
|| ((span.end - span.start) == 0)
{
// we're in a gap or at a command
self.complete_commands(working_set, span, offset, true)
self.complete_commands(working_set, span, offset, true, options.match_algorithm)
} else {
vec![]
};
@ -225,14 +235,16 @@ impl Completer for CommandCompletion {
};
// let prefix = working_set.get_span_contents(flat.0);
let prefix = String::from_utf8_lossy(&prefix).to_string();
let output = file_path_completion(span, &prefix, &cwd)
file_path_completion(span, &prefix, &cwd, options.match_algorithm)
.into_iter()
.map(move |x| {
if self.flat_idx == 0 {
// We're in the command position
if x.1.starts_with('"') && !matches!(preceding_byte.get(0), Some(b'^')) {
let trimmed = trim_quotes(x.1.as_bytes());
let trimmed = String::from_utf8_lossy(trimmed).to_string();
if (x.1.starts_with('"') || x.1.starts_with('\'') || x.1.starts_with('`'))
&& !matches!(preceding_byte.get(0), Some(b'^'))
{
let (trimmed, _) = unescape_unquote_string(x.1.as_bytes(), span);
let expanded = nu_path::canonicalize_with(trimmed, &cwd);
if let Ok(expanded) = expanded {
@ -259,16 +271,14 @@ impl Completer for CommandCompletion {
start: x.0.start - offset,
end: x.0.end - offset,
},
append_whitespace: false,
})
.chain(subcommands.into_iter())
.chain(commands.into_iter())
.collect::<Vec<_>>();
(output, options)
.collect::<Vec<_>>()
}
// Replace base filter with no filter once all the results are already based in the current path
fn filter(&self, _: Vec<u8>, items: Vec<Suggestion>, _: CompletionOptions) -> Vec<Suggestion> {
items
fn get_sort_by(&self) -> SortBy {
SortBy::LevenshteinDistance
}
}


@ -1,28 +1,27 @@
use crate::completions::{
CommandCompletion, Completer, CustomCompletion, FileCompletion, FlagCompletion,
VariableCompletion,
CommandCompletion, Completer, CompletionOptions, CustomCompletion, DirectoryCompletion,
DotNuCompletion, FileCompletion, FlagCompletion, MatchAlgorithm, VariableCompletion,
};
use nu_parser::{flatten_expression, parse, FlatShape};
use nu_protocol::{
engine::{EngineState, Stack, StateWorkingSet},
Span, Value,
Span,
};
use reedline::{Completer as ReedlineCompleter, Suggestion};
use std::str;
use std::sync::Arc;
#[derive(Clone)]
pub struct NuCompleter {
engine_state: Arc<EngineState>,
stack: Stack,
config: Option<Value>,
}
impl NuCompleter {
pub fn new(engine_state: Arc<EngineState>, stack: Stack, config: Option<Value>) -> Self {
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
Self {
engine_state,
stack,
config,
}
}
@ -36,15 +35,20 @@ impl NuCompleter {
offset: usize,
pos: usize,
) -> Vec<Suggestion> {
// Fetch
let (mut suggestions, options) =
completer.fetch(working_set, prefix.clone(), new_span, offset, pos);
let config = self.engine_state.get_config();
// Filter
suggestions = completer.filter(prefix.clone(), suggestions, options.clone());
let mut options = CompletionOptions::default();
if config.completion_algorithm == "fuzzy" {
options.match_algorithm = MatchAlgorithm::Fuzzy;
}
// Fetch
let mut suggestions =
completer.fetch(working_set, prefix.clone(), new_span, offset, pos, &options);
// Sort
suggestions = completer.sort(suggestions, prefix, options);
suggestions = completer.sort(suggestions, prefix);
suggestions
}
@ -69,6 +73,10 @@ impl NuCompleter {
for (flat_idx, flat) in flattened.iter().enumerate() {
if pos >= flat.0.start && pos < flat.0.end {
// Context variables
let most_left_var =
most_left_variable(flat_idx, &working_set, flattened.clone());
// Create a new span
let new_span = Span {
start: flat.0.start,
@ -79,9 +87,37 @@ impl NuCompleter {
let mut prefix = working_set.get_span_contents(flat.0).to_vec();
prefix.remove(pos - flat.0.start);
// Completions that depends on the previous expression (e.g: use, source)
if flat_idx > 0 {
if let Some(previous_expr) = flattened.get(flat_idx - 1) {
// Read the content for the previous expression
let prev_expr_str =
working_set.get_span_contents(previous_expr.0).to_vec();
// Completion for .nu files
if prev_expr_str == b"use" || prev_expr_str == b"source" {
let mut completer =
DotNuCompletion::new(self.engine_state.clone());
return self.process_completion(
&mut completer,
&working_set,
prefix,
new_span,
offset,
pos,
);
}
}
}
// Variables completion
if prefix.starts_with(b"$") {
let mut completer = VariableCompletion::new(self.engine_state.clone());
if prefix.starts_with(b"$") || most_left_var.is_some() {
let mut completer = VariableCompletion::new(
self.engine_state.clone(),
self.stack.clone(),
most_left_var.unwrap_or((vec![], vec![])),
);
return self.process_completion(
&mut completer,
@ -113,7 +149,6 @@ impl NuCompleter {
let mut completer = CustomCompletion::new(
self.engine_state.clone(),
self.stack.clone(),
self.config.clone(),
*decl_id,
line,
);
@ -127,7 +162,22 @@ impl NuCompleter {
pos,
);
}
FlatShape::Filepath | FlatShape::GlobPattern => {
FlatShape::Directory => {
let mut completer =
DirectoryCompletion::new(self.engine_state.clone());
return self.process_completion(
&mut completer,
&working_set,
prefix,
new_span,
offset,
pos,
);
}
FlatShape::Filepath
| FlatShape::GlobPattern
| FlatShape::ExternalArg => {
let mut completer = FileCompletion::new(self.engine_state.clone());
return self.process_completion(
@ -172,3 +222,53 @@ impl ReedlineCompleter for NuCompleter {
self.completion_helper(line, pos)
}
}
// reads the most left variable returning it's name (e.g: $myvar)
// and the depth (a.b.c)
fn most_left_variable(
idx: usize,
working_set: &StateWorkingSet<'_>,
flattened: Vec<(Span, FlatShape)>,
) -> Option<(Vec<u8>, Vec<Vec<u8>>)> {
// Reverse items to read the list backwards and truncate
// because the only items that matters are the ones before the current index
let mut rev = flattened;
rev.truncate(idx);
rev = rev.into_iter().rev().collect();
// Store the variables and sub levels found and reverse to correct order
let mut variables_found: Vec<Vec<u8>> = vec![];
let mut found_var = false;
for item in rev.clone() {
let result = working_set.get_span_contents(item.0).to_vec();
match item.1 {
FlatShape::Variable => {
variables_found.push(result);
found_var = true;
break;
}
FlatShape::String => {
variables_found.push(result);
}
_ => {
break;
}
}
}
// If most left var was not found
if !found_var {
return None;
}
// Reverse the order back
variables_found = variables_found.into_iter().rev().collect();
// Extract the variable and the sublevels
let var = variables_found.first().unwrap_or(&vec![]).to_vec();
let sublevels: Vec<Vec<u8>> = variables_found.into_iter().skip(1).collect();
Some((var, sublevels))
}


@ -1,25 +1,93 @@
#[derive(Clone)]
use std::fmt::Display;
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str;
#[derive(Copy, Clone)]
pub enum SortBy {
LevenshteinDistance,
Ascending,
None,
}
/// Describes how suggestions should be matched.
#[derive(Copy, Clone, Debug)]
pub enum MatchAlgorithm {
/// Only show suggestions which begin with the given input
///
/// Example:
/// "git switch" is matched by "git sw"
Prefix,
/// Only show suggestions which contain the input chars at any place
///
/// Example:
/// "git checkout" is matched by "gco"
Fuzzy,
}
impl MatchAlgorithm {
/// Returns whether the `needle` search text matches the given `haystack`.
pub fn matches_str(&self, haystack: &str, needle: &str) -> bool {
let haystack = trim_quotes_str(haystack);
let needle = trim_quotes_str(needle);
match *self {
MatchAlgorithm::Prefix => haystack.starts_with(needle),
MatchAlgorithm::Fuzzy => {
let matcher = SkimMatcherV2::default();
matcher.fuzzy_match(haystack, needle).is_some()
}
}
}
/// Returns whether the `needle` search text matches the given `haystack`.
pub fn matches_u8(&self, haystack: &[u8], needle: &[u8]) -> bool {
match *self {
MatchAlgorithm::Prefix => haystack.starts_with(needle),
MatchAlgorithm::Fuzzy => {
let haystack_str = String::from_utf8_lossy(haystack);
let needle_str = String::from_utf8_lossy(needle);
let matcher = SkimMatcherV2::default();
matcher.fuzzy_match(&haystack_str, &needle_str).is_some()
}
}
}
}
impl TryFrom<String> for MatchAlgorithm {
type Error = InvalidMatchAlgorithm;
fn try_from(value: String) -> Result<Self, Self::Error> {
match value.as_str() {
"prefix" => Ok(Self::Prefix),
"fuzzy" => Ok(Self::Fuzzy),
_ => Err(InvalidMatchAlgorithm::Unknown),
}
}
}
#[derive(Debug)]
pub enum InvalidMatchAlgorithm {
Unknown,
}
impl Display for InvalidMatchAlgorithm {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match *self {
InvalidMatchAlgorithm::Unknown => write!(f, "unknown match algorithm"),
}
}
}
impl std::error::Error for InvalidMatchAlgorithm {}
#[derive(Clone)]
pub struct CompletionOptions {
pub case_sensitive: bool,
pub positional: bool,
pub sort_by: SortBy,
}
impl CompletionOptions {
pub fn new(case_sensitive: bool, positional: bool, sort_by: SortBy) -> Self {
Self {
case_sensitive,
positional,
sort_by,
}
}
pub match_algorithm: MatchAlgorithm,
}
impl Default for CompletionOptions {
@ -28,6 +96,42 @@ impl Default for CompletionOptions {
case_sensitive: true,
positional: true,
sort_by: SortBy::Ascending,
match_algorithm: MatchAlgorithm::Prefix,
}
}
}
#[cfg(test)]
mod test {
use super::MatchAlgorithm;
#[test]
fn match_algorithm_prefix() {
let algorithm = MatchAlgorithm::Prefix;
assert!(algorithm.matches_str("example text", ""));
assert!(algorithm.matches_str("example text", "examp"));
assert!(!algorithm.matches_str("example text", "text"));
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
}
#[test]
fn match_algorithm_fuzzy() {
let algorithm = MatchAlgorithm::Fuzzy;
assert!(algorithm.matches_str("example text", ""));
assert!(algorithm.matches_str("example text", "examp"));
assert!(algorithm.matches_str("example text", "ext"));
assert!(algorithm.matches_str("example text", "mplxt"));
assert!(!algorithm.matches_str("example text", "mpp"));
assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 3]));
assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 2]));
}
}


@ -1,9 +1,9 @@
use crate::completions::{Completer, CompletionOptions, SortBy};
use crate::completions::{Completer, CompletionOptions, MatchAlgorithm, SortBy};
use nu_engine::eval_call;
use nu_protocol::{
ast::{Argument, Call, Expr, Expression},
engine::{EngineState, Stack, StateWorkingSet},
PipelineData, Span, Type, Value, CONFIG_VARIABLE_ID,
PipelineData, Span, Type, Value,
};
use reedline::Suggestion;
use std::sync::Arc;
@ -11,25 +11,19 @@ use std::sync::Arc;
pub struct CustomCompletion {
engine_state: Arc<EngineState>,
stack: Stack,
config: Option<Value>,
decl_id: usize,
line: String,
sort_by: SortBy,
}
impl CustomCompletion {
pub fn new(
engine_state: Arc<EngineState>,
stack: Stack,
config: Option<Value>,
decl_id: usize,
line: String,
) -> Self {
pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self {
Self {
engine_state,
stack,
config,
decl_id,
line,
sort_by: SortBy::None,
}
}
@ -40,10 +34,9 @@ impl CustomCompletion {
offset: usize,
) -> Vec<Suggestion> {
list.filter_map(move |x| {
let s = x.as_string();
match s {
Ok(s) => Some(Suggestion {
// Match for string values
if let Ok(s) = x.as_string() {
return Some(Suggestion {
value: s,
description: None,
extra: None,
@ -51,9 +44,48 @@ impl CustomCompletion {
start: span.start - offset,
end: span.end - offset,
},
}),
Err(_) => None,
append_whitespace: false,
});
}
// Match for record values
if let Ok((cols, vals)) = x.as_record() {
let mut suggestion = Suggestion {
value: String::from(""), // Initialize with empty string
description: None,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: false,
};
// Iterate the cols looking for `value` and `description`
cols.iter().zip(vals).for_each(|it| {
// Match `value` column
if it.0 == "value" {
// Convert the value to string
if let Ok(val_str) = it.1.as_string() {
// Update the suggestion value
suggestion.value = val_str;
}
}
// Match `description` column
if it.0 == "description" {
// Convert the value to string
if let Ok(desc_str) = it.1.as_string() {
// Update the suggestion value
suggestion.description = Some(desc_str);
}
}
});
return Some(suggestion);
}
None
})
.collect()
}
@ -63,28 +95,15 @@ impl Completer for CustomCompletion {
fn fetch(
&mut self,
_: &StateWorkingSet,
_: Vec<u8>,
prefix: Vec<u8>,
span: Span,
offset: usize,
pos: usize,
) -> (Vec<Suggestion>, CompletionOptions) {
completion_options: &CompletionOptions,
) -> Vec<Suggestion> {
// Line position
let line_pos = pos - offset;
// Set up our initial config to start from
if let Some(conf) = &self.config {
self.stack.vars.insert(CONFIG_VARIABLE_ID, conf.clone());
} else {
self.stack.vars.insert(
CONFIG_VARIABLE_ID,
Value::Record {
cols: vec![],
vals: vec![],
span: Span { start: 0, end: 0 },
},
);
}
// Call custom declaration
let result = eval_call(
&self.engine_state,
@ -112,8 +131,10 @@ impl Completer for CustomCompletion {
PipelineData::new(span),
);
let mut custom_completion_options = None;
// Parse result
let (suggestions, options) = match result {
let suggestions = match result {
Ok(pd) => {
let value = pd.into_value(span);
match &value {
@ -128,14 +149,18 @@ impl Completer for CustomCompletion {
.unwrap_or_default();
let options = value.get_data_by_key("options");
let options = if let Some(Value::Record { .. }) = &options {
if let Some(Value::Record { .. }) = &options {
let options = options.unwrap_or_default();
let should_sort = options
.get_data_by_key("sort")
.and_then(|val| val.as_bool().ok())
.unwrap_or(false);
CompletionOptions {
if should_sort {
self.sort_by = SortBy::Ascending;
}
custom_completion_options = Some(CompletionOptions {
case_sensitive: options
.get_data_by_key("case_sensitive")
.and_then(|val| val.as_bool().ok())
@ -149,23 +174,60 @@ impl Completer for CustomCompletion {
} else {
SortBy::None
},
match_algorithm: match options
.get_data_by_key("completion_algorithm")
{
Some(option) => option
.as_string()
.ok()
.and_then(|option| option.try_into().ok())
.unwrap_or(MatchAlgorithm::Prefix),
None => completion_options.match_algorithm,
},
});
}
completions
}
Value::List { vals, .. } => self.map_completions(vals.iter(), span, offset),
_ => vec![],
}
}
_ => vec![],
};
if let Some(custom_completion_options) = custom_completion_options {
filter(&prefix, suggestions, &custom_completion_options)
} else {
CompletionOptions::default()
};
filter(&prefix, suggestions, completion_options)
}
}
(completions, options)
}
Value::List { vals, .. } => {
let completions = self.map_completions(vals.iter(), span, offset);
(completions, CompletionOptions::default())
}
_ => (vec![], CompletionOptions::default()),
}
}
_ => (vec![], CompletionOptions::default()),
};
(suggestions, options)
fn get_sort_by(&self) -> SortBy {
self.sort_by
}
}
fn filter(prefix: &[u8], items: Vec<Suggestion>, options: &CompletionOptions) -> Vec<Suggestion> {
items
.into_iter()
.filter(|it| match options.match_algorithm {
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
(true, true) => it.value.as_bytes().starts_with(prefix),
(true, false) => it.value.contains(std::str::from_utf8(prefix).unwrap_or("")),
(false, positional) => {
let value = it.value.to_lowercase();
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_lowercase();
if positional {
value.starts_with(&prefix)
} else {
value.contains(&prefix)
}
}
},
MatchAlgorithm::Fuzzy => options
.match_algorithm
.matches_u8(it.value.as_bytes(), prefix),
})
.collect()
}
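With the change above, map_completions accepts records as well as plain strings, so a custom completer can return entries carrying a value plus an optional description. A condensed stand-in for that mapping, using simplified types in place of nu_protocol::Value and reedline::Suggestion (the example flag text is made up):

```
// Simplified stand-ins for the completion value and suggestion types.
enum CompletionValue {
    String(String),
    Record(Vec<(String, String)>), // (column, value) pairs
}

struct Suggestion {
    value: String,
    description: Option<String>,
}

// Mirror of the mapping above: strings become the suggestion text directly,
// records are scanned for their "value" and "description" columns.
fn map_completion(value: CompletionValue) -> Option<Suggestion> {
    match value {
        CompletionValue::String(s) => Some(Suggestion {
            value: s,
            description: None,
        }),
        CompletionValue::Record(cols) => {
            let mut suggestion = Suggestion {
                value: String::new(),
                description: None,
            };
            for (col, val) in cols {
                match col.as_str() {
                    "value" => suggestion.value = val,
                    "description" => suggestion.description = Some(val),
                    _ => {} // other columns are ignored
                }
            }
            Some(suggestion)
        }
    }
}

fn main() {
    let record = CompletionValue::Record(vec![
        ("value".into(), "--help".into()),
        ("description".into(), "Show command help".into()),
    ]);
    let plain = CompletionValue::String("--version".into());
    for completion in [record, plain] {
        if let Some(suggestion) = map_completion(completion) {
            println!("{} ({:?})", suggestion.value, suggestion.description);
        }
    }
}
```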

View File

@ -0,0 +1,159 @@
use crate::completions::{matches, Completer, CompletionOptions};
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
levenshtein_distance, Span,
};
use reedline::Suggestion;
use std::path::Path;
use std::sync::Arc;
use super::{partial_from, prepend_base_dir, MatchAlgorithm};
const SEP: char = std::path::MAIN_SEPARATOR;
#[derive(Clone)]
pub struct DirectoryCompletion {
engine_state: Arc<EngineState>,
}
impl DirectoryCompletion {
pub fn new(engine_state: Arc<EngineState>) -> Self {
Self { engine_state }
}
}
impl Completer for DirectoryCompletion {
fn fetch(
&mut self,
_: &StateWorkingSet,
prefix: Vec<u8>,
span: Span,
offset: usize,
_: usize,
options: &CompletionOptions,
) -> Vec<Suggestion> {
let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
match d.as_string() {
Ok(s) => s,
Err(_) => "".to_string(),
}
} else {
"".to_string()
};
let partial = String::from_utf8_lossy(&prefix).to_string();
// Filter only the folders
let output: Vec<_> = directory_completion(span, &partial, &cwd, options.match_algorithm)
.into_iter()
.map(move |x| Suggestion {
value: x.1,
description: None,
extra: None,
span: reedline::Span {
start: x.0.start - offset,
end: x.0.end - offset,
},
append_whitespace: false,
})
.collect();
output
}
// Sort results prioritizing the non hidden folders
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
// Sort items
let mut sorted_items = items;
sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
sorted_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.value);
let b_distance = levenshtein_distance(&prefix_str, &b.value);
a_distance.cmp(&b_distance)
});
// Separate the results between hidden and non hidden
let mut hidden: Vec<Suggestion> = vec![];
let mut non_hidden: Vec<Suggestion> = vec![];
for item in sorted_items.into_iter() {
let item_path = Path::new(&item.value);
if let Some(value) = item_path.file_name() {
if let Some(value) = value.to_str() {
if value.starts_with('.') {
hidden.push(item);
} else {
non_hidden.push(item);
}
}
}
}
// Append the hidden folders to the non hidden vec to avoid creating a new vec
non_hidden.append(&mut hidden);
non_hidden
}
}
pub fn directory_completion(
span: nu_protocol::Span,
partial: &str,
cwd: &str,
match_algorithm: MatchAlgorithm,
) -> Vec<(nu_protocol::Span, String)> {
let original_input = partial;
let (base_dir_name, partial) = partial_from(partial);
let base_dir = nu_path::expand_path_with(&base_dir_name, cwd);
// This check is here as base_dir.read_dir() with base_dir == "" will open the current dir
// which we don't want in this case (if we did, base_dir would already be ".")
if base_dir == Path::new("") {
return Vec::new();
}
if let Ok(result) = base_dir.read_dir() {
return result
.filter_map(|entry| {
entry.ok().and_then(|entry| {
if let Ok(metadata) = entry.metadata() {
if metadata.is_dir() {
let mut file_name = entry.file_name().to_string_lossy().into_owned();
if matches(&partial, &file_name, match_algorithm) {
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
format!("{}{}", base_dir_name, file_name)
} else {
file_name.to_string()
};
if entry.path().is_dir() {
path.push(SEP);
file_name.push(SEP);
}
// Fix files or folders with quotes
if path.contains('\'') || path.contains('"') || path.contains(' ') {
path = format!("`{}`", path);
}
Some((span, path))
} else {
None
}
} else {
None
}
} else {
None
}
})
})
.collect();
}
Vec::new()
}
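The sort above orders candidates by Levenshtein distance to the typed prefix and then moves hidden entries behind the visible ones. A minimal sketch of that final partition step, using plain strings instead of Suggestion values and an illustrative helper name:

```
use std::path::Path;

// Push dot-prefixed (hidden) entries behind the visible ones while keeping
// the existing order inside each group, as the sort above does after the
// Levenshtein pass.
fn deprioritize_hidden(items: Vec<String>) -> Vec<String> {
    let (mut non_hidden, mut hidden): (Vec<String>, Vec<String>) =
        items.into_iter().partition(|item| {
            Path::new(item)
                .file_name()
                .and_then(|name| name.to_str())
                .map(|name| !name.starts_with('.'))
                .unwrap_or(true)
        });
    non_hidden.append(&mut hidden);
    non_hidden
}

fn main() {
    let ordered = deprioritize_hidden(vec![
        ".hidden_folder/".into(),
        "another/".into(),
        "test_a/".into(),
    ]);
    assert_eq!(ordered, vec!["another/", "test_a/", ".hidden_folder/"]);
    println!("{:?}", ordered);
}
```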

View File

@ -0,0 +1,124 @@
use crate::completions::{
file_path_completion, partial_from, Completer, CompletionOptions, SortBy,
};
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Span,
};
use reedline::Suggestion;
use std::sync::Arc;
const SEP: char = std::path::MAIN_SEPARATOR;
#[derive(Clone)]
pub struct DotNuCompletion {
engine_state: Arc<EngineState>,
}
impl DotNuCompletion {
pub fn new(engine_state: Arc<EngineState>) -> Self {
Self { engine_state }
}
}
impl Completer for DotNuCompletion {
fn fetch(
&mut self,
_: &StateWorkingSet,
prefix: Vec<u8>,
span: Span,
offset: usize,
_: usize,
options: &CompletionOptions,
) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
let mut search_dirs: Vec<String> = vec![];
let (base_dir, mut partial) = partial_from(&prefix_str);
let mut is_current_folder = false;
// Fetch the lib dirs
let lib_dirs: Vec<String> =
if let Some(lib_dirs) = self.engine_state.env_vars.get("NU_LIB_DIRS") {
lib_dirs
.as_list()
.into_iter()
.flat_map(|it| {
it.iter().map(|x| {
x.as_path()
.expect("internal error: failed to convert lib path")
})
})
.map(|it| {
it.into_os_string()
.into_string()
.expect("internal error: failed to convert OS path")
})
.collect()
} else {
vec![]
};
// Check if the base_dir is a folder
if base_dir != "./" {
// Add the base dir into the directories to be searched
search_dirs.push(base_dir.clone());
// Reset the partial adding the basic dir back
// in order to make the span replace work properly
let mut base_dir_partial = base_dir;
base_dir_partial.push_str(&partial);
partial = base_dir_partial;
} else {
// Fetch the current folder
let current_folder = if let Some(d) = self.engine_state.env_vars.get("PWD") {
match d.as_string() {
Ok(s) => s,
Err(_) => "".to_string(),
}
} else {
"".to_string()
};
is_current_folder = true;
// Add the current folder and the lib dirs into the
// directories to be searched
search_dirs.push(current_folder);
search_dirs.extend(lib_dirs);
}
// Fetch the files filtering the ones that ends with .nu
// and transform them into suggestions
let output: Vec<Suggestion> = search_dirs
.into_iter()
.flat_map(|it| {
file_path_completion(span, &partial, &it, options.match_algorithm)
.into_iter()
.filter(|it| {
// Different base dir, so we list the .nu files or folders
if !is_current_folder {
it.1.ends_with(".nu") || it.1.ends_with(SEP)
} else {
// Lib dirs, so we filter only the .nu files
it.1.ends_with(".nu")
}
})
.map(move |x| Suggestion {
value: x.1,
description: None,
extra: None,
span: reedline::Span {
start: x.0.start - offset,
end: x.0.end - offset,
},
append_whitespace: true,
})
})
.collect();
output
}
fn get_sort_by(&self) -> SortBy {
SortBy::LevenshteinDistance
}
}

View File

@ -1,4 +1,4 @@
use crate::completions::{Completer, CompletionOptions};
use crate::completions::{Completer, CompletionOptions, MatchAlgorithm};
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
levenshtein_distance, Span,
@ -28,7 +28,8 @@ impl Completer for FileCompletion {
span: Span,
offset: usize,
_: usize,
) -> (Vec<Suggestion>, CompletionOptions) {
options: &CompletionOptions,
) -> Vec<Suggestion> {
let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
match d.as_string() {
Ok(s) => s,
@ -38,7 +39,7 @@ impl Completer for FileCompletion {
"".to_string()
};
let prefix = String::from_utf8_lossy(&prefix).to_string();
let output: Vec<_> = file_path_completion(span, &prefix, &cwd)
let output: Vec<_> = file_path_completion(span, &prefix, &cwd, options.match_algorithm)
.into_iter()
.map(move |x| Suggestion {
value: x.1,
@ -48,26 +49,20 @@ impl Completer for FileCompletion {
start: x.0.start - offset,
end: x.0.end - offset,
},
append_whitespace: false,
})
.collect();
// Options
let options = CompletionOptions::default();
(output, options)
output
}
// Sort results prioritizing the non hidden folders
fn sort(
&self,
items: Vec<Suggestion>,
prefix: Vec<u8>,
_: CompletionOptions, // Ignore the given options, once it's a custom sorting
) -> Vec<Suggestion> {
fn sort(&self, items: Vec<Suggestion>, prefix: Vec<u8>) -> Vec<Suggestion> {
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
// Sort items
let mut sorted_items = items;
sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
sorted_items.sort_by(|a, b| {
let a_distance = levenshtein_distance(&prefix_str, &a.value);
let b_distance = levenshtein_distance(&prefix_str, &b.value);
@ -97,21 +92,11 @@ impl Completer for FileCompletion {
non_hidden
}
// Replace base filter with no filter once all the results are already based in the current path
fn filter(&self, _: Vec<u8>, items: Vec<Suggestion>, _: CompletionOptions) -> Vec<Suggestion> {
items
}
}
pub fn file_path_completion(
span: nu_protocol::Span,
partial: &str,
cwd: &str,
) -> Vec<(nu_protocol::Span, String)> {
let partial = partial.replace('\'', "");
pub fn partial_from(input: &str) -> (String, String) {
let partial = input.replace('`', "");
let (base_dir_name, partial) = {
// If partial is only a word we want to search in the current dir
let (base, rest) = partial.rsplit_once(is_separator).unwrap_or((".", &partial));
// On windows, this standardizes paths to use \
@ -119,8 +104,18 @@ pub fn file_path_completion(
// rsplit_once removes the separator
base.push(SEP);
(base, rest)
};
(base.to_string(), rest.to_string())
}
pub fn file_path_completion(
span: nu_protocol::Span,
partial: &str,
cwd: &str,
match_algorithm: MatchAlgorithm,
) -> Vec<(nu_protocol::Span, String)> {
let original_input = partial;
let (base_dir_name, partial) = partial_from(partial);
let base_dir = nu_path::expand_path_with(&base_dir_name, cwd);
// This check is here as base_dir.read_dir() with base_dir == "" will open the current dir
@ -134,15 +129,21 @@ pub fn file_path_completion(
.filter_map(|entry| {
entry.ok().and_then(|entry| {
let mut file_name = entry.file_name().to_string_lossy().into_owned();
if matches(partial, &file_name) {
let mut path = format!("{}{}", base_dir_name, file_name);
if matches(&partial, &file_name, match_algorithm) {
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
format!("{}{}", base_dir_name, file_name)
} else {
file_name.to_string()
};
if entry.path().is_dir() {
path.push(SEP);
file_name.push(SEP);
}
if path.contains(' ') {
path = format!("\'{}\'", path);
// Fix files or folders with quotes
if path.contains('\'') || path.contains('"') || path.contains(' ') {
path = format!("`{}`", path);
}
Some((span, path))
@ -157,7 +158,26 @@ pub fn file_path_completion(
Vec::new()
}
pub fn matches(partial: &str, from: &str) -> bool {
from.to_ascii_lowercase()
.starts_with(&partial.to_ascii_lowercase())
pub fn matches(partial: &str, from: &str, match_algorithm: MatchAlgorithm) -> bool {
match_algorithm.matches_str(&from.to_ascii_lowercase(), &partial.to_ascii_lowercase())
}
/// Returns whether the base_dir should be prepended to the file path
pub fn prepend_base_dir(input: &str, base_dir: &str) -> bool {
if base_dir == format!(".{}", SEP) {
// if the current base_dir path is the local folder we only add a "./" prefix if the user
// input already includes a local folder prefix.
let manually_entered = {
let mut chars = input.chars();
let first_char = chars.next();
let second_char = chars.next();
first_char == Some('.') && second_char.map(is_separator).unwrap_or(false)
};
manually_entered
} else {
// always prepend the base dir if it is a subfolder
true
}
}
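prepend_base_dir decides whether the completed path keeps the directory part the user typed: an implicit ./ is only re-added if the user typed it, while any other base directory is always kept. The same rule restated as a small self-contained program with a few example inputs (the paths are illustrative):

```
use std::path::{is_separator, MAIN_SEPARATOR as SEP};

/// Returns whether the base_dir should be prepended to the completed path,
/// restating the rule above.
fn prepend_base_dir(input: &str, base_dir: &str) -> bool {
    if base_dir == format!(".{}", SEP) {
        // Only keep an explicit "./" when the user actually typed one.
        let mut chars = input.chars();
        chars.next() == Some('.') && chars.next().map(is_separator).unwrap_or(false)
    } else {
        // A real sub-directory is always kept.
        true
    }
}

fn main() {
    let dot_slash = format!(".{}", SEP);
    // Bare name: complete to "foo.txt", not "./foo.txt".
    assert!(!prepend_base_dir("fo", &dot_slash));
    // The user typed "./fo", so the "./" prefix is preserved.
    assert!(prepend_base_dir(&format!(".{}fo", SEP), &dot_slash));
    // Completing inside "src/" always keeps the directory part.
    assert!(prepend_base_dir(&format!("src{}fo", SEP), &format!("src{}", SEP)));
}
```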

View File

@ -26,7 +26,8 @@ impl Completer for FlagCompletion {
span: Span,
offset: usize,
_: usize,
) -> (Vec<Suggestion>, CompletionOptions) {
options: &CompletionOptions,
) -> Vec<Suggestion> {
// Check if it's a flag
if let Expr::Call(call) = &self.expression.expr {
let decl = working_set.get_decl(call.decl_id);
@ -40,7 +41,8 @@ impl Completer for FlagCompletion {
let mut named = vec![0; short.len_utf8()];
short.encode_utf8(&mut named);
named.insert(0, b'-');
if named.starts_with(&prefix) {
if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(Suggestion {
value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()),
@ -49,6 +51,7 @@ impl Completer for FlagCompletion {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
});
}
}
@ -60,7 +63,8 @@ impl Completer for FlagCompletion {
let mut named = named.long.as_bytes().to_vec();
named.insert(0, b'-');
named.insert(0, b'-');
if named.starts_with(&prefix) {
if options.match_algorithm.matches_u8(&named, &prefix) {
output.push(Suggestion {
value: String::from_utf8_lossy(&named).to_string(),
description: Some(flag_desc.to_string()),
@ -69,13 +73,14 @@ impl Completer for FlagCompletion {
start: span.start - offset,
end: span.end - offset,
},
append_whitespace: true,
});
}
}
return (output, CompletionOptions::default());
return output;
}
(vec![], CompletionOptions::default())
vec![]
}
}
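Flag completion now hands the prefix check to the selected match algorithm instead of a hard-coded starts_with, and marks accepted flags with append_whitespace so a space follows them. A tiny sketch of the byte-level check for a long flag (flag_matches is an illustrative name, and only the plain prefix case is shown):

```
// Build the "--long" byte form of a flag and check it against the partially
// typed input; the hunk above generalizes this starts_with check to
// options.match_algorithm.matches_u8 so fuzzy matching works for flags too.
fn flag_matches(long_name: &str, typed: &[u8]) -> bool {
    let mut named = long_name.as_bytes().to_vec();
    named.insert(0, b'-');
    named.insert(0, b'-');
    named.starts_with(typed)
}

fn main() {
    assert!(flag_matches("force", b"--f"));
    assert!(flag_matches("force", b"-"));
    assert!(!flag_matches("force", b"--v"));
}
```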

View File

@ -3,6 +3,8 @@ mod command_completions;
mod completer;
mod completion_options;
mod custom_completions;
mod directory_completions;
mod dotnu_completions;
mod file_completions;
mod flag_completions;
mod variable_completions;
@ -10,8 +12,12 @@ mod variable_completions;
pub use base::Completer;
pub use command_completions::CommandCompletion;
pub use completer::NuCompleter;
pub use completion_options::{CompletionOptions, SortBy};
pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy};
pub use custom_completions::CustomCompletion;
pub use file_completions::{file_path_completion, FileCompletion};
pub use directory_completions::DirectoryCompletion;
pub use dotnu_completions::DotNuCompletion;
pub use file_completions::{
file_path_completion, matches, partial_from, prepend_base_dir, FileCompletion,
};
pub use flag_completions::FlagCompletion;
pub use variable_completions::VariableCompletion;

View File

@ -1,19 +1,32 @@
use crate::completions::{Completer, CompletionOptions};
use nu_engine::eval_variable;
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Span,
engine::{EngineState, Stack, StateWorkingSet},
Span, Value,
};
use reedline::Suggestion;
use std::str;
use std::sync::Arc;
#[derive(Clone)]
pub struct VariableCompletion {
engine_state: Arc<EngineState>,
stack: Stack,
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
}
impl VariableCompletion {
pub fn new(engine_state: Arc<EngineState>) -> Self {
Self { engine_state }
pub fn new(
engine_state: Arc<EngineState>,
stack: Stack,
var_context: (Vec<u8>, Vec<Vec<u8>>),
) -> Self {
Self {
engine_state,
stack,
var_context,
}
}
}
@ -25,51 +38,136 @@ impl Completer for VariableCompletion {
span: Span,
offset: usize,
_: usize,
) -> (Vec<Suggestion>, CompletionOptions) {
options: &CompletionOptions,
) -> Vec<Suggestion> {
let mut output = vec![];
let builtins = ["$nu", "$in", "$config", "$env", "$nothing"];
let var_str = std::str::from_utf8(&self.var_context.0)
.unwrap_or("")
.to_lowercase();
let var_id = working_set.find_variable(&self.var_context.0);
let current_span = reedline::Span {
start: span.start - offset,
end: span.end - offset,
};
let sublevels_count = self.var_context.1.len();
// Completions for the given variable
if !var_str.is_empty() {
// Completion for $env.<tab>
if var_str.as_str() == "$env" {
let env_vars = self.stack.get_env_vars(&self.engine_state);
// Return nested values
if sublevels_count > 0 {
// Extract the target var ($env.<target-var>)
let target_var = self.var_context.1[0].clone();
let target_var_str =
str::from_utf8(&target_var).unwrap_or_default().to_string();
// Everything after the target var is the nested level ($env.<target-var>.<nested_levels>...)
let nested_levels: Vec<Vec<u8>> =
self.var_context.1.clone().into_iter().skip(1).collect();
if let Some(val) = env_vars.get(&target_var_str) {
return nested_suggestions(val.clone(), nested_levels, current_span);
}
} else {
// No nesting provided, return all env vars
for env_var in env_vars {
if options
.match_algorithm
.matches_u8(env_var.0.as_bytes(), &prefix)
{
output.push(Suggestion {
value: env_var.0,
description: None,
extra: None,
span: current_span,
append_whitespace: false,
});
}
}
return output;
}
}
// Completions for $nu.<tab>
if var_str.as_str() == "$nu" {
// Eval nu var
if let Ok(nuval) = eval_variable(
&self.engine_state,
&self.stack,
nu_protocol::NU_VARIABLE_ID,
nu_protocol::Span {
start: current_span.start,
end: current_span.end,
},
) {
return nested_suggestions(nuval, self.var_context.1.clone(), current_span);
}
}
// Completion other variable types
if let Some(var_id) = var_id {
// Extract the variable value from the stack
let var = self.stack.get_var(
var_id,
Span {
start: span.start,
end: span.end,
},
);
// If the value exists and it's of type Record
if let Ok(value) = var {
return nested_suggestions(value, self.var_context.1.clone(), current_span);
}
}
}
// Variable completion (e.g: $en<tab> to complete $env)
for builtin in builtins {
if builtin.as_bytes().starts_with(&prefix) {
if options
.match_algorithm
.matches_u8(builtin.as_bytes(), &prefix)
{
output.push(Suggestion {
value: builtin.to_string(),
description: None,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
span: current_span,
append_whitespace: false,
});
}
}
// Working set scope vars
for scope in &working_set.delta.scope {
for v in &scope.vars {
if v.0.starts_with(&prefix) {
if options.match_algorithm.matches_u8(v.0, &prefix) {
output.push(Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
description: None,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
span: current_span,
append_whitespace: false,
});
}
}
}
// Permanent state vars
for scope in &self.engine_state.scope {
for v in &scope.vars {
if v.0.starts_with(&prefix) {
if options.match_algorithm.matches_u8(v.0, &prefix) {
output.push(Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
description: None,
extra: None,
span: reedline::Span {
start: span.start - offset,
end: span.end - offset,
},
span: current_span,
append_whitespace: false,
});
}
}
@ -77,6 +175,70 @@ impl Completer for VariableCompletion {
output.dedup();
(output, CompletionOptions::default())
output
}
}
// Find recursively the values for sublevels
// if no sublevels are set it returns the current value
fn nested_suggestions(
val: Value,
sublevels: Vec<Vec<u8>>,
current_span: reedline::Span,
) -> Vec<Suggestion> {
let mut output: Vec<Suggestion> = vec![];
let value = recursive_value(val, sublevels);
match value {
Value::Record {
cols,
vals: _,
span: _,
} => {
// Add all the columns as completion
for item in cols {
output.push(Suggestion {
value: item,
description: None,
extra: None,
span: current_span,
append_whitespace: false,
});
}
output
}
_ => output,
}
}
// Extracts the recursive value (e.g: $var.a.b.c)
fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
// Go to next sublevel
if let Some(next_sublevel) = sublevels.clone().into_iter().next() {
match val {
Value::Record {
cols,
vals,
span: _,
} => {
for item in cols.into_iter().zip(vals.into_iter()) {
// Check if index matches with sublevel
if item.0.as_bytes().to_vec() == next_sublevel {
// If matches try to fetch recursively the next
return recursive_value(item.1, sublevels.into_iter().skip(1).collect());
}
}
// Current sublevel value not found
return Value::Nothing {
span: Span { start: 0, end: 0 },
};
}
_ => return val,
}
}
val
}
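recursive_value walks a record one sublevel at a time (for example $env.config.history) and hands back the nested value whose column names become the suggestions. A condensed sketch with a minimal stand-in value type instead of nu_protocol::Value; the config/history keys are made-up sample data:

```
use std::collections::BTreeMap;

// Minimal stand-in for a nushell value: a record, a leaf string, or nothing.
enum Value {
    Record(BTreeMap<String, Value>),
    String(String),
    Nothing,
}

// Follow the sublevels (e.g. ["config", "history"]) into nested records.
fn recursive_value(val: Value, sublevels: &[&str]) -> Value {
    match (val, sublevels.split_first()) {
        (Value::Record(mut cols), Some((next, rest))) => match cols.remove(*next) {
            Some(inner) => recursive_value(inner, rest),
            None => Value::Nothing, // requested sublevel not found
        },
        (val, None) => val, // no more sublevels: this is the value to complete on
        (val, _) => val,    // not a record: stop descending
    }
}

fn main() {
    let mut history = BTreeMap::new();
    history.insert("max_size".to_string(), Value::String("10000".into()));
    let mut config = BTreeMap::new();
    config.insert("history".to_string(), Value::Record(history));
    let root = Value::Record(config);

    match recursive_value(root, &["history"]) {
        // The record's column names would be offered as completions.
        Value::Record(cols) => println!("columns: {:?}", cols.keys().collect::<Vec<_>>()),
        Value::String(s) => println!("leaf value: {}", s),
        Value::Nothing => println!("sublevel not found"),
    }
}
```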

View File

@ -1,7 +1,6 @@
mod commands;
mod completions;
mod config_files;
mod errors;
mod eval_file;
mod menus;
mod nu_highlight;
@ -15,9 +14,8 @@ mod util;
mod validation;
pub use commands::evaluate_commands;
pub use completions::NuCompleter;
pub use completions::{FileCompletion, NuCompleter};
pub use config_files::eval_config_contents;
pub use errors::CliError;
pub use eval_file::evaluate_file;
pub use menus::{DescriptionMenu, NuHelpCompleter};
pub use nu_highlight::NuHighlight;
@ -25,7 +23,6 @@ pub use print::Print;
pub use prompt::NushellPrompt;
pub use repl::evaluate_repl;
pub use syntax_highlight::NuHighlighter;
pub use util::print_pipeline_data;
pub use util::{eval_source, gather_parent_env_vars, get_init_cwd, report_error};
pub use validation::NuValidator;

View File

@ -272,7 +272,7 @@ impl DescriptionMenu {
}
fn no_records_msg(&self, use_ansi_coloring: bool) -> String {
let msg = "TYPE TO START SEACH";
let msg = "TYPE TO START SEARCH";
if use_ansi_coloring {
format!(
"{}{}{}",

View File

@ -92,6 +92,7 @@ impl NuHelpCompleter {
start: pos,
end: pos + line.len(),
},
append_whitespace: false,
}
})
.collect()

View File

@ -155,6 +155,7 @@ fn convert_to_suggestions(
description,
extra,
span,
append_whitespace: false,
}]
}
Value::List { vals, .. } => vals
@ -169,6 +170,7 @@ fn convert_to_suggestions(
start: 0,
end: line.len(),
},
append_whitespace: false,
}],
}
}

View File

@ -22,7 +22,7 @@ impl Command for NuHighlight {
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
@ -30,7 +30,7 @@ impl Command for NuHighlight {
let ctrlc = engine_state.ctrlc.clone();
let engine_state = engine_state.clone();
let config = stack.get_config()?;
let config = engine_state.get_config().clone();
let highlighter = crate::NuHighlighter {
engine_state,

View File

@ -34,7 +34,7 @@ impl Command for Print {
let head = call.head;
for arg in args {
crate::util::print_pipeline_data(arg.into_pipeline_data(), engine_state, stack)?;
arg.into_pipeline_data().print(engine_state, stack)?;
}
Ok(PipelineData::new(head))

View File

@ -7,6 +7,9 @@ use {
std::borrow::Cow,
};
const PROMPT_MARKER_BEFORE_PS1: &str = "\x1b]133;A\x1b\\"; // OSC 133;A ST
const PROMPT_MARKER_BEFORE_PS2: &str = "\x1b]133;A;k=s\x1b\\"; // OSC 133;A;k=s ST
/// Nushell prompt definition
#[derive(Clone)]
pub struct NushellPrompt {
@ -16,6 +19,7 @@ pub struct NushellPrompt {
default_vi_insert_prompt_indicator: Option<String>,
default_vi_normal_prompt_indicator: Option<String>,
default_multiline_indicator: Option<String>,
shell_integration: bool,
}
impl Default for NushellPrompt {
@ -33,6 +37,7 @@ impl NushellPrompt {
default_vi_insert_prompt_indicator: None,
default_vi_normal_prompt_indicator: None,
default_multiline_indicator: None,
shell_integration: false,
}
}
@ -82,20 +87,34 @@ impl NushellPrompt {
fn default_wrapped_custom_string(&self, str: String) -> String {
format!("({})", str)
}
pub(crate) fn enable_shell_integration(&mut self) {
self.shell_integration = true
}
}
impl Prompt for NushellPrompt {
fn render_prompt_left(&self) -> Cow<str> {
if let Some(prompt_string) = &self.left_prompt_string {
prompt_string.replace('\n', "\r\n").into()
// Just before starting to draw the PS1 prompt send the escape code (see
// https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
let mut prompt = if self.shell_integration {
String::from(PROMPT_MARKER_BEFORE_PS1)
} else {
String::new()
};
prompt.push_str(&match &self.left_prompt_string {
Some(prompt_string) => prompt_string.replace('\n', "\r\n"),
None => {
let default = DefaultPrompt::new();
default
.render_prompt_left()
.to_string()
.replace('\n', "\r\n")
.into()
}
});
prompt.into()
}
fn render_prompt_right(&self) -> Cow<str> {
@ -136,10 +155,21 @@ impl Prompt for NushellPrompt {
}
fn render_prompt_multiline_indicator(&self) -> Cow<str> {
match &self.default_multiline_indicator {
Some(indicator) => indicator.as_str().into(),
None => "::: ".into(),
}
// Just before starting to draw the PS1 prompt send the escape code (see
// https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
let mut prompt = if self.shell_integration {
String::from(PROMPT_MARKER_BEFORE_PS2)
} else {
String::new()
};
prompt.push_str(
self.default_multiline_indicator
.as_ref()
.unwrap_or(&String::from("::: ")),
);
prompt.into()
}
fn render_prompt_history_search_indicator(
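When shell_integration is enabled, the prompt renderer above prefixes the visible prompt with the OSC 133;A marker so terminals that speak the kitty/FinalTerm protocol can locate prompt boundaries. A small self-contained sketch of that prefixing step (render_left_prompt is an illustrative name):

```
// OSC 133;A marks the start of the primary prompt for terminals that follow
// the kitty / FinalTerm shell-integration protocol.
const PROMPT_MARKER_BEFORE_PS1: &str = "\x1b]133;A\x1b\\";

// Illustrative helper: emit the marker before any visible prompt text only
// when shell integration is enabled, as render_prompt_left does above.
fn render_left_prompt(shell_integration: bool, left_prompt: &str) -> String {
    let mut prompt = if shell_integration {
        String::from(PROMPT_MARKER_BEFORE_PS1)
    } else {
        String::new()
    };
    prompt.push_str(&left_prompt.replace('\n', "\r\n"));
    prompt
}

fn main() {
    let rendered = render_left_prompt(true, "~/project> ");
    assert!(rendered.starts_with("\x1b]133;A"));
    println!("{:?}", rendered);
}
```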

View File

@ -147,6 +147,10 @@ pub(crate) fn update_prompt<'prompt>(
(prompt_vi_insert_string, prompt_vi_normal_string),
);
if config.shell_integration {
nu_prompt.enable_shell_integration();
}
let ret_val = nu_prompt as &dyn Prompt;
if is_perf_true {
info!("update_prompt {}:{}:{}", file!(), line!(), column!());

View File

@ -814,6 +814,7 @@ fn event_from_record(
"none" => ReedlineEvent::None,
"actionhandler" => ReedlineEvent::ActionHandler,
"clearscreen" => ReedlineEvent::ClearScreen,
"clearscrollback" => ReedlineEvent::ClearScrollback,
"historyhintcomplete" => ReedlineEvent::HistoryHintComplete,
"historyhintwordcomplete" => ReedlineEvent::HistoryHintWordComplete,
"ctrld" => ReedlineEvent::CtrlD,
@ -836,6 +837,7 @@ fn event_from_record(
"menuprevious" => ReedlineEvent::MenuPrevious,
"menupagenext" => ReedlineEvent::MenuPageNext,
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
"openeditor" => ReedlineEvent::OpenEditor,
"menu" => {
let menu = extract_value("name", cols, vals, span)?;
ReedlineEvent::Menu(menu.into_string("", config))
@ -881,6 +883,7 @@ fn edit_from_record(
let value = extract_value("value", cols, vals, span)?;
EditCommand::InsertString(value.into_string("", config))
}
"insertnewline" => EditCommand::InsertNewline,
"backspace" => EditCommand::Backspace,
"delete" => EditCommand::Delete,
"backspaceword" => EditCommand::BackspaceWord,

View File

@ -15,12 +15,16 @@ use nu_protocol::engine::Stack;
use nu_protocol::PipelineData;
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Config, ShellError, Span, Value, CONFIG_VARIABLE_ID,
ShellError, Span, Value,
};
use reedline::{DefaultHinter, Emacs, Vi};
use std::io::{self, Write};
use std::path::PathBuf;
use std::{sync::atomic::Ordering, time::Instant};
const PROMPT_MARKER_BEFORE_CMD: &str = "\x1b]133;C\x1b\\"; // OSC 133;C ST
const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
pub fn evaluate_repl(
engine_state: &mut EngineState,
stack: &mut Stack,
@ -76,15 +80,7 @@ pub fn evaluate_repl(
// Get the config once for the history `max_history_size`
// Updating that will not be possible in one session
let mut config = match stack.get_config() {
Ok(config) => config,
Err(e) => {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &e);
Config::default()
}
};
let mut config = engine_state.get_config();
if is_perf_true {
info!("setup reedline {}:{}:{}", file!(), line!(), column!());
@ -114,26 +110,18 @@ pub fn evaluate_repl(
);
}
config = match stack.get_config() {
Ok(config) => config,
Err(e) => {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &e);
Config::default()
//Reset the ctrl-c handler
if let Some(ctrlc) = &mut engine_state.ctrlc {
ctrlc.store(false, Ordering::SeqCst);
}
};
config = engine_state.get_config();
if is_perf_true {
info!("setup colors {}:{}:{}", file!(), line!(), column!());
}
let color_hm = get_color_config(&config);
//Reset the ctrl-c handler
if let Some(ctrlc) = &mut engine_state.ctrlc {
ctrlc.store(false, Ordering::SeqCst);
}
let color_hm = get_color_config(config);
if is_perf_true {
info!("update reedline {}:{}:{}", file!(), line!(), column!());
@ -151,7 +139,6 @@ pub fn evaluate_repl(
.with_completer(Box::new(NuCompleter::new(
engine_reference.clone(),
stack.clone(),
stack.vars.get(&CONFIG_VARIABLE_ID).cloned(),
)))
.with_quick_completions(config.quick_completions)
.with_partial_completions(config.partial_completions)
@ -165,7 +152,7 @@ pub fn evaluate_repl(
line_editor.disable_hints()
};
line_editor = match add_menus(line_editor, engine_reference, stack, &config) {
line_editor = match add_menus(line_editor, engine_reference, stack, config) {
Ok(line_editor) => line_editor,
Err(e) => {
let working_set = StateWorkingSet::new(engine_state);
@ -174,6 +161,8 @@ pub fn evaluate_repl(
}
};
line_editor = line_editor.with_buffer_editor(config.buffer_editor.clone(), "nu".into());
if config.sync_history_on_enter {
if is_perf_true {
info!("sync history {}:{}:{}", file!(), line!(), column!());
@ -186,7 +175,7 @@ pub fn evaluate_repl(
}
// Changing the line editor based on the found keybindings
line_editor = match reedline_config::create_keybindings(&config) {
line_editor = match reedline_config::create_keybindings(config) {
Ok(keybindings) => match keybindings {
KeybindingsMode::Emacs(keybindings) => {
let edit_mode = Box::new(Emacs::new(keybindings));
@ -211,13 +200,8 @@ pub fn evaluate_repl(
info!("prompt_update {}:{}:{}", file!(), line!(), column!());
}
let prompt = prompt_update::update_prompt(
&config,
engine_state,
stack,
&mut nu_prompt,
is_perf_true,
);
let prompt =
prompt_update::update_prompt(config, engine_state, stack, &mut nu_prompt, is_perf_true);
entry_num += 1;
@ -231,6 +215,8 @@ pub fn evaluate_repl(
}
let input = line_editor.read_line(prompt);
let use_shell_integration = config.shell_integration;
match input {
Ok(Signal::Success(s)) => {
let start_time = Instant::now();
@ -255,7 +241,7 @@ pub fn evaluate_repl(
report_error(
&working_set,
&ShellError::DirectoryNotFound(tokens.0[0].span),
&ShellError::DirectoryNotFound(tokens.0[0].span, None),
);
}
@ -320,6 +306,22 @@ pub fn evaluate_repl(
let _ = std::env::set_current_dir(path);
engine_state.env_vars.insert("PWD".into(), cwd);
}
if use_shell_integration {
// Just before running a command/program, send the escape code (see
// https://sw.kovidgoyal.net/kitty/shell-integration/#notes-for-shell-developers)
let mut ansi_escapes = String::from(PROMPT_MARKER_BEFORE_CMD);
ansi_escapes.push_str(RESET_APPLICATION_MODE);
if let Some(cwd) = stack.get_env_var(engine_state, "PWD") {
let path = cwd.as_string()?;
ansi_escapes.push_str(&format!("\x1b]2;{}\x07", path));
}
// print!("{}", ansi_escapes);
match io::stdout().write_all(ansi_escapes.as_bytes()) {
Ok(it) => it,
Err(err) => print!("error: {}", err),
};
}
}
Ok(Signal::CtrlC) => {
// `Reedline` clears the line content. New prompt is shown
@ -329,9 +331,6 @@ pub fn evaluate_repl(
println!();
break;
}
Ok(Signal::CtrlL) => {
line_editor.clear_screen().into_diagnostic()?;
}
Err(err) => {
let message = err.to_string();
if !message.contains("duration") {
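Right before a command runs, the REPL above emits the OSC 133;C marker, resets application cursor-key mode, and sets the terminal title to the current directory. A compact sketch of assembling those escape sequences (pre_command_escapes and the sample path are illustrative):

```
use std::io::{self, Write};

// Escape codes sent just before a command runs when shell integration is on:
// OSC 133;C marks the start of command output, CSI ?1l leaves application
// cursor-key mode, and OSC 2 sets the terminal title.
const PROMPT_MARKER_BEFORE_CMD: &str = "\x1b]133;C\x1b\\";
const RESET_APPLICATION_MODE: &str = "\x1b[?1l";

// Illustrative helper mirroring the block above; the cwd value is made up.
fn pre_command_escapes(cwd: Option<&str>) -> String {
    let mut ansi_escapes = String::from(PROMPT_MARKER_BEFORE_CMD);
    ansi_escapes.push_str(RESET_APPLICATION_MODE);
    if let Some(path) = cwd {
        // Set the terminal window title to the current directory.
        ansi_escapes.push_str(&format!("\x1b]2;{}\x07", path));
    }
    ansi_escapes
}

fn main() -> io::Result<()> {
    let escapes = pre_command_escapes(Some("/home/user/project"));
    io::stdout().write_all(escapes.as_bytes())
}
```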

View File

@ -178,6 +178,11 @@ impl Highlighter for NuHighlighter {
get_shape_color(shape.1.to_string(), &self.config),
next_token,
)),
FlatShape::Directory => output.push((
// nushell Directory
get_shape_color(shape.1.to_string(), &self.config),
next_token,
)),
FlatShape::GlobPattern => output.push((
// nushell GlobPattern
get_shape_color(shape.1.to_string(), &self.config),

View File

@ -1,129 +1,59 @@
use crate::CliError;
use log::trace;
use nu_engine::eval_block;
use nu_parser::{lex, parse, trim_quotes, Token, TokenContents};
use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
use nu_protocol::engine::StateWorkingSet;
use nu_protocol::CliError;
use nu_protocol::{
ast::Call,
engine::{EngineState, Stack},
PipelineData, ShellError, Span, Value,
};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
use std::io::Write;
use std::path::PathBuf;
pub fn print_pipeline_data(
input: PipelineData,
engine_state: &EngineState,
stack: &mut Stack,
) -> Result<(), ShellError> {
// If the table function is in the declarations, then we can use it
// to create the table value that will be printed in the terminal
let config = stack.get_config().unwrap_or_default();
let stdout = std::io::stdout();
if let PipelineData::ExternalStream {
stdout: stream,
exit_code,
..
} = input
{
if let Some(stream) = stream {
for s in stream {
let _ = stdout.lock().write_all(s?.as_binary()?);
}
}
// Make sure everything has finished
if let Some(exit_code) = exit_code {
let _: Vec<_> = exit_code.into_iter().collect();
}
return Ok(());
}
match engine_state.find_decl("table".as_bytes()) {
Some(decl_id) => {
let table = engine_state.get_decl(decl_id).run(
engine_state,
stack,
&Call::new(Span::new(0, 0)),
input,
)?;
for item in table {
let stdout = std::io::stdout();
if let Value::Error { error } = item {
return Err(error);
}
let mut out = item.into_string("\n", &config);
out.push('\n');
match stdout.lock().write_all(out.as_bytes()) {
Ok(_) => (),
Err(err) => eprintln!("{}", err),
};
}
}
None => {
for item in input {
let stdout = std::io::stdout();
if let Value::Error { error } = item {
return Err(error);
}
let mut out = item.into_string("\n", &config);
out.push('\n');
match stdout.lock().write_all(out.as_bytes()) {
Ok(_) => (),
Err(err) => eprintln!("{}", err),
};
}
}
};
Ok(())
}
// This will collect environment variables from std::env and adds them to a stack.
//
// In order to ensure the values have spans, it first creates a dummy file, writes the collected
// env vars into it (in a "NAME"="value" format, quite similar to the output of the Unix 'env'
// tool), then uses the file to get the spans. The file stays in memory, no filesystem IO is done.
pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
gather_env_vars(std::env::vars(), engine_state);
}
fn gather_env_vars(vars: impl Iterator<Item = (String, String)>, engine_state: &mut EngineState) {
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::LabeledError(
&ShellError::GenericError(
format!("Environment variable was not captured: {}", env_str),
msg.into(),
"".to_string(),
None,
Some(msg.into()),
Vec::new(),
),
);
}
fn put_env_to_fake_file(name: &str, val: &str, fake_env_file: &mut String) {
fake_env_file.push('`');
fake_env_file.push_str(name);
fake_env_file.push('`');
fake_env_file.push_str(&escape_quote_string(name));
fake_env_file.push('=');
fake_env_file.push('`');
fake_env_file.push_str(val);
fake_env_file.push('`');
fake_env_file.push_str(&escape_quote_string(val));
fake_env_file.push('\n');
}
let mut fake_env_file = String::new();
let mut has_pwd = false;
// Make sure we always have PWD
if std::env::var("PWD").is_err() {
// Write all the env vars into a fake file
for (name, val) in vars {
if name == "PWD" {
has_pwd = true;
}
put_env_to_fake_file(&name, &val, &mut fake_env_file);
}
if !has_pwd {
match std::env::current_dir() {
Ok(cwd) => {
put_env_to_fake_file("PWD", &cwd.to_string_lossy(), &mut fake_env_file);
@ -133,20 +63,18 @@ pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::LabeledError(
&ShellError::GenericError(
"Current directory not found".to_string(),
format!("Retrieving current directory failed: {:?}", e),
"".to_string(),
None,
Some(format!("Retrieving current directory failed: {:?}", e)),
Vec::new(),
),
);
}
}
}
// Write all the env vars into a fake file
for (name, val) in std::env::vars() {
put_env_to_fake_file(&name, &val, &mut fake_env_file);
}
// Lex the fake file, assign spans to all environment variables and add them
// to stack
let span_offset = engine_state.next_span_start();
@ -184,8 +112,19 @@ pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
continue;
}
let bytes = trim_quotes(bytes);
String::from_utf8_lossy(bytes).to_string()
let (bytes, parse_error) = unescape_unquote_string(bytes, *span);
if parse_error.is_some() {
report_capture_error(
engine_state,
&String::from_utf8_lossy(contents),
"Got unparsable name.",
);
continue;
}
bytes
} else {
report_capture_error(
engine_state,
@ -213,10 +152,20 @@ pub fn gather_parent_env_vars(engine_state: &mut EngineState) {
continue;
}
let bytes = trim_quotes(bytes);
let (bytes, parse_error) = unescape_unquote_string(bytes, *span);
if parse_error.is_some() {
report_capture_error(
engine_state,
&String::from_utf8_lossy(contents),
"Got unparsable value.",
);
continue;
}
Value::String {
val: String::from_utf8_lossy(bytes).to_string(),
val: bytes,
span: *span,
}
} else {
@ -288,7 +237,7 @@ pub fn eval_source(
set_last_exit_code(stack, 0);
}
if let Err(err) = print_pipeline_data(pipeline_data, engine_state, stack) {
if let Err(err) = pipeline_data.print(engine_state, stack) {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &err);
@ -350,3 +299,32 @@ pub fn get_init_cwd() -> PathBuf {
},
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_gather_env_vars() {
let mut engine_state = EngineState::new();
let symbols = r##" !"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"##;
gather_env_vars(
[
("FOO".into(), "foo".into()),
("SYMBOLS".into(), symbols.into()),
(symbols.into(), "symbols".into()),
]
.into_iter(),
&mut engine_state,
);
let env = engine_state.env_vars;
assert!(matches!(env.get("FOO"), Some(Value::String { val, .. }) if val == "foo"));
assert!(matches!(env.get("SYMBOLS"), Some(Value::String { val, .. }) if val == symbols));
assert!(matches!(env.get(symbols), Some(Value::String { val, .. }) if val == "symbols"));
assert!(env.get("PWD").is_some());
assert_eq!(env.len(), 4);
}
}
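gather_env_vars now serializes the parent environment into the in-memory fake file with escape_quote_string instead of wrapping names and values in backticks, so quotes and backslashes survive the round trip. A rough sketch of that serialization step; the escaping function here is a simplified approximation, not nu-parser's exact implementation:

```
// Simplified approximation of escape_quote_string: wrap the text in double
// quotes and escape embedded backslashes and quotes (the real nu-parser
// function may differ in details).
fn escape_quote_string(s: &str) -> String {
    let mut out = String::with_capacity(s.len() + 2);
    out.push('"');
    for c in s.chars() {
        if c == '"' || c == '\\' {
            out.push('\\');
        }
        out.push(c);
    }
    out.push('"');
    out
}

// One line of the in-memory fake env file: "NAME"="value"
fn put_env_to_fake_file(name: &str, val: &str, fake_env_file: &mut String) {
    fake_env_file.push_str(&escape_quote_string(name));
    fake_env_file.push('=');
    fake_env_file.push_str(&escape_quote_string(val));
    fake_env_file.push('\n');
}

fn main() {
    let mut fake_env_file = String::new();
    put_env_to_fake_file("FOO", "foo", &mut fake_env_file);
    put_env_to_fake_file("GREETING", r#"say "hi""#, &mut fake_env_file);
    // Prints:
    // "FOO"="foo"
    // "GREETING"="say \"hi\""
    print!("{}", fake_env_file);
}
```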

View File

@ -0,0 +1,107 @@
use std::path::PathBuf;
use nu_cli::NuCompleter;
use nu_command::create_default_context;
use nu_protocol::engine::{EngineState, Stack};
use nu_test_support::fs;
use reedline::{Completer, Suggestion};
const SEP: char = std::path::MAIN_SEPARATOR;
#[test]
fn file_completions() {
// Create a new engine
let (dir, dir_str, engine) = new_engine();
let stack = Stack::new();
// Instantiate a new completer
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
// Test completions for the current folder
let target_dir = format!("cp {}", dir_str);
let suggestions = completer.complete(&target_dir, target_dir.len());
// Create the expected values
let expected_paths: Vec<String> = vec![
file(dir.join("nushell")),
folder(dir.join("test_a")),
folder(dir.join("test_b")),
folder(dir.join("another")),
file(dir.join(".hidden_file")),
folder(dir.join(".hidden_folder")),
];
// Match the results
match_suggestions(expected_paths, suggestions);
// Test completions for the completions/another folder
let target_dir = format!("cd {}", folder(dir.join("another")));
let suggestions = completer.complete(&target_dir, target_dir.len());
// Create the expected values
let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];
// Match the results
match_suggestions(expected_paths, suggestions);
}
#[test]
fn folder_completions() {
// Create a new engine
let (dir, dir_str, engine) = new_engine();
let stack = Stack::new();
// Instantiate a new completer
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
// Test completions for the current folder
let target_dir = format!("cd {}", dir_str);
let suggestions = completer.complete(&target_dir, target_dir.len());
// Create the expected values
let expected_paths: Vec<String> = vec![
folder(dir.join("test_a")),
folder(dir.join("test_b")),
folder(dir.join("another")),
folder(dir.join(".hidden_folder")),
];
// Match the results
match_suggestions(expected_paths, suggestions);
}
// creates a new engine with the current path into the completions fixtures folder
pub fn new_engine() -> (PathBuf, String, EngineState) {
// Target folder inside assets
let dir = fs::fixtures().join("completions");
let mut dir_str = dir
.clone()
.into_os_string()
.into_string()
.unwrap_or_default();
dir_str.push(SEP);
// Create a default engine
(dir.clone(), dir_str, create_default_context(dir))
}
// match a list of suggestions with the expected values
pub fn match_suggestions(expected: Vec<String>, suggestions: Vec<Suggestion>) {
expected.iter().zip(suggestions).for_each(|it| {
assert_eq!(it.0, &it.1.value);
});
}
// append the separator to the converted path
pub fn folder(path: PathBuf) -> String {
let mut converted_path = file(path);
converted_path.push(SEP);
converted_path
}
// convert a given path to string
pub fn file(path: PathBuf) -> String {
path.into_os_string().into_string().unwrap_or_default()
}

View File

@ -4,11 +4,11 @@ description = "Color configuration code used by Nushell"
edition = "2021"
license = "MIT"
name = "nu-color-config"
version = "0.61.0"
version = "0.62.0"
[dependencies]
nu-protocol = { path = "../nu-protocol", version = "0.61.0" }
nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
nu-ansi-term = "0.45.1"
nu-json = { path = "../nu-json", version = "0.61.0" }
nu-table = { path = "../nu-table", version = "0.61.0" }
nu-json = { path = "../nu-json", version = "0.62.0" }
nu-table = { path = "../nu-table", version = "0.62.0" }
serde = { version="1.0.123", features=["derive"] }

View File

@ -161,6 +161,13 @@ pub fn lookup_ansi_color_style(s: &str) -> Style {
"dgrbl" | "dark_gray_blink" => Color::DarkGray.blink(),
"dgrst" | "dark_gray_strike" => Color::DarkGray.strikethrough(),
"def" | "default" => Color::Default.normal(),
"defb" | "default_bold" => Color::Default.bold(),
"defu" | "default_underline" => Color::Default.underline(),
"defi" | "default_italic" => Color::Default.italic(),
"defd" | "default_dimmed" => Color::Default.dimmed(),
"defr" | "default_reverse" => Color::Default.reverse(),
_ => Color::White.normal(),
}
}

View File

@ -29,6 +29,7 @@ pub fn get_shape_color(shape: String, conf: &Config) -> Style {
"shape_record" => Style::new().fg(Color::Cyan).bold(),
"shape_block" => Style::new().fg(Color::Blue).bold(),
"shape_filepath" => Style::new().fg(Color::Cyan),
"shape_directory" => Style::new().fg(Color::Cyan),
"shape_globpattern" => Style::new().fg(Color::Cyan).bold(),
"shape_variable" => Style::new().fg(Color::Purple),
"shape_flag" => Style::new().fg(Color::Blue).bold(),

View File

@ -4,25 +4,25 @@ description = "Nushell's built-in commands"
edition = "2021"
license = "MIT"
name = "nu-command"
version = "0.61.0"
version = "0.62.0"
build = "build.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
nu-color-config = { path = "../nu-color-config", version = "0.61.0" }
nu-engine = { path = "../nu-engine", version = "0.61.0" }
nu-glob = { path = "../nu-glob", version = "0.61.0" }
nu-json = { path = "../nu-json", version = "0.61.0" }
nu-parser = { path = "../nu-parser", version = "0.61.0" }
nu-path = { path = "../nu-path", version = "0.61.0" }
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.61.0" }
nu-protocol = { path = "../nu-protocol", version = "0.61.0" }
nu-system = { path = "../nu-system", version = "0.61.0" }
nu-table = { path = "../nu-table", version = "0.61.0" }
nu-term-grid = { path = "../nu-term-grid", version = "0.61.0" }
nu-test-support = { path = "../nu-test-support", version = "0.61.0" }
nu-utils = { path = "../nu-utils", version = "0.61.0" }
nu-color-config = { path = "../nu-color-config", version = "0.62.0" }
nu-engine = { path = "../nu-engine", version = "0.62.0" }
nu-glob = { path = "../nu-glob", version = "0.62.0" }
nu-json = { path = "../nu-json", version = "0.62.0" }
nu-parser = { path = "../nu-parser", version = "0.62.0" }
nu-path = { path = "../nu-path", version = "0.62.0" }
nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.62.0" }
nu-protocol = { path = "../nu-protocol", version = "0.62.0" }
nu-system = { path = "../nu-system", version = "0.62.0" }
nu-table = { path = "../nu-table", version = "0.62.0" }
nu-term-grid = { path = "../nu-term-grid", version = "0.62.0" }
nu-test-support = { path = "../nu-test-support", version = "0.62.0" }
nu-utils = { path = "../nu-utils", version = "0.62.0" }
nu-ansi-term = "0.45.1"
# Potential dependencies for extras
@ -31,7 +31,7 @@ bytesize = "1.1.0"
calamine = "0.18.0"
chrono = { version = "0.4.19", features = ["serde"] }
chrono-humanize = "0.2.1"
chrono-tz = "0.6.0"
chrono-tz = "0.6.1"
crossterm = "0.23.0"
csv = "1.1.6"
dialoguer = "0.9.0"
@ -53,6 +53,7 @@ lscolors = { version = "0.9.0", features = ["crossterm"]}
md5 = { package = "md-5", version = "0.10.0" }
meval = "0.2.0"
mime = "0.3.16"
notify = "4.0.17"
num = { version = "0.4.0", optional = true }
pathdiff = "0.2.1"
quick-xml = "0.22"
@ -78,9 +79,10 @@ unicode-segmentation = "1.8.0"
url = "2.2.1"
uuid = { version = "0.8.2", features = ["v4"] }
which = { version = "4.2.2", optional = true }
reedline = { version = "0.4.0", features = ["bashisms"]}
reedline = { version = "0.5.0", features = ["bashisms"]}
wax = { version = "0.4.0", features = ["diagnostics"] }
zip = { version="0.5.9", optional = true }
rusqlite = { version = "0.27.0", features = ["bundled"], optional = true }
sqlparser = { version = "0.16.0", features = ["serde"], optional = true }
[target.'cfg(unix)'.dependencies]
umask = "1.0.0"
@ -105,6 +107,7 @@ trash-support = ["trash"]
which-support = ["which"]
plugin = ["nu-parser/plugin"]
dataframe = ["polars", "num"]
database = ["sqlparser", "rusqlite"]
[build-dependencies]
shadow-rs = "0.11.0"

File diff suppressed because one or more lines are too long

View File

@ -21,6 +21,10 @@ impl Command for Fmt {
Signature::build("fmt").category(Category::Conversions)
}
fn search_terms(&self) -> Vec<&str> {
vec!["display", "render", "format"]
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Get a record containing multiple formats for the number 42",

View File

@ -28,6 +28,10 @@ impl Command for SubCommand {
"Convert value to a binary primitive"
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "binary", "bytes", "bin"]
}
fn run(
&self,
engine_state: &EngineState,

View File

@ -27,6 +27,10 @@ impl Command for SubCommand {
"Convert value to boolean"
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "boolean", "true", "false", "1", "0"]
}
fn run(
&self,
engine_state: &EngineState,
@ -136,11 +140,14 @@ fn string_to_boolean(s: &str, span: Span) -> Result<bool, ShellError> {
let val = o.parse::<f64>();
match val {
Ok(f) => Ok(f.abs() >= f64::EPSILON),
Err(_) => Err(ShellError::CantConvertWithHelp(
Err(_) => Err(ShellError::CantConvert(
"boolean".to_string(),
"string".to_string(),
span,
r#"the strings "true" and "false" can be converted into a bool"#.to_string(),
Some(
r#"the strings "true" and "false" can be converted into a bool"#
.to_string(),
),
)),
}
}
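string_to_boolean accepts the strings true and false, and otherwise treats a parsable number as true when its absolute value is at least f64::EPSILON; anything else now surfaces as CantConvert with an optional help message. A simplified sketch of that decision (the exact case and whitespace handling of the real command may differ):

```
// Accept "true"/"false" directly; otherwise treat a parsable number as true
// when it is not (approximately) zero. Unparsable strings are rejected.
fn string_to_boolean(s: &str) -> Result<bool, String> {
    match s.trim().to_lowercase().as_str() {
        "true" => Ok(true),
        "false" => Ok(false),
        other => other
            .parse::<f64>()
            .map(|f| f.abs() >= f64::EPSILON)
            .map_err(|_| r#"only "true", "false", or numbers convert to bool"#.to_string()),
    }
}

fn main() {
    assert_eq!(string_to_boolean("true"), Ok(true));
    assert_eq!(string_to_boolean("0"), Ok(false));
    assert_eq!(string_to_boolean("1.5"), Ok(true));
    assert!(string_to_boolean("yes").is_err());
}
```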

View File

@ -104,6 +104,10 @@ impl Command for SubCommand {
"Convert text into a datetime"
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "date", "time", "timezone", "UTC"]
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
@ -260,11 +264,11 @@ fn action(
Ok(d) => Value::Date { val: d, span: head },
Err(reason) => {
return Value::Error {
error: ShellError::CantConvertWithHelp(
error: ShellError::CantConvert(
format!("could not parse as datetime using format '{}'", dt.0),
reason.to_string(),
head,
"you can use `into datetime` without a format string to enable flexible parsing".to_string()
Some("you can use `into datetime` without a format string to enable flexible parsing".to_string())
),
}
}

View File

@ -25,6 +25,10 @@ impl Command for SubCommand {
"Convert text into a decimal"
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "number", "floating"]
}
fn run(
&self,
engine_state: &EngineState,
@ -101,7 +105,12 @@ fn action(input: &Value, head: Span) -> Value {
match other.parse::<f64>() {
Ok(x) => Value::Float { val: x, span: head },
Err(reason) => Value::Error {
error: ShellError::CantConvert("float".to_string(), reason.to_string(), *span),
error: ShellError::CantConvert(
"float".to_string(),
reason.to_string(),
*span,
None,
),
},
}
}

View File

@ -28,6 +28,10 @@ impl Command for SubCommand {
"Convert value to duration"
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "time", "period"]
}
fn run(
&self,
engine_state: &EngineState,
@ -151,11 +155,11 @@ fn string_to_duration(s: &str, span: Span) -> Result<i64, ShellError> {
}
}
Err(ShellError::CantConvertWithHelp(
Err(ShellError::CantConvert(
"duration".to_string(),
"string".to_string(),
span,
"supported units are ns, us, ms, sec, min, hr, day, and wk".to_string(),
Some("supported units are ns, us, ms, sec, min, hr, day, and wk".to_string()),
))
}

View File

@ -27,6 +27,10 @@ impl Command for SubCommand {
"Convert value to filesize"
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "number", "size", "bytes"]
}
fn run(
&self,
engine_state: &EngineState,
@ -148,7 +152,12 @@ pub fn action(input: &Value, span: Span) -> Value {
fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
match a_string.trim().parse::<bytesize::ByteSize>() {
Ok(n) => Ok(n.0 as i64),
Err(_) => Err(ShellError::CantConvert("int".into(), "string".into(), span)),
Err(_) => Err(ShellError::CantConvert(
"int".into(),
"string".into(),
span,
None,
)),
}
}

View File

@ -33,6 +33,10 @@ impl Command for SubCommand {
"Convert value to integer"
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "number", "natural"]
}
fn run(
&self,
engine_state: &EngineState,
@ -207,7 +211,7 @@ fn convert_int(input: &Value, head: Span, radix: u32) -> Value {
match i64::from_str_radix(&i, radix) {
Ok(n) => Value::Int { val: n, span: head },
Err(_reason) => Value::Error {
error: ShellError::CantConvert("int".to_string(), "string".to_string(), head),
error: ShellError::CantConvert("int".to_string(), "string".to_string(), head, None),
},
}
}
@ -219,28 +223,29 @@ fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
let num = match i64::from_str_radix(b.trim_start_matches("0b"), 2) {
Ok(n) => n,
Err(_reason) => {
return Err(ShellError::CantConvertWithHelp(
return Err(ShellError::CantConvert(
"int".to_string(),
"string".to_string(),
span,
r#"digits following "0b" can only be 0 or 1"#.to_string(),
Some(r#"digits following "0b" can only be 0 or 1"#.to_string()),
))
}
};
Ok(num)
}
h if h.starts_with("0x") => {
let num = match i64::from_str_radix(h.trim_start_matches("0x"), 16) {
let num =
match i64::from_str_radix(h.trim_start_matches("0x"), 16) {
Ok(n) => n,
Err(_reason) => {
return Err(ShellError::CantConvertWithHelp(
Err(_reason) => return Err(ShellError::CantConvert(
"int".to_string(),
"string".to_string(),
span,
Some(
r#"hexadecimal digits following "0x" should be in 0-9, a-f, or A-F"#
.to_string(),
))
}
),
)),
};
Ok(num)
}
@ -252,6 +257,7 @@ fn int_from_string(a_string: &str, span: Span) -> Result<i64, ShellError> {
"int".to_string(),
"string".to_string(),
span,
None,
)),
},
},
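into int accepts 0b- and 0x-prefixed strings, and the parsing reduces to stripping the prefix and calling from_str_radix, with the reworked CantConvert variant carrying the optional help text shown above. A minimal sketch of just the numeric part (error handling simplified to ParseIntError):

```
use std::num::ParseIntError;

// Parse "0b..." as binary, "0x..." as hexadecimal, anything else as decimal.
fn int_from_string(s: &str) -> Result<i64, ParseIntError> {
    let trimmed = s.trim();
    if let Some(bin) = trimmed.strip_prefix("0b") {
        i64::from_str_radix(bin, 2)
    } else if let Some(hex) = trimmed.strip_prefix("0x") {
        i64::from_str_radix(hex, 16)
    } else {
        trimmed.parse::<i64>()
    }
}

fn main() {
    assert_eq!(int_from_string("0b1010"), Ok(10));
    assert_eq!(int_from_string("0xff"), Ok(255));
    assert_eq!(int_from_string("42"), Ok(42));
    assert!(int_from_string("0b102").is_err()); // only 0 and 1 may follow "0b"
}
```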

View File

@ -37,6 +37,10 @@ impl Command for SubCommand {
"Convert value to string"
}
fn search_terms(&self) -> Vec<&str> {
vec!["convert", "str", "text"]
}
fn run(
&self,
engine_state: &EngineState,
@ -138,7 +142,7 @@ fn string_helper(
let head = call.head;
let decimals_value: Option<i64> = call.get_flag(engine_state, stack, "decimals")?;
let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let config = stack.get_config().unwrap_or_default();
let config = engine_state.get_config().clone();
if let Some(decimal_val) = decimals_value {
if decimals && decimal_val.is_negative() {
@ -258,15 +262,20 @@ pub fn action(
),
},
Value::Binary { .. } => Value::Error {
error: ShellError::CantConvertWithHelp(
error: ShellError::CantConvert(
"string".into(),
"binary".into(),
span,
"try using the `decode` command".into(),
Some("try using the `decode` command".into()),
),
},
x => Value::Error {
error: ShellError::CantConvert(String::from("string"), x.get_type().to_string(), span),
error: ShellError::CantConvert(
String::from("string"),
x.get_type().to_string(),
span,
None,
),
},
}
}

View File

@ -25,12 +25,12 @@ impl Command for Debug {
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let config = stack.get_config().unwrap_or_default();
let config = engine_state.get_config().clone();
let raw = call.has_flag("raw");
input.map(

View File

@ -35,8 +35,13 @@ impl Command for Describe {
))
} else {
let value = input.into_value(call.head);
let description = match value {
Value::CustomValue { val, .. } => val.value_string(),
_ => value.get_type().to_string(),
};
Ok(Value::String {
val: value.get_type().to_string(),
val: description,
span: head,
}
.into_pipeline_data())

View File

@ -39,10 +39,12 @@ impl Command for ErrorMake {
Ok(make_error(&arg)
.map(|err| Value::Error { error: err })
.unwrap_or_else(|| Value::Error {
error: ShellError::SpannedLabeledError(
error: ShellError::GenericError(
"Creating error value not supported.".into(),
"unsupported error format".into(),
span,
Some(span),
None,
Vec::new(),
),
})
.into_pipeline_data())
@ -52,10 +54,12 @@ impl Command for ErrorMake {
make_error(&value)
.map(|err| Value::Error { error: err })
.unwrap_or_else(|| Value::Error {
error: ShellError::SpannedLabeledError(
error: ShellError::GenericError(
"Creating error value not supported.".into(),
"unsupported error format".into(),
span,
Some(span),
None,
Vec::new(),
),
})
},
@ -103,20 +107,26 @@ fn make_error(value: &Value) -> Option<ShellError> {
Some(Value::String {
val: label_text, ..
}),
) => Some(ShellError::SpannedLabeledError(
) => Some(ShellError::GenericError(
message,
label_text,
Span {
Some(Span {
start: start as usize,
end: end as usize,
},
}),
None,
Vec::new(),
)),
_ => None,
}
}
(Some(Value::String { val: message, .. }), None) => {
Some(ShellError::UnlabeledError(message))
}
(Some(Value::String { val: message, .. }), None) => Some(ShellError::GenericError(
message,
"".to_string(),
None,
None,
Vec::new(),
)),
_ => None,
}
} else {

View File

@ -2,8 +2,8 @@ use nu_engine::{eval_block, eval_expression, CallExt};
use nu_protocol::ast::Call;
use nu_protocol::engine::{CaptureBlock, Command, EngineState, Stack};
use nu_protocol::{
Category, Example, IntoInterruptiblePipelineData, PipelineData, Signature, Span, SyntaxShape,
Value,
Category, Example, IntoInterruptiblePipelineData, ListStream, PipelineData, Signature, Span,
SyntaxShape, Value,
};
#[derive(Clone)]
@ -88,8 +88,8 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
let redirect_stderr = call.redirect_stderr;
match values {
Value::List { vals, .. } => Ok(vals
.into_iter()
Value::List { vals, .. } => {
Ok(ListStream::from_stream(vals.into_iter(), ctrlc.clone())
.enumerate()
.map(move |(idx, x)| {
stack.with_env(&orig_env_vars, &orig_env_hidden);
@ -127,7 +127,8 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
}
})
.filter(|x| !x.is_nothing())
.into_pipeline_data(ctrlc)),
.into_pipeline_data(ctrlc))
}
Value::Range { val, .. } => Ok(val
.into_range_iter(ctrlc.clone())?
.enumerate()

View File
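
The hunk above wraps the list in a ListStream so iteration can be interrupted; a minimal sketch of the same pattern, assuming `ctrlc` is the engine's `Option<Arc<AtomicBool>>` interrupt flag as elsewhere in this diff.
```
use nu_protocol::{IntoInterruptiblePipelineData, ListStream, PipelineData, Value};
use std::sync::{atomic::AtomicBool, Arc};

// Wrapping a plain Vec<Value> in a ListStream makes downstream iteration
// check the ctrl-c flag between items instead of draining the list eagerly.
fn list_as_interruptible_stream(
    vals: Vec<Value>,
    ctrlc: Option<Arc<AtomicBool>>,
) -> PipelineData {
    ListStream::from_stream(vals.into_iter(), ctrlc.clone())
        .filter(|x| !x.is_nothing()) // per-item work goes here, as in `each`
        .into_pipeline_data(ctrlc)
}
```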

@ -54,11 +54,6 @@ impl Command for Help {
example: "help commands",
result: None,
},
Example {
description: "generate documentation",
example: "help generate_docs",
result: None,
},
Example {
description: "show help for single command",
example: "help match",

View File

@ -48,10 +48,12 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
{
pat
} else {
return Err(ShellError::SpannedLabeledError(
return Err(ShellError::GenericError(
"Unexpected import".into(),
"import pattern not supported".into(),
call.head,
Some(call.head),
None,
Vec::new(),
));
};

View File

@ -1,9 +1,8 @@
use nu_engine::{eval_block, eval_expression, CallExt};
use nu_engine::{eval_block, eval_expression, eval_expression_with_input, CallExt};
use nu_protocol::ast::Call;
use nu_protocol::engine::{CaptureBlock, Command, EngineState, Stack};
use nu_protocol::{
Category, Example, FromValue, IntoPipelineData, PipelineData, ShellError, Signature,
SyntaxShape, Value,
Category, Example, FromValue, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
#[derive(Clone)]
@ -85,12 +84,24 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
call.redirect_stderr,
)
} else {
eval_expression(engine_state, stack, else_expr)
.map(|x| x.into_pipeline_data())
eval_expression_with_input(
engine_state,
stack,
else_expr,
input,
call.redirect_stdout,
call.redirect_stderr,
)
}
} else {
eval_expression(engine_state, stack, else_case)
.map(|x| x.into_pipeline_data())
eval_expression_with_input(
engine_state,
stack,
else_case,
input,
call.redirect_stdout,
call.redirect_stderr,
)
}
} else {
Ok(PipelineData::new(call.head))
@ -100,6 +111,7 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
"bool".into(),
x.get_type().to_string(),
result.span()?,
None,
)),
}
}

View File

@ -178,7 +178,7 @@ The most common form of data in Nushell is the table. Tables contain rows and
columns of data. In each cell of the table, there is data that you can access
using Nushell commands.
To get the 3rd row in the table, you can use the `nth` command:
To get the 3rd row in the table, you can use the `select` command:
```
ls | select 2
```

View File

@ -46,10 +46,12 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
{
pat
} else {
return Err(ShellError::SpannedLabeledError(
return Err(ShellError::GenericError(
"Unexpected import".into(),
"import pattern not supported".into(),
call.head,
Some(call.head),
None,
Vec::new(),
));
};
@ -120,13 +122,15 @@ https://www.nushell.sh/book/thinking_in_nushell.html#parsing-and-evaluation-are-
} else {
// TODO: This is a workaround since call.positional[0].span points at 0 for some reason
// when this error is triggered
return Err(ShellError::SpannedLabeledError(
return Err(ShellError::GenericError(
format!(
"Could not import from '{}'",
String::from_utf8_lossy(&import_pattern.head.name)
),
"module does not exist".to_string(),
import_pattern.head.span,
Some(import_pattern.head.span),
None,
Vec::new(),
));
}

View File

@ -198,122 +198,40 @@ pub fn version(
fn features_enabled() -> Vec<String> {
let mut names = vec!["default".to_string()];
// NOTE: There should be another way to know
// features on.
#[cfg(feature = "ctrlc")]
{
names.push("ctrlc".to_string());
}
// #[cfg(feature = "rich-benchmark")]
// {
// names.push("rich-benchmark".to_string());
// }
#[cfg(feature = "rustyline-support")]
{
names.push("rustyline".to_string());
}
#[cfg(feature = "term")]
{
names.push("term".to_string());
}
#[cfg(feature = "uuid_crate")]
{
names.push("uuid".to_string());
}
// NOTE: There should be another way to know features on.
#[cfg(feature = "which-support")]
{
names.push("which".to_string());
}
#[cfg(feature = "zip")]
{
// always include it?
names.push("zip".to_string());
}
#[cfg(feature = "clipboard-cli")]
{
names.push("clipboard-cli".to_string());
}
#[cfg(feature = "trash-support")]
{
names.push("trash".to_string());
}
#[cfg(feature = "database")]
{
names.push("database".to_string());
}
#[cfg(feature = "dataframe")]
{
names.push("dataframe".to_string());
}
#[cfg(feature = "table-pager")]
#[cfg(feature = "static-link-openssl")]
{
names.push("table-pager".to_string());
names.push("static-link-openssl".to_string());
}
// #[cfg(feature = "binaryview")]
// {
// names.push("binaryview".to_string());
// }
// #[cfg(feature = "start")]
// {
// names.push("start".to_string());
// }
// #[cfg(feature = "bson")]
// {
// names.push("bson".to_string());
// }
// #[cfg(feature = "sqlite")]
// {
// names.push("sqlite".to_string());
// }
// #[cfg(feature = "s3")]
// {
// names.push("s3".to_string());
// }
// #[cfg(feature = "chart")]
// {
// names.push("chart".to_string());
// }
// #[cfg(feature = "xpath")]
// {
// names.push("xpath".to_string());
// }
// #[cfg(feature = "selector")]
// {
// names.push("selector".to_string());
// }
// #[cfg(feature = "extra")]
// {
// names.push("extra".to_string());
// }
// #[cfg(feature = "preserve_order")]
// {
// names.push("preserve_order".to_string());
// }
// #[cfg(feature = "wee_alloc")]
// {
// names.push("wee_alloc".to_string());
// }
// #[cfg(feature = "console_error_panic_hook")]
// {
// names.push("console_error_panic_hook".to_string());
// }
#[cfg(feature = "extra")]
{
names.push("extra".to_string());
}
names.sort();

View File
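
Distilled from the hunk above, the feature-reporting pattern that survives the cleanup: each optional capability pushes its name only when the corresponding Cargo feature is compiled in (the feature names here are ones visible in this diff).
```
// Compile-time feature detection: a block only exists in builds where the
// corresponding Cargo feature is enabled.
fn features_enabled() -> Vec<String> {
    let mut names = vec!["default".to_string()];

    #[cfg(feature = "database")]
    {
        names.push("database".to_string());
    }

    #[cfg(feature = "dataframe")]
    {
        names.push("dataframe".to_string());
    }

    names.sort();
    names
}
```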

@ -0,0 +1,143 @@
use crate::database::values::dsl::ExprDb;
use super::super::SQLiteDatabase;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, SyntaxShape,
Value,
};
use sqlparser::ast::{BinaryOperator, Expr, Query, Select, SetExpr};
#[derive(Clone)]
pub struct AndDb;
impl Command for AndDb {
fn name(&self) -> &str {
"db and"
}
fn usage(&self) -> &str {
"Includes an AND clause for a query or expression"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("where", SyntaxShape::Any, "Where expression on the table")
.category(Category::Custom("database".into()))
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "where"]
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "selects a column from a database with a where clause",
example: r#"db open db.mysql
| db select a
| db from table_1
| db where ((db col a) > 1)
| db and ((db col b) == 1)
| db describe"#,
result: None,
},
Example {
description: "Creates a nested where clause",
example: r#"db open db.mysql
| db select a
| db from table_1
| db where ((db col a) > 1 | db and ((db col a) < 10))
| db describe"#,
result: None,
},
]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let value: Value = call.req(engine_state, stack, 0)?;
let expr = ExprDb::try_from_value(&value)?.into_native();
let value = input.into_value(call.head);
if let Ok(expression) = ExprDb::try_from_value(&value) {
let expression = Expr::BinaryOp {
left: Box::new(expression.into_native()),
op: BinaryOperator::And,
right: Box::new(expr),
};
let expression: ExprDb = Expr::Nested(Box::new(expression)).into();
Ok(expression.into_value(call.head).into_pipeline_data())
} else if let Ok(mut db) = SQLiteDatabase::try_from_value(value.clone()) {
db.query = match db.query {
Some(query) => Some(modify_query(query, expr, call.head)?),
None => {
return Err(ShellError::GenericError(
"Connection without query".into(),
"Missing query in the connection".into(),
Some(call.head),
None,
Vec::new(),
))
}
};
Ok(db.into_value(call.head).into_pipeline_data())
} else {
Err(ShellError::CantConvert(
"expression or query".into(),
value.get_type().to_string(),
value.span()?,
None,
))
}
}
}
fn modify_query(mut query: Query, expression: Expr, span: Span) -> Result<Query, ShellError> {
query.body = match query.body {
SetExpr::Select(select) => Ok(SetExpr::Select(modify_select(select, expression, span)?)),
_ => Err(ShellError::GenericError(
"Query without a select".into(),
"Missing a WHERE clause before an AND clause".into(),
Some(span),
None,
Vec::new(),
)),
}?;
Ok(query)
}
fn modify_select(
mut select: Box<Select>,
expression: Expr,
span: Span,
) -> Result<Box<Select>, ShellError> {
let new_expression = match &select.selection {
Some(expr) => Ok(Expr::BinaryOp {
left: Box::new(expr.clone()),
op: BinaryOperator::And,
right: Box::new(expression),
}),
None => Err(ShellError::GenericError(
"Query without a select".into(),
"Missing a WHERE clause before an AND clause".into(),
Some(span),
None,
Vec::new(),
)),
}?;
select.as_mut().selection = Some(new_expression);
Ok(select)
}

View File
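
When `db and` receives an expression rather than a connection, the code above nests the combined condition; a minimal sketch of that expression path, assuming the sqlparser version used by this change.
```
use sqlparser::ast::{BinaryOperator, Expr, Ident};

// Combine two conditions with AND and parenthesize the result, as `db and`
// does for expression input, producing `(left AND right)`.
fn and_nested(left: Expr, right: Expr) -> Expr {
    Expr::Nested(Box::new(Expr::BinaryOp {
        left: Box::new(left),
        op: BinaryOperator::And,
        right: Box::new(right),
    }))
}

// Helper for a bare column reference, mirroring what `db col` produces.
fn column(name: &str) -> Expr {
    Expr::Identifier(Ident {
        value: name.to_string(),
        quote_style: None,
    })
}
```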

@ -0,0 +1,49 @@
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature,
};
use super::super::SQLiteDatabase;
#[derive(Clone)]
pub struct CollectDb;
impl Command for CollectDb {
fn name(&self) -> &str {
"db collect"
}
fn signature(&self) -> Signature {
Signature::build(self.name()).category(Category::Custom("database".into()))
}
fn usage(&self) -> &str {
"Query a database using SQL."
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Collect from a select query",
example: "open foo.db | db select a | db from table_1 | db collect",
result: None,
}]
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "collect"]
}
fn run(
&self,
_engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
db.collect(call.head)
.map(IntoPipelineData::into_pipeline_data)
}
}

View File

@ -0,0 +1,42 @@
use nu_engine::get_full_help;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, IntoPipelineData, PipelineData, ShellError, Signature, Value,
};
#[derive(Clone)]
pub struct Database;
impl Command for Database {
fn name(&self) -> &str {
"db"
}
fn usage(&self) -> &str {
"Database commands"
}
fn signature(&self) -> Signature {
Signature::build(self.name()).category(Category::Custom("database".into()))
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
Ok(Value::String {
val: get_full_help(
&Database.signature(),
&Database.examples(),
engine_state,
stack,
),
span: call.head,
}
.into_pipeline_data())
}
}

View File

@ -0,0 +1,47 @@
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature,
};
use super::super::SQLiteDatabase;
#[derive(Clone)]
pub struct DescribeDb;
impl Command for DescribeDb {
fn name(&self) -> &str {
"db describe"
}
fn signature(&self) -> Signature {
Signature::build(self.name()).category(Category::Custom("database".into()))
}
fn usage(&self) -> &str {
"Describes connection and query of the DB object"
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Describe SQLite database constructed query",
example: "db open foo.db | db select table_1 | db describe",
result: None,
}]
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "SQLite"]
}
fn run(
&self,
_engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
Ok(db.describe(call.head).into_pipeline_data())
}
}

View File

@ -0,0 +1,128 @@
use super::super::SQLiteDatabase;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape,
};
use sqlparser::ast::{Ident, ObjectName, Query, Select, SetExpr, TableFactor, TableWithJoins};
#[derive(Clone)]
pub struct FromDb;
impl Command for FromDb {
fn name(&self) -> &str {
"db from"
}
fn usage(&self) -> &str {
"Select section from query statement for a DB"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required(
"select",
SyntaxShape::String,
"Name of table to select from",
)
.category(Category::Custom("database".into()))
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "from"]
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Selects table from database",
example: "db open db.mysql | db from table_a",
result: None,
}]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let table: String = call.req(engine_state, stack, 0)?;
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
db.query = match db.query {
None => Some(create_query(table)),
Some(query) => Some(modify_query(query, table)),
};
Ok(db.into_value(call.head).into_pipeline_data())
}
}
fn create_query(table: String) -> Query {
Query {
with: None,
body: SetExpr::Select(Box::new(create_select(table))),
order_by: Vec::new(),
limit: None,
offset: None,
fetch: None,
lock: None,
}
}
fn modify_query(mut query: Query, table: String) -> Query {
query.body = match query.body {
SetExpr::Select(select) => SetExpr::Select(modify_select(select, table)),
_ => SetExpr::Select(Box::new(create_select(table))),
};
query
}
fn modify_select(mut select: Box<Select>, table: String) -> Box<Select> {
select.as_mut().from = create_from(table);
select
}
fn create_select(table: String) -> Select {
Select {
distinct: false,
top: None,
projection: Vec::new(),
into: None,
from: create_from(table),
lateral_views: Vec::new(),
selection: None,
group_by: Vec::new(),
cluster_by: Vec::new(),
distribute_by: Vec::new(),
sort_by: Vec::new(),
having: None,
}
}
// This function needs more work:
// it still needs to handle multiple tables and joins, and we will probably
// need to define expressions for the columns instead of strings
fn create_from(table: String) -> Vec<TableWithJoins> {
let ident = Ident {
value: table,
quote_style: None,
};
let table_factor = TableFactor::Table {
name: ObjectName(vec![ident]),
alias: None,
args: Vec::new(),
with_hints: Vec::new(),
};
let table = TableWithJoins {
relation: table_factor,
joins: Vec::new(),
};
vec![table]
}

View File

@ -0,0 +1,77 @@
use super::super::SQLiteDatabase;
use crate::database::values::dsl::ExprDb;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
#[derive(Clone)]
pub struct LimitDb;
impl Command for LimitDb {
fn name(&self) -> &str {
"db limit"
}
fn usage(&self) -> &str {
"Limit result from query"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required(
"limit",
SyntaxShape::Int,
"Number of rows to extract for query",
)
.category(Category::Custom("database".into()))
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "limit"]
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Limits selection from table",
example: r#"db open db.mysql
| db from table_a
| db select a
| db limit 10
| db describe"#,
result: None,
}]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let limit: Value = call.req(engine_state, stack, 0)?;
let expr = ExprDb::try_from_value(&limit)?.into_native();
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
db.query = match db.query {
Some(mut query) => {
query.limit = Some(expr);
Some(query)
}
None => {
return Err(ShellError::GenericError(
"Connection without query".into(),
"The connection needs a query defined".into(),
Some(call.head),
None,
Vec::new(),
))
}
};
Ok(db.into_value(call.head).into_pipeline_data())
}
}

View File

@ -0,0 +1,62 @@
mod and;
mod collect;
mod command;
mod describe;
mod from;
mod limit;
mod open;
mod or;
mod order_by;
mod query;
mod schema;
mod select;
mod where_;
// Temporary module to create Query objects
mod testing;
use testing::TestingDb;
use nu_protocol::engine::StateWorkingSet;
use and::AndDb;
use collect::CollectDb;
use command::Database;
use describe::DescribeDb;
use from::FromDb;
use limit::LimitDb;
use open::OpenDb;
use or::OrDb;
use order_by::OrderByDb;
use query::QueryDb;
use schema::SchemaDb;
use select::ProjectionDb;
use where_::WhereDb;
pub fn add_commands_decls(working_set: &mut StateWorkingSet) {
macro_rules! bind_command {
( $command:expr ) => {
working_set.add_decl(Box::new($command));
};
( $( $command:expr ),* ) => {
$( working_set.add_decl(Box::new($command)); )*
};
}
// Database commands
bind_command!(
AndDb,
CollectDb,
Database,
DescribeDb,
FromDb,
QueryDb,
LimitDb,
ProjectionDb,
OpenDb,
OrderByDb,
OrDb,
SchemaDb,
TestingDb,
WhereDb
);
}

View File

@ -0,0 +1,52 @@
use super::super::SQLiteDatabase;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Spanned, SyntaxShape,
};
use std::path::PathBuf;
#[derive(Clone)]
pub struct OpenDb;
impl Command for OpenDb {
fn name(&self) -> &str {
"db open"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("query", SyntaxShape::Filepath, "SQLite file to be opened")
.category(Category::Custom("database".into()))
}
fn usage(&self) -> &str {
"Open a database"
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "open"]
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Open a sqlite file",
example: r#"db open file.sqlite"#,
result: None,
}]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let path: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
SQLiteDatabase::try_from_path(path.item.as_path(), path.span)
.map(|db| db.into_value(call.head).into_pipeline_data())
}
}

View File
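
`db open` defers validation to SQLiteDatabase::try_from_path (shown further down in this diff), which checks the 16-byte SQLite header; a standalone sketch of that check using only the standard library.
```
use std::{fs::File, io::Read, path::Path};

// A SQLite 3 file begins with the 16-byte header "SQLite format 3\0".
const SQLITE_MAGIC_BYTES: &[u8] = b"SQLite format 3\0";

// Read the first 16 bytes and compare them against the magic header.
fn looks_like_sqlite(path: &Path) -> std::io::Result<bool> {
    let mut buf = [0u8; 16];
    File::open(path)?.read_exact(&mut buf)?;
    Ok(buf == SQLITE_MAGIC_BYTES)
}
```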

@ -0,0 +1,143 @@
use crate::database::values::dsl::ExprDb;
use super::super::SQLiteDatabase;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, SyntaxShape,
Value,
};
use sqlparser::ast::{BinaryOperator, Expr, Query, Select, SetExpr};
#[derive(Clone)]
pub struct OrDb;
impl Command for OrDb {
fn name(&self) -> &str {
"db or"
}
fn usage(&self) -> &str {
"Includes an OR clause for a query or expression"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("where", SyntaxShape::Any, "Where expression on the table")
.category(Category::Custom("database".into()))
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "where"]
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "selects a column from a database with a where clause",
example: r#"db open db.mysql
| db select a
| db from table_1
| db where ((db col a) > 1)
| db or ((db col b) == 1)
| db describe"#,
result: None,
},
Example {
description: "Creates a nested where clause",
example: r#"db open db.mysql
| db select a
| db from table_1
| db where ((db col a) > 1 | db or ((db col a) < 10))
| db describe"#,
result: None,
},
]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let value: Value = call.req(engine_state, stack, 0)?;
let expr = ExprDb::try_from_value(&value)?.into_native();
let value = input.into_value(call.head);
if let Ok(expression) = ExprDb::try_from_value(&value) {
let expression = Expr::BinaryOp {
left: Box::new(expression.into_native()),
op: BinaryOperator::Or,
right: Box::new(expr),
};
let expression: ExprDb = Expr::Nested(Box::new(expression)).into();
Ok(expression.into_value(call.head).into_pipeline_data())
} else if let Ok(mut db) = SQLiteDatabase::try_from_value(value.clone()) {
db.query = match db.query {
Some(query) => Some(modify_query(query, expr, call.head)?),
None => {
return Err(ShellError::GenericError(
"Connection without query".into(),
"Missing query in the connection".into(),
Some(call.head),
None,
Vec::new(),
))
}
};
Ok(db.into_value(call.head).into_pipeline_data())
} else {
Err(ShellError::CantConvert(
"expression or query".into(),
value.get_type().to_string(),
value.span()?,
None,
))
}
}
}
fn modify_query(mut query: Query, expression: Expr, span: Span) -> Result<Query, ShellError> {
query.body = match query.body {
SetExpr::Select(select) => Ok(SetExpr::Select(modify_select(select, expression, span)?)),
_ => Err(ShellError::GenericError(
"Query without a select".into(),
"Missing a WHERE clause before an OR clause".into(),
Some(span),
None,
Vec::new(),
)),
}?;
Ok(query)
}
fn modify_select(
mut select: Box<Select>,
expression: Expr,
span: Span,
) -> Result<Box<Select>, ShellError> {
let new_expression = match &select.selection {
Some(expr) => Ok(Expr::BinaryOp {
left: Box::new(expr.clone()),
op: BinaryOperator::Or,
right: Box::new(expression),
}),
None => Err(ShellError::GenericError(
"Query without a select".into(),
"Missing a WHERE clause before an OR clause".into(),
Some(span),
None,
Vec::new(),
)),
}?;
select.as_mut().selection = Some(new_expression);
Ok(select)
}

View File

@ -0,0 +1,97 @@
use crate::database::values::dsl::ExprDb;
use super::super::SQLiteDatabase;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
use sqlparser::ast::OrderByExpr;
#[derive(Clone)]
pub struct OrderByDb;
impl Command for OrderByDb {
fn name(&self) -> &str {
"db order-by"
}
fn usage(&self) -> &str {
"Orders by query"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.switch("ascending", "Order by ascending values", Some('a'))
.switch("nulls_first", "Show nulls first in order", Some('n'))
.rest(
"select",
SyntaxShape::Any,
"Select expression(s) on the table",
)
.category(Category::Custom("database".into()))
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "select"]
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "orders query by a column",
example: r#"db open db.mysql
| db from table_a
| db select a
| db order-by a
| db describe"#,
result: None,
}]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let asc = call.has_flag("ascending");
let nulls_first = call.has_flag("nulls_first");
let vals: Vec<Value> = call.rest(engine_state, stack, 0)?;
let value = Value::List {
vals,
span: call.head,
};
let expressions = ExprDb::extract_exprs(value)?;
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
db.query = match db.query {
Some(mut query) => {
let mut order_expr: Vec<OrderByExpr> = expressions
.into_iter()
.map(|expr| OrderByExpr {
expr,
asc: if asc { Some(asc) } else { None },
nulls_first: if nulls_first { Some(nulls_first) } else { None },
})
.collect();
query.order_by.append(&mut order_expr);
Some(query)
}
None => {
return Err(ShellError::GenericError(
"Connection without query".into(),
"The connection needs a query defined".into(),
Some(call.head),
None,
Vec::new(),
))
}
};
Ok(db.into_value(call.head).into_pipeline_data())
}
}

View File

@ -0,0 +1,57 @@
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Spanned, SyntaxShape,
};
use super::super::SQLiteDatabase;
#[derive(Clone)]
pub struct QueryDb;
impl Command for QueryDb {
fn name(&self) -> &str {
"db query"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required(
"query",
SyntaxShape::String,
"SQL to execute against the database",
)
.category(Category::Custom("database".into()))
}
fn usage(&self) -> &str {
"Query a database using SQL."
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Get 1 table out of a SQLite database",
example: r#"db open foo.db | db query "SELECT * FROM Bar""#,
result: None,
}]
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "SQLite"]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let sql: Spanned<String> = call.req(engine_state, stack, 0)?;
let db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
db.query(&sql, call.head)
.map(IntoPipelineData::into_pipeline_data)
}
}

View File

@ -0,0 +1,240 @@
use super::super::SQLiteDatabase;
use crate::database::values::definitions::db_row::DbRow;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Value,
};
#[derive(Clone)]
pub struct SchemaDb;
impl Command for SchemaDb {
fn name(&self) -> &str {
"db schema"
}
fn signature(&self) -> Signature {
Signature::build(self.name()).category(Category::Custom("database".into()))
}
fn usage(&self) -> &str {
"Show database information, including its schema."
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Show the schema of a SQLite database",
example: r#"open foo.db | db schema"#,
result: None,
}]
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "info", "SQLite", "schema"]
}
fn run(
&self,
_engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let mut cols = vec![];
let mut vals = vec![];
let span = call.head;
let sqlite_db = SQLiteDatabase::try_from_pipeline(input, span)?;
let conn = sqlite_db.open_connection().map_err(|e| {
ShellError::GenericError(
"Error opening file".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let dbs = sqlite_db.get_databases_and_tables(&conn).map_err(|e| {
ShellError::GenericError(
"Error getting databases and tables".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
cols.push("db_filename".into());
vals.push(Value::String {
val: sqlite_db.path.to_string_lossy().to_string(),
span,
});
for db in dbs {
let tables = db.tables();
let mut table_list: Vec<Value> = vec![];
let mut table_names = vec![];
let mut table_values = vec![];
for table in tables {
let columns = sqlite_db.get_columns(&conn, &table).map_err(|e| {
ShellError::GenericError(
"Error getting database columns".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
// a record of column name = column value
let mut column_info = vec![];
for t in columns {
let mut col_names = vec![];
let mut col_values = vec![];
let fields = t.fields();
let columns = t.columns();
for (k, v) in fields.iter().zip(columns.iter()) {
col_names.push(k.clone());
col_values.push(Value::string(v.clone(), span));
}
column_info.push(Value::Record {
cols: col_names.clone(),
vals: col_values.clone(),
span,
});
}
let constraints = sqlite_db.get_constraints(&conn, &table).map_err(|e| {
ShellError::GenericError(
"Error getting DB constraints".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let mut constraint_info = vec![];
for constraint in constraints {
let mut con_cols = vec![];
let mut con_vals = vec![];
let fields = constraint.fields();
let columns = constraint.columns();
for (k, v) in fields.iter().zip(columns.iter()) {
con_cols.push(k.clone());
con_vals.push(Value::string(v.clone(), span));
}
constraint_info.push(Value::Record {
cols: con_cols.clone(),
vals: con_vals.clone(),
span,
});
}
let foreign_keys = sqlite_db.get_foreign_keys(&conn, &table).map_err(|e| {
ShellError::GenericError(
"Error getting DB Foreign Keys".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let mut foreign_key_info = vec![];
for fk in foreign_keys {
let mut fk_cols = vec![];
let mut fk_vals = vec![];
let fields = fk.fields();
let columns = fk.columns();
for (k, v) in fields.iter().zip(columns.iter()) {
fk_cols.push(k.clone());
fk_vals.push(Value::string(v.clone(), span));
}
foreign_key_info.push(Value::Record {
cols: fk_cols.clone(),
vals: fk_vals.clone(),
span,
});
}
let indexes = sqlite_db.get_indexes(&conn, &table).map_err(|e| {
ShellError::GenericError(
"Error getting DB Indexes".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
let mut index_info = vec![];
for index in indexes {
let mut idx_cols = vec![];
let mut idx_vals = vec![];
let fields = index.fields();
let columns = index.columns();
for (k, v) in fields.iter().zip(columns.iter()) {
idx_cols.push(k.clone());
idx_vals.push(Value::string(v.clone(), span));
}
index_info.push(Value::Record {
cols: idx_cols.clone(),
vals: idx_vals.clone(),
span,
});
}
table_names.push(table.name);
table_values.push(Value::Record {
cols: vec![
"columns".into(),
"constraints".into(),
"foreign_keys".into(),
"indexes".into(),
],
vals: vec![
Value::List {
vals: column_info,
span,
},
Value::List {
vals: constraint_info,
span,
},
Value::List {
vals: foreign_key_info,
span,
},
Value::List {
vals: index_info,
span,
},
],
span,
});
}
table_list.push(Value::Record {
cols: table_names,
vals: table_values,
span,
});
cols.push("databases".into());
let mut rcols = vec![];
let mut rvals = vec![];
rcols.push("name".into());
rvals.push(Value::string(db.name().to_string(), span));
rcols.push("tables".into());
rvals.append(&mut table_list);
vals.push(Value::Record {
cols: rcols,
vals: rvals,
span,
});
}
Ok(PipelineData::Value(
Value::Record { cols, vals, span },
None,
))
}
}

View File
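
A minimal sketch of the pattern repeated above for columns, constraints, foreign keys, and indexes: zip a DbRow's field names with its rendered values into a nushell record.
```
use nu_protocol::{Span, Value};

// Pair up field names and stringified values into a record; the DbRow
// implementations in this diff keep fields() and columns() parallel.
fn row_to_record(fields: Vec<String>, columns: Vec<String>, span: Span) -> Value {
    let vals = columns
        .into_iter()
        .map(|v| Value::string(v, span))
        .collect();
    Value::Record {
        cols: fields,
        vals,
        span,
    }
}
```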

@ -0,0 +1,116 @@
use super::{super::values::dsl::SelectDb, super::SQLiteDatabase};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
use sqlparser::ast::{Query, Select, SelectItem, SetExpr};
#[derive(Clone)]
pub struct ProjectionDb;
impl Command for ProjectionDb {
fn name(&self) -> &str {
"db select"
}
fn usage(&self) -> &str {
"Creates a select statement for a DB"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.rest(
"select",
SyntaxShape::Any,
"Select expression(s) on the table",
)
.category(Category::Custom("database".into()))
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "select"]
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "selects a column from a database",
example: "db open db.mysql | db select a | db describe",
result: None,
},
Example {
description: "selects columns from a database",
example: "db open db.mysql | db select a b c | db describe",
result: None,
},
]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let vals: Vec<Value> = call.rest(engine_state, stack, 0)?;
let value = Value::List {
vals,
span: call.head,
};
let projection = SelectDb::extract_selects(value)?;
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
db.query = match db.query {
None => Some(create_query(projection)),
Some(query) => Some(modify_query(query, projection)),
};
Ok(db.into_value(call.head).into_pipeline_data())
}
}
fn create_query(expressions: Vec<SelectItem>) -> Query {
Query {
with: None,
body: SetExpr::Select(Box::new(create_select(expressions))),
order_by: Vec::new(),
limit: None,
offset: None,
fetch: None,
lock: None,
}
}
fn modify_query(mut query: Query, expressions: Vec<SelectItem>) -> Query {
query.body = match query.body {
SetExpr::Select(select) => SetExpr::Select(modify_select(select, expressions)),
_ => SetExpr::Select(Box::new(create_select(expressions))),
};
query
}
fn modify_select(mut select: Box<Select>, projection: Vec<SelectItem>) -> Box<Select> {
select.as_mut().projection = projection;
select
}
fn create_select(projection: Vec<SelectItem>) -> Select {
Select {
distinct: false,
top: None,
projection,
into: None,
from: Vec::new(),
lateral_views: Vec::new(),
selection: None,
group_by: Vec::new(),
cluster_by: Vec::new(),
distribute_by: Vec::new(),
sort_by: Vec::new(),
having: None,
}
}

View File

@ -0,0 +1,76 @@
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Spanned, SyntaxShape,
Value,
};
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;
#[derive(Clone)]
pub struct TestingDb;
impl Command for TestingDb {
fn name(&self) -> &str {
"db testing"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required(
"query",
SyntaxShape::String,
"SQL to execute to create the query object",
)
.category(Category::Custom("database".into()))
}
fn usage(&self) -> &str {
"Create query object"
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "",
example: "",
result: None,
}]
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "SQLite"]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let sql: Spanned<String> = call.req(engine_state, stack, 0)?;
let dialect = GenericDialect {}; // or AnsiDialect, or your own dialect ...
let ast = Parser::parse_sql(&dialect, sql.item.as_str()).map_err(|e| {
ShellError::GenericError(
"Error creating AST".into(),
e.to_string(),
Some(sql.span),
None,
Vec::new(),
)
})?;
let value = match ast.get(0) {
None => Value::nothing(call.head),
Some(statement) => Value::String {
val: format!("{:#?}", statement),
span: call.head,
},
};
Ok(value.into_pipeline_data())
}
}

View File
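
A minimal sketch of the parsing step `db testing` performs: sqlparser turns a SQL string into a list of statements, and the first one is pretty-printed.
```
use sqlparser::{dialect::GenericDialect, parser::Parser};

// Parse a SQL string with the generic dialect and return the debug form of
// the first statement, if any.
fn first_statement_debug(sql: &str) -> Option<String> {
    let dialect = GenericDialect {};
    let statements = Parser::parse_sql(&dialect, sql).ok()?;
    statements.get(0).map(|statement| format!("{:#?}", statement))
}
```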

@ -0,0 +1,103 @@
use crate::database::values::dsl::ExprDb;
use super::super::SQLiteDatabase;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
use sqlparser::ast::{Expr, Query, Select, SetExpr};
#[derive(Clone)]
pub struct WhereDb;
impl Command for WhereDb {
fn name(&self) -> &str {
"db where"
}
fn usage(&self) -> &str {
"Includes a where statement for a query"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("where", SyntaxShape::Any, "Where expression on the table")
.category(Category::Custom("database".into()))
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "where"]
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "selects a column from a database with a where clause",
example: r#"db open db.mysql
| db select a
| db from table_1
| db where ((db col a) > 1)
| db describe"#,
result: None,
}]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let value: Value = call.req(engine_state, stack, 0)?;
let expr = ExprDb::try_from_value(&value)?.into_native();
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
db.query = match db.query {
Some(query) => Some(modify_query(query, expr)),
None => {
return Err(ShellError::GenericError(
"Connection without query".into(),
"The connection needs a query defined".into(),
Some(call.head),
None,
Vec::new(),
))
}
};
Ok(db.into_value(call.head).into_pipeline_data())
}
}
fn modify_query(mut query: Query, expression: Expr) -> Query {
query.body = match query.body {
SetExpr::Select(select) => SetExpr::Select(modify_select(select, expression)),
_ => SetExpr::Select(Box::new(create_select(expression))),
};
query
}
fn modify_select(mut select: Box<Select>, expression: Expr) -> Box<Select> {
select.as_mut().selection = Some(expression);
select
}
fn create_select(expression: Expr) -> Select {
Select {
distinct: false,
top: None,
into: None,
projection: Vec::new(),
from: Vec::new(),
lateral_views: Vec::new(),
selection: Some(expression),
group_by: Vec::new(),
cluster_by: Vec::new(),
distribute_by: Vec::new(),
sort_by: Vec::new(),
having: None,
}
}

View File

@ -0,0 +1,153 @@
use crate::{
database::values::dsl::{ExprDb, SelectDb},
SQLiteDatabase,
};
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape,
};
use sqlparser::ast::{Ident, SelectItem, SetExpr, TableAlias, TableFactor};
#[derive(Clone)]
pub struct AliasExpr;
impl Command for AliasExpr {
fn name(&self) -> &str {
"db as"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("alias", SyntaxShape::String, "alias name")
.category(Category::Custom("database".into()))
}
fn usage(&self) -> &str {
"Creates an alias for a column selection"
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "Creates an alias for a column selection",
example: "db col name_a | db as new_a",
result: None,
},
Example {
description: "Creates an alias for a table",
example: r#"db open name
| db select a
| db from table_a
| db as table_a_new
| db describe"#,
result: None,
},
]
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "column", "expression"]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let alias: String = call.req(engine_state, stack, 0)?;
let value = input.into_value(call.head);
if let Ok(expr) = ExprDb::try_from_value(&value) {
alias_selection(expr.into_native().into(), alias, call)
} else if let Ok(select) = SelectDb::try_from_value(&value) {
alias_selection(select, alias, call)
} else if let Ok(db) = SQLiteDatabase::try_from_value(value.clone()) {
alias_db(db, alias, call)
} else {
Err(ShellError::CantConvert(
"expression or query".into(),
value.get_type().to_string(),
value.span()?,
None,
))
}
}
}
fn alias_selection(
select: SelectDb,
alias: String,
call: &Call,
) -> Result<PipelineData, ShellError> {
let select = match select.into_native() {
SelectItem::UnnamedExpr(expr) => SelectItem::ExprWithAlias {
expr,
alias: Ident {
value: alias,
quote_style: None,
},
},
SelectItem::ExprWithAlias { expr, .. } => SelectItem::ExprWithAlias {
expr,
alias: Ident {
value: alias,
quote_style: None,
},
},
select => select,
};
let select: SelectDb = select.into();
Ok(select.into_value(call.head).into_pipeline_data())
}
fn alias_db(
mut db: SQLiteDatabase,
new_alias: String,
call: &Call,
) -> Result<PipelineData, ShellError> {
match db.query {
None => Err(ShellError::GenericError(
"Error creating alias".into(),
"there is no query defined yet".into(),
Some(call.head),
None,
Vec::new(),
)),
Some(ref mut query) => match &mut query.body {
SetExpr::Select(ref mut select) => {
select.as_mut().from.iter_mut().for_each(|table| {
let new_alias = Some(TableAlias {
name: Ident {
value: new_alias.clone(),
quote_style: None,
},
columns: Vec::new(),
});
if let TableFactor::Table { ref mut alias, .. } = table.relation {
*alias = new_alias;
} else if let TableFactor::Derived { ref mut alias, .. } = table.relation {
*alias = new_alias;
} else if let TableFactor::TableFunction { ref mut alias, .. } = table.relation
{
*alias = new_alias;
}
});
Ok(db.into_value(call.head).into_pipeline_data())
}
_ => Err(ShellError::GenericError(
"Error creating alias".into(),
"Query has no select from defined".into(),
Some(call.head),
None,
Vec::new(),
)),
},
}
}

View File

@ -0,0 +1,51 @@
use crate::database::values::dsl::ExprDb;
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Value,
};
#[derive(Clone)]
pub struct ColExpr;
impl Command for ColExpr {
fn name(&self) -> &str {
"db col"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("name", SyntaxShape::String, "column name")
.category(Category::Custom("database".into()))
}
fn usage(&self) -> &str {
"Creates column expression for database"
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Creates a named column expression",
example: "col name_1",
result: None,
}]
}
fn search_terms(&self) -> Vec<&str> {
vec!["database", "column", "expression"]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let value: Value = call.req(engine_state, stack, 0)?;
let expression = ExprDb::try_from_value(&value)?;
Ok(expression.into_value(call.head).into_pipeline_data())
}
}

View File

@ -0,0 +1,21 @@
mod alias;
mod col;
use nu_protocol::engine::StateWorkingSet;
use alias::AliasExpr;
use col::ColExpr;
pub fn add_expression_decls(working_set: &mut StateWorkingSet) {
macro_rules! bind_command {
( $command:expr ) => {
working_set.add_decl(Box::new($command));
};
( $( $command:expr ),* ) => {
$( working_set.add_decl(Box::new($command)); )*
};
}
// Database expression commands
bind_command!(AliasExpr, ColExpr);
}

View File

@ -0,0 +1,16 @@
mod commands;
mod values;
mod expressions;
pub use commands::add_commands_decls;
pub use expressions::add_expression_decls;
use nu_protocol::engine::StateWorkingSet;
pub use values::{
convert_sqlite_row_to_nu_value, convert_sqlite_value_to_nu_value, open_connection_in_memory,
SQLiteDatabase,
};
pub fn add_database_decls(working_set: &mut StateWorkingSet) {
add_commands_decls(working_set);
add_expression_decls(working_set);
}

View File

@ -0,0 +1,27 @@
use super::db_table::DbTable;
// Thank you gobang
// https://github.com/TaKO8Ki/gobang/blob/main/database-tree/src/lib.rs
#[derive(Clone, PartialEq, Debug)]
pub struct Db {
pub name: String,
pub tables: Vec<DbTable>,
}
impl Db {
pub fn new(database: String, tables: Vec<DbTable>) -> Self {
Self {
name: database,
tables,
}
}
pub fn name(&self) -> &str {
self.name.as_str()
}
pub fn tables(&self) -> Vec<DbTable> {
self.tables.clone()
}
}

View File

@ -0,0 +1,51 @@
use crate::database::values::definitions::db_row::DbRow;
#[derive(Debug)]
pub struct DbColumn {
/// Column Index
pub cid: Option<i32>,
/// Column Name
pub name: Option<String>,
/// Column Type
pub r#type: Option<String>,
/// Column has a NOT NULL constraint
pub notnull: Option<i16>,
/// Column DEFAULT Value
pub default: Option<String>,
/// Column is part of the PRIMARY KEY
pub pk: Option<i16>,
}
impl DbRow for DbColumn {
fn fields(&self) -> Vec<String> {
vec![
"cid".to_string(),
"name".to_string(),
"type".to_string(),
"notnull".to_string(),
"default".to_string(),
"pk".to_string(),
]
}
fn columns(&self) -> Vec<String> {
vec![
self.cid
.as_ref()
.map_or(String::new(), |cid| cid.to_string()),
self.name
.as_ref()
.map_or(String::new(), |name| name.to_string()),
self.r#type
.as_ref()
.map_or(String::new(), |r#type| r#type.to_string()),
self.notnull
.as_ref()
.map_or(String::new(), |notnull| notnull.to_string()),
self.default
.as_ref()
.map_or(String::new(), |default| default.to_string()),
self.pk.as_ref().map_or(String::new(), |pk| pk.to_string()),
]
}
}

View File

@ -0,0 +1,26 @@
use super::db_row::DbRow;
#[derive(Debug)]
pub struct DbConstraint {
pub name: String,
pub column_name: String,
pub origin: String,
}
impl DbRow for DbConstraint {
fn fields(&self) -> Vec<String> {
vec![
"name".to_string(),
"column_name".to_string(),
"origin".to_string(),
]
}
fn columns(&self) -> Vec<String> {
vec![
self.name.to_string(),
self.column_name.to_string(),
self.origin.to_string(),
]
}
}

View File

@ -0,0 +1,32 @@
use super::db_row::DbRow;
#[derive(Debug)]
pub struct DbForeignKey {
pub column_name: Option<String>,
pub ref_table: Option<String>,
pub ref_column: Option<String>,
}
impl DbRow for DbForeignKey {
fn fields(&self) -> Vec<String> {
vec![
"column_name".to_string(),
"ref_table".to_string(),
"ref_column".to_string(),
]
}
fn columns(&self) -> Vec<String> {
vec![
self.column_name
.as_ref()
.map_or(String::new(), |r#type| r#type.to_string()),
self.ref_table
.as_ref()
.map_or(String::new(), |r#type| r#type.to_string()),
self.ref_column
.as_ref()
.map_or(String::new(), |r#type| r#type.to_string()),
]
}
}

View File

@ -0,0 +1,32 @@
use super::db_row::DbRow;
#[derive(Debug)]
pub struct DbIndex {
pub name: Option<String>,
pub column_name: Option<String>,
pub seqno: Option<i16>,
}
impl DbRow for DbIndex {
fn fields(&self) -> Vec<String> {
vec![
"name".to_string(),
"column_name".to_string(),
"seqno".to_string(),
]
}
fn columns(&self) -> Vec<String> {
vec![
self.name
.as_ref()
.map_or(String::new(), |name| name.to_string()),
self.column_name
.as_ref()
.map_or(String::new(), |column_name| column_name.to_string()),
self.seqno
.as_ref()
.map_or(String::new(), |seqno| seqno.to_string()),
]
}
}

View File

@ -0,0 +1,4 @@
pub trait DbRow: std::marker::Send {
fn fields(&self) -> Vec<String>;
fn columns(&self) -> Vec<String>;
}

View File

@ -0,0 +1,7 @@
use super::db_table::DbTable;
#[derive(Clone, PartialEq, Debug)]
pub struct DbSchema {
pub name: String,
pub tables: Vec<DbTable>,
}

View File

@ -0,0 +1,8 @@
#[derive(Debug, Clone, PartialEq)]
pub struct DbTable {
pub name: String,
pub create_time: Option<chrono::DateTime<chrono::Utc>>,
pub update_time: Option<chrono::DateTime<chrono::Utc>>,
pub engine: Option<String>,
pub schema: Option<String>,
}

View File

@ -0,0 +1,8 @@
pub mod db;
pub mod db_column;
pub mod db_constraint;
pub mod db_foreignkey;
pub mod db_index;
pub mod db_row;
pub mod db_schema;
pub mod db_table;

View File

@ -0,0 +1,308 @@
use nu_protocol::{
ast::{Operator, PathMember},
CustomValue, ShellError, Span, Type, Value,
};
use serde::{Deserialize, Serialize};
use sqlparser::ast::{BinaryOperator, Expr, Ident};
#[derive(Debug, Serialize, Deserialize)]
pub struct ExprDb(Expr);
// Referenced access to the native expression
impl AsRef<Expr> for ExprDb {
fn as_ref(&self) -> &Expr {
&self.0
}
}
impl AsMut<Expr> for ExprDb {
fn as_mut(&mut self) -> &mut Expr {
&mut self.0
}
}
impl From<Expr> for ExprDb {
fn from(expr: Expr) -> Self {
Self(expr)
}
}
impl CustomValue for ExprDb {
fn clone_value(&self, span: Span) -> Value {
let cloned = Self(self.0.clone());
Value::CustomValue {
val: Box::new(cloned),
span,
}
}
fn value_string(&self) -> String {
self.typetag_name().to_string()
}
fn to_base_value(&self, span: Span) -> Result<Value, ShellError> {
Ok(self.to_value(span))
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
fn follow_path_int(&self, count: usize, span: Span) -> Result<Value, ShellError> {
let path = PathMember::Int { val: count, span };
ExprDb::expr_to_value(self.as_ref(), span).follow_cell_path(&[path])
}
fn follow_path_string(&self, column_name: String, span: Span) -> Result<Value, ShellError> {
let path = PathMember::String {
val: column_name,
span,
};
ExprDb::expr_to_value(self.as_ref(), span).follow_cell_path(&[path])
}
fn typetag_name(&self) -> &'static str {
"DB expresssion"
}
fn typetag_deserialize(&self) {
unimplemented!("typetag_deserialize")
}
fn operation(
&self,
lhs_span: Span,
operator: Operator,
op: Span,
right: &Value,
) -> Result<Value, ShellError> {
let right_expr = match right {
Value::CustomValue { .. } => ExprDb::try_from_value(right).map(ExprDb::into_native),
Value::String { val, .. } => Ok(Expr::Value(
sqlparser::ast::Value::SingleQuotedString(val.clone()),
)),
Value::Int { val, .. } => Ok(Expr::Value(sqlparser::ast::Value::Number(
format!("{}", val),
false,
))),
Value::Bool { val, .. } => Ok(Expr::Value(sqlparser::ast::Value::Boolean(*val))),
_ => Err(ShellError::OperatorMismatch {
op_span: op,
lhs_ty: Type::Custom,
lhs_span,
rhs_ty: right.get_type(),
rhs_span: right.span()?,
}),
}?;
let sql_operator = match operator {
Operator::Equal => Ok(BinaryOperator::Eq),
Operator::NotEqual => Ok(BinaryOperator::NotEq),
Operator::LessThan => Ok(BinaryOperator::Lt),
Operator::GreaterThan => Ok(BinaryOperator::Gt),
Operator::LessThanOrEqual => Ok(BinaryOperator::LtEq),
Operator::GreaterThanOrEqual => Ok(BinaryOperator::GtEq),
Operator::RegexMatch => Ok(BinaryOperator::PGRegexMatch),
Operator::NotRegexMatch => Ok(BinaryOperator::PGRegexNotMatch),
Operator::Plus => Ok(BinaryOperator::Plus),
Operator::Minus => Ok(BinaryOperator::Minus),
Operator::Multiply => Ok(BinaryOperator::Multiply),
Operator::Divide => Ok(BinaryOperator::Divide),
Operator::Modulo => Ok(BinaryOperator::Modulo),
Operator::And => Ok(BinaryOperator::And),
Operator::Or => Ok(BinaryOperator::Or),
Operator::In
| Operator::NotIn
| Operator::Pow
| Operator::StartsWith
| Operator::EndsWith => Err(ShellError::UnsupportedOperator(operator, op)),
}?;
let expr = Expr::BinaryOp {
left: Box::new(self.as_ref().clone()),
op: sql_operator,
right: Box::new(right_expr),
};
Ok(ExprDb(expr).into_value(lhs_span))
}
}
impl ExprDb {
pub fn try_from_value(value: &Value) -> Result<Self, ShellError> {
match value {
Value::CustomValue { val, span } => match val.as_any().downcast_ref::<Self>() {
Some(expr) => Ok(Self(expr.0.clone())),
None => Err(ShellError::CantConvert(
"db expression".into(),
"non-expression".into(),
*span,
None,
)),
},
Value::String { val, .. } => Ok(Expr::Identifier(Ident {
value: val.clone(),
quote_style: None,
})
.into()),
Value::Int { val, .. } => {
Ok(Expr::Value(sqlparser::ast::Value::Number(format!("{}", val), false)).into())
}
x => Err(ShellError::CantConvert(
"database".into(),
x.get_type().to_string(),
x.span()?,
None,
)),
}
}
pub fn into_value(self, span: Span) -> Value {
Value::CustomValue {
val: Box::new(self),
span,
}
}
pub fn into_native(self) -> Expr {
self.0
}
pub fn to_value(&self, span: Span) -> Value {
ExprDb::expr_to_value(self.as_ref(), span)
}
// Convenience function to extract multiple Expr values that could be inside a nushell Value
pub fn extract_exprs(value: Value) -> Result<Vec<Expr>, ShellError> {
ExtractedExpr::extract_exprs(value).map(ExtractedExpr::into_exprs)
}
}
enum ExtractedExpr {
Single(Expr),
List(Vec<ExtractedExpr>),
}
impl ExtractedExpr {
fn into_exprs(self) -> Vec<Expr> {
match self {
Self::Single(expr) => vec![expr],
Self::List(exprs) => exprs
.into_iter()
.flat_map(ExtractedExpr::into_exprs)
.collect(),
}
}
fn extract_exprs(value: Value) -> Result<ExtractedExpr, ShellError> {
match value {
Value::String { val, .. } => {
let expr = Expr::Identifier(Ident {
value: val,
quote_style: None,
});
Ok(ExtractedExpr::Single(expr))
}
Value::Int { val, .. } => {
let expr = Expr::Value(sqlparser::ast::Value::Number(format!("{}", val), false));
Ok(ExtractedExpr::Single(expr))
}
Value::Bool { val, .. } => {
let expr = Expr::Value(sqlparser::ast::Value::Boolean(val));
Ok(ExtractedExpr::Single(expr))
}
Value::CustomValue { .. } => {
let expr = ExprDb::try_from_value(&value)?.into_native();
Ok(ExtractedExpr::Single(expr))
}
Value::List { vals, .. } => vals
.into_iter()
.map(Self::extract_exprs)
.collect::<Result<Vec<ExtractedExpr>, ShellError>>()
.map(ExtractedExpr::List),
x => Err(ShellError::CantConvert(
"selection".into(),
x.get_type().to_string(),
x.span()?,
None,
)),
}
}
}
impl ExprDb {
pub fn expr_to_value(expr: &Expr, span: Span) -> Value {
match expr {
Expr::Identifier(ident) => {
let cols = vec!["value".into(), "quoted_style".into()];
let val = Value::String {
val: ident.value.to_string(),
span,
};
let style = Value::String {
val: format!("{:?}", ident.quote_style),
span,
};
Value::Record {
cols,
vals: vec![val, style],
span,
}
}
Expr::Value(value) => Value::String {
val: format!("{}", value),
span,
},
Expr::BinaryOp { left, op, right } => {
let cols = vec!["left".into(), "op".into(), "right".into()];
let left = ExprDb::expr_to_value(left.as_ref(), span);
let right = ExprDb::expr_to_value(right.as_ref(), span);
let op = Value::String {
val: format!("{}", op),
span,
};
let vals = vec![left, op, right];
Value::Record { cols, vals, span }
}
Expr::Nested(expr) => ExprDb::expr_to_value(expr, span),
Expr::CompoundIdentifier(_) => todo!(),
Expr::IsNull(_) => todo!(),
Expr::IsNotNull(_) => todo!(),
Expr::IsDistinctFrom(_, _) => todo!(),
Expr::IsNotDistinctFrom(_, _) => todo!(),
Expr::InList { .. } => todo!(),
Expr::InSubquery { .. } => todo!(),
Expr::InUnnest { .. } => todo!(),
Expr::Between { .. } => todo!(),
Expr::UnaryOp { .. } => todo!(),
Expr::Cast { .. } => todo!(),
Expr::TryCast { .. } => todo!(),
Expr::Extract { .. } => todo!(),
Expr::Substring { .. } => todo!(),
Expr::Trim { .. } => todo!(),
Expr::Collate { .. } => todo!(),
Expr::TypedString { .. } => todo!(),
Expr::MapAccess { .. } => todo!(),
Expr::Function(_) => todo!(),
Expr::Case { .. } => todo!(),
Expr::Exists(_) => todo!(),
Expr::Subquery(_) => todo!(),
Expr::ListAgg(_) => todo!(),
Expr::GroupingSets(_) => todo!(),
Expr::Cube(_) => todo!(),
Expr::Rollup(_) => todo!(),
Expr::Tuple(_) => todo!(),
Expr::ArrayIndex { .. } => todo!(),
Expr::Array(_) => todo!(),
}
}
}

View File
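
The operation method above is what turns a nushell comparison such as `(db col a) > 1` into SQL; a minimal sketch of the expression it produces, using the same sqlparser constructors as this file.
```
use sqlparser::ast::{BinaryOperator, Expr, Ident};

// Build `a > 1` as a sqlparser expression: identifier on the left, numeric
// literal on the right (the boolean flag is passed as `false`, matching the
// conversions above).
fn column_greater_than(column: &str, n: i64) -> Expr {
    Expr::BinaryOp {
        left: Box::new(Expr::Identifier(Ident {
            value: column.to_string(),
            quote_style: None,
        })),
        op: BinaryOperator::Gt,
        right: Box::new(Expr::Value(sqlparser::ast::Value::Number(
            format!("{}", n),
            false,
        ))),
    }
}
```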

@ -0,0 +1,5 @@
mod expression;
mod select_item;
pub(crate) use expression::ExprDb;
pub(crate) use select_item::SelectDb;

View File

@ -0,0 +1,263 @@
use super::ExprDb;
use nu_protocol::{ast::PathMember, CustomValue, ShellError, Span, Value};
use serde::{Deserialize, Serialize};
use sqlparser::ast::{Expr, Ident, ObjectName, SelectItem};
#[derive(Debug, Serialize, Deserialize)]
pub struct SelectDb(SelectItem);
// Referenced access to the native expression
impl AsRef<SelectItem> for SelectDb {
fn as_ref(&self) -> &SelectItem {
&self.0
}
}
impl AsMut<SelectItem> for SelectDb {
fn as_mut(&mut self) -> &mut SelectItem {
&mut self.0
}
}
impl From<SelectItem> for SelectDb {
fn from(selection: SelectItem) -> Self {
Self(selection)
}
}
impl From<Expr> for SelectDb {
fn from(expr: Expr) -> Self {
SelectItem::UnnamedExpr(expr).into()
}
}
impl CustomValue for SelectDb {
fn clone_value(&self, span: Span) -> Value {
let cloned = Self(self.0.clone());
Value::CustomValue {
val: Box::new(cloned),
span,
}
}
fn value_string(&self) -> String {
self.typetag_name().to_string()
}
fn to_base_value(&self, span: Span) -> Result<Value, ShellError> {
Ok(self.to_value(span))
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
fn follow_path_int(&self, count: usize, span: Span) -> Result<Value, ShellError> {
let path = PathMember::Int { val: count, span };
SelectDb::select_to_value(self.as_ref(), span).follow_cell_path(&[path])
}
fn follow_path_string(&self, column_name: String, span: Span) -> Result<Value, ShellError> {
let path = PathMember::String {
val: column_name,
span,
};
SelectDb::select_to_value(self.as_ref(), span).follow_cell_path(&[path])
}
fn typetag_name(&self) -> &'static str {
"DB selection"
}
fn typetag_deserialize(&self) {
unimplemented!("typetag_deserialize")
}
}
impl SelectDb {
pub fn try_from_value(value: &Value) -> Result<Self, ShellError> {
match value {
Value::CustomValue { val, span } => match val.as_any().downcast_ref::<Self>() {
Some(expr) => Ok(Self(expr.0.clone())),
None => Err(ShellError::CantConvert(
"db selection".into(),
"non-expression".into(),
*span,
None,
)),
},
Value::String { val, .. } => match val.as_str() {
"*" => Ok(SelectItem::Wildcard.into()),
name if (name.contains('.') && name.contains('*')) => {
let parts: Vec<Ident> = name
.split('.')
.filter(|part| part != &"*")
.map(|part| Ident {
value: part.to_string(),
quote_style: None,
})
.collect();
Ok(SelectItem::QualifiedWildcard(ObjectName(parts)).into())
}
name if name.contains('.') => {
let parts: Vec<Ident> = name
.split('.')
.map(|part| Ident {
value: part.to_string(),
quote_style: None,
})
.collect();
let expr = Expr::CompoundIdentifier(parts);
Ok(SelectItem::UnnamedExpr(expr).into())
}
_ => {
let expr = Expr::Identifier(Ident {
value: val.clone(),
quote_style: None,
});
Ok(SelectItem::UnnamedExpr(expr).into())
}
},
x => Err(ShellError::CantConvert(
"selection".into(),
x.get_type().to_string(),
x.span()?,
None,
)),
}
}
pub fn into_value(self, span: Span) -> Value {
Value::CustomValue {
val: Box::new(self),
span,
}
}
pub fn into_native(self) -> SelectItem {
self.0
}
pub fn to_value(&self, span: Span) -> Value {
SelectDb::select_to_value(self.as_ref(), span)
}
}
impl SelectDb {
fn select_to_value(select: &SelectItem, span: Span) -> Value {
match select {
SelectItem::UnnamedExpr(expr) => ExprDb::expr_to_value(expr, span),
SelectItem::ExprWithAlias { expr, alias } => {
let expr = ExprDb::expr_to_value(expr, span);
let val = Value::String {
val: alias.value.to_string(),
span,
};
let style = Value::String {
val: format!("{:?}", alias.quote_style),
span,
};
let cols = vec!["value".into(), "quoted_style".into()];
let alias = Value::Record {
cols,
vals: vec![val, style],
span,
};
let cols = vec!["expression".into(), "alias".into()];
Value::Record {
cols,
vals: vec![expr, alias],
span,
}
}
SelectItem::QualifiedWildcard(object) => {
let vals: Vec<Value> = object
.0
.iter()
.map(|ident| Value::String {
val: ident.value.clone(),
span,
})
.collect();
Value::List { vals, span }
}
SelectItem::Wildcard => Value::String {
val: "*".into(),
span,
},
}
}
// Convenience function to extract multiple SelectItem values that could be inside a
// nushell Value
pub fn extract_selects(value: Value) -> Result<Vec<SelectItem>, ShellError> {
ExtractedSelect::extract_selects(value).map(ExtractedSelect::into_selects)
}
}
// Enum to represent the parsing of the selects from Value
enum ExtractedSelect {
Single(SelectItem),
List(Vec<ExtractedSelect>),
}
impl ExtractedSelect {
fn into_selects(self) -> Vec<SelectItem> {
match self {
Self::Single(select) => vec![select],
Self::List(selects) => selects
.into_iter()
.flat_map(ExtractedSelect::into_selects)
.collect(),
}
}
fn extract_selects(value: Value) -> Result<ExtractedSelect, ShellError> {
match value {
Value::String { val, .. } => {
let expr = Expr::Identifier(Ident {
value: val,
quote_style: None,
});
Ok(ExtractedSelect::Single(SelectItem::UnnamedExpr(expr)))
}
Value::CustomValue { .. } => {
if let Ok(expr) = ExprDb::try_from_value(&value) {
Ok(ExtractedSelect::Single(SelectItem::UnnamedExpr(
expr.into_native(),
)))
} else if let Ok(select) = SelectDb::try_from_value(&value) {
Ok(ExtractedSelect::Single(select.into_native()))
} else {
Err(ShellError::CantConvert(
"selection".into(),
value.get_type().to_string(),
value.span()?,
None,
))
}
}
Value::List { vals, .. } => vals
.into_iter()
.map(Self::extract_selects)
.collect::<Result<Vec<ExtractedSelect>, ShellError>>()
.map(ExtractedSelect::List),
x => Err(ShellError::CantConvert(
"selection".into(),
x.get_type().to_string(),
x.span()?,
None,
)),
}
}
}
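For orientation, the string-to-SelectItem rules that try_from_value implements above can be restated as a standalone sketch. This is illustrative only and not part of the diff; the helper name select_item_from_str is invented, and the sketch assumes the same sqlparser types this file already uses (Expr, Ident, ObjectName, SelectItem).

// Illustrative sketch only (not part of the diff): the mapping from plain
// strings to sqlparser select items implemented by SelectDb::try_from_value.
use sqlparser::ast::{Expr, Ident, ObjectName, SelectItem};

fn select_item_from_str(name: &str) -> SelectItem {
    let ident = |part: &str| Ident {
        value: part.to_string(),
        quote_style: None,
    };
    match name {
        // "*" selects everything
        "*" => SelectItem::Wildcard,
        // "table.*" becomes a qualified wildcard over the dotted prefix
        n if n.contains('.') && n.contains('*') => SelectItem::QualifiedWildcard(ObjectName(
            n.split('.').filter(|p| p != &"*").map(ident).collect(),
        )),
        // "table.column" becomes a compound identifier expression
        n if n.contains('.') => SelectItem::UnnamedExpr(Expr::CompoundIdentifier(
            n.split('.').map(ident).collect(),
        )),
        // anything else is treated as a bare column identifier
        n => SelectItem::UnnamedExpr(Expr::Identifier(ident(n))),
    }
}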

@ -0,0 +1,8 @@
pub mod definitions;
pub mod dsl;
pub mod sqlite;
pub use sqlite::{
convert_sqlite_row_to_nu_value, convert_sqlite_value_to_nu_value, open_connection_in_memory,
SQLiteDatabase,
};

@ -0,0 +1,650 @@
use crate::database::values::definitions::{
db::Db, db_column::DbColumn, db_constraint::DbConstraint, db_foreignkey::DbForeignKey,
db_index::DbIndex, db_table::DbTable,
};
use nu_protocol::{CustomValue, PipelineData, ShellError, Span, Spanned, Value};
use rusqlite::{types::ValueRef, Connection, Row};
use serde::{Deserialize, Serialize};
use sqlparser::ast::Query;
use std::{
fs::File,
io::Read,
path::{Path, PathBuf},
};
const SQLITE_MAGIC_BYTES: &[u8] = "SQLite format 3\0".as_bytes();
#[derive(Debug, Serialize, Deserialize)]
pub struct SQLiteDatabase {
// I considered storing a SQLite connection here, but decided against it because
// 1) YAGNI, 2) it's not obvious how cloning a connection could work, 3) state
// management gets tricky quick. Revisit this approach if we find a compelling use case.
pub path: PathBuf,
pub query: Option<Query>,
}
impl SQLiteDatabase {
pub fn new(path: &Path) -> Self {
Self {
path: PathBuf::from(path),
query: None,
}
}
pub fn try_from_path(path: &Path, span: Span) -> Result<Self, ShellError> {
let mut file =
File::open(path).map_err(|e| ShellError::ReadingFile(e.to_string(), span))?;
let mut buf: [u8; 16] = [0; 16];
file.read_exact(&mut buf)
.map_err(|e| ShellError::ReadingFile(e.to_string(), span))
.and_then(|_| {
if buf == SQLITE_MAGIC_BYTES {
Ok(SQLiteDatabase::new(path))
} else {
Err(ShellError::ReadingFile("Not a SQLite file".into(), span))
}
})
}
pub fn try_from_value(value: Value) -> Result<Self, ShellError> {
match value {
Value::CustomValue { val, span } => match val.as_any().downcast_ref::<Self>() {
Some(db) => Ok(Self {
path: db.path.clone(),
query: db.query.clone(),
}),
None => Err(ShellError::CantConvert(
"database".into(),
"non-database".into(),
span,
None,
)),
},
x => Err(ShellError::CantConvert(
"database".into(),
x.get_type().to_string(),
x.span()?,
None,
)),
}
}
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
let value = input.into_value(span);
Self::try_from_value(value)
}
pub fn into_value(self, span: Span) -> Value {
Value::CustomValue {
val: Box::new(self),
span,
}
}
pub fn query(&self, sql: &Spanned<String>, call_span: Span) -> Result<Value, ShellError> {
let db = open_sqlite_db(&self.path, call_span)?;
run_sql_query(db, sql).map_err(|e| {
ShellError::GenericError(
"Failed to query SQLite database".into(),
e.to_string(),
Some(sql.span),
None,
Vec::new(),
)
})
}
pub fn collect(&self, call_span: Span) -> Result<Value, ShellError> {
let sql = match &self.query {
Some(query) => Ok(format!("{}", query)),
None => Err(ShellError::GenericError(
"Error collecting from db".into(),
"No query found in connection".into(),
Some(call_span),
None,
Vec::new(),
)),
}?;
let sql = Spanned {
item: sql,
span: call_span,
};
let db = open_sqlite_db(&self.path, call_span)?;
run_sql_query(db, &sql).map_err(|e| {
ShellError::GenericError(
"Failed to query SQLite database".into(),
e.to_string(),
Some(sql.span),
None,
Vec::new(),
)
})
}
pub fn describe(&self, span: Span) -> Value {
let cols = vec!["connection".to_string(), "query".to_string()];
let connection = Value::String {
val: self.path.to_str().unwrap_or("").to_string(),
span,
};
let query = match &self.query {
Some(query) => format!("{query}"),
None => "".into(),
};
let query = Value::String { val: query, span };
Value::Record {
cols,
vals: vec![connection, query],
span,
}
}
pub fn open_connection(&self) -> Result<Connection, rusqlite::Error> {
let conn = match Connection::open(self.path.to_string_lossy().to_string()) {
Ok(conn) => conn,
Err(err) => return Err(err),
};
Ok(conn)
}
pub fn get_databases_and_tables(&self, conn: &Connection) -> Result<Vec<Db>, rusqlite::Error> {
// let conn = open_connection(path)?;
let mut db_query = conn.prepare("SELECT name FROM pragma_database_list")?;
let databases = db_query.query_map([], |row| {
let name: String = row.get(0)?;
Ok(Db::new(name, self.get_tables(conn)?))
})?;
let mut db_list = vec![];
for db in databases {
db_list.push(db?);
}
Ok(db_list)
}
pub fn get_databases(&self, conn: &Connection) -> Result<Vec<String>, rusqlite::Error> {
let mut db_query = conn.prepare("SELECT name FROM pragma_database_list")?;
let mut db_list = vec![];
let _ = db_query.query_map([], |row| {
let name: String = row.get(0)?;
db_list.push(name);
Ok(())
})?;
Ok(db_list)
}
pub fn get_tables(&self, conn: &Connection) -> Result<Vec<DbTable>, rusqlite::Error> {
let mut table_names =
conn.prepare("SELECT name FROM sqlite_master WHERE type = 'table'")?;
let rows = table_names.query_map([], |row| row.get(0))?;
let mut tables = Vec::new();
for row in rows {
let table_name: String = row?;
tables.push(DbTable {
name: table_name,
create_time: None,
update_time: None,
engine: None,
schema: None,
})
}
Ok(tables.into_iter().collect())
}
fn get_column_info(&self, row: &Row) -> Result<DbColumn, rusqlite::Error> {
let dbc = DbColumn {
cid: row.get("cid")?,
name: row.get("name")?,
r#type: row.get("type")?,
notnull: row.get("notnull")?,
default: row.get("dflt_value")?,
pk: row.get("pk")?,
};
Ok(dbc)
}
pub fn get_columns(
&self,
conn: &Connection,
table: &DbTable,
) -> Result<Vec<DbColumn>, rusqlite::Error> {
let mut column_names = conn.prepare(&format!(
"SELECT * FROM pragma_table_info('{}');",
table.name
))?;
let mut columns: Vec<DbColumn> = Vec::new();
let rows = column_names.query_and_then([], |row| self.get_column_info(row))?;
for row in rows {
columns.push(row?);
}
Ok(columns)
}
fn get_constraint_info(&self, row: &Row) -> Result<DbConstraint, rusqlite::Error> {
let dbc = DbConstraint {
name: row.get("index_name")?,
column_name: row.get("column_name")?,
origin: row.get("origin")?,
};
Ok(dbc)
}
pub fn get_constraints(
&self,
conn: &Connection,
table: &DbTable,
) -> Result<Vec<DbConstraint>, rusqlite::Error> {
let mut column_names = conn.prepare(&format!(
"
SELECT
p.origin,
s.name AS index_name,
i.name AS column_name
FROM
sqlite_master s
JOIN pragma_index_list(s.tbl_name) p ON s.name = p.name,
pragma_index_info(s.name) i
WHERE
s.type = 'index'
AND tbl_name = '{}'
AND NOT p.origin = 'c'
",
&table.name
))?;
let mut constraints: Vec<DbConstraint> = Vec::new();
let rows = column_names.query_and_then([], |row| self.get_constraint_info(row))?;
for row in rows {
constraints.push(row?);
}
Ok(constraints)
}
fn get_foreign_keys_info(&self, row: &Row) -> Result<DbForeignKey, rusqlite::Error> {
let dbc = DbForeignKey {
column_name: row.get("from")?,
ref_table: row.get("table")?,
ref_column: row.get("to")?,
};
Ok(dbc)
}
pub fn get_foreign_keys(
&self,
conn: &Connection,
table: &DbTable,
) -> Result<Vec<DbForeignKey>, rusqlite::Error> {
let mut column_names = conn.prepare(&format!(
"SELECT p.`from`, p.`to`, p.`table` FROM pragma_foreign_key_list('{}') p",
&table.name
))?;
let mut foreign_keys: Vec<DbForeignKey> = Vec::new();
let rows = column_names.query_and_then([], |row| self.get_foreign_keys_info(row))?;
for row in rows {
foreign_keys.push(row?);
}
Ok(foreign_keys)
}
fn get_index_info(&self, row: &Row) -> Result<DbIndex, rusqlite::Error> {
let dbc = DbIndex {
name: row.get("index_name")?,
column_name: row.get("name")?,
seqno: row.get("seqno")?,
};
Ok(dbc)
}
pub fn get_indexes(
&self,
conn: &Connection,
table: &DbTable,
) -> Result<Vec<DbIndex>, rusqlite::Error> {
let mut column_names = conn.prepare(&format!(
"
SELECT
m.name AS index_name,
p.*
FROM
sqlite_master m,
pragma_index_info(m.name) p
WHERE
m.type = 'index'
AND m.tbl_name = '{}'
",
&table.name,
))?;
let mut indexes: Vec<DbIndex> = Vec::new();
let rows = column_names.query_and_then([], |row| self.get_index_info(row))?;
for row in rows {
indexes.push(row?);
}
Ok(indexes)
}
}
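A minimal usage sketch of the methods defined above, not part of the diff: the file name sample.db, the person table, and the wrapper function query_sample_db are assumptions made for illustration, and Span::test_data() stands in for a real span.

// Hypothetical usage sketch (not part of the diff): open a database file,
// validate its magic bytes, and run a one-off query through the API above.
use nu_protocol::{ShellError, Span, Spanned, Value};
use std::path::Path;

fn query_sample_db() -> Result<Value, ShellError> {
    // Placeholder span; real callers would use the span of the user's input.
    let span = Span::test_data();
    // Assumes a SQLite file named sample.db with a `person` table exists.
    let db = SQLiteDatabase::try_from_path(Path::new("sample.db"), span)?;
    let sql = Spanned {
        item: "SELECT * FROM person".to_string(),
        span,
    };
    db.query(&sql, span)
}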
impl CustomValue for SQLiteDatabase {
fn clone_value(&self, span: Span) -> Value {
let cloned = SQLiteDatabase {
path: self.path.clone(),
query: self.query.clone(),
};
Value::CustomValue {
val: Box::new(cloned),
span,
}
}
fn value_string(&self) -> String {
self.typetag_name().to_string()
}
fn to_base_value(&self, span: Span) -> Result<Value, ShellError> {
let db = open_sqlite_db(&self.path, span)?;
read_entire_sqlite_db(db, span).map_err(|e| {
ShellError::GenericError(
"Failed to read from SQLite database".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})
}
fn as_any(&self) -> &dyn std::any::Any {
self
}
fn follow_path_int(&self, _count: usize, span: Span) -> Result<Value, ShellError> {
// In theory we could support this, but tables don't have an especially well-defined order
Err(ShellError::IncompatiblePathAccess("SQLite databases do not support integer-indexed access. Try specifying a table name instead".into(), span))
}
fn follow_path_string(&self, _column_name: String, span: Span) -> Result<Value, ShellError> {
let db = open_sqlite_db(&self.path, span)?;
read_single_table(db, _column_name, span).map_err(|e| {
ShellError::GenericError(
"Failed to read from SQLite database".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})
}
fn typetag_name(&self) -> &'static str {
"SQLiteDatabase"
}
fn typetag_deserialize(&self) {
unimplemented!("typetag_deserialize")
}
}
fn open_sqlite_db(path: &Path, call_span: Span) -> Result<Connection, nu_protocol::ShellError> {
let path = path.to_string_lossy().to_string();
Connection::open(path).map_err(|e| {
ShellError::GenericError(
"Failed to open SQLite database".into(),
e.to_string(),
Some(call_span),
None,
Vec::new(),
)
})
}
fn run_sql_query(conn: Connection, sql: &Spanned<String>) -> Result<Value, rusqlite::Error> {
let mut stmt = conn.prepare(&sql.item)?;
let results = stmt.query([])?;
let nu_records = results
.mapped(|row| Result::Ok(convert_sqlite_row_to_nu_value(row, sql.span)))
.into_iter()
.collect::<Result<Vec<Value>, rusqlite::Error>>()?;
Ok(Value::List {
vals: nu_records,
span: sql.span,
})
}
fn read_single_table(
conn: Connection,
table_name: String,
call_span: Span,
) -> Result<Value, rusqlite::Error> {
let mut stmt = conn.prepare(&format!("SELECT * FROM {}", table_name))?;
let results = stmt.query([])?;
let nu_records = results
.mapped(|row| Result::Ok(convert_sqlite_row_to_nu_value(row, call_span)))
.into_iter()
.collect::<Result<Vec<Value>, rusqlite::Error>>()?;
Ok(Value::List {
vals: nu_records,
span: call_span,
})
}
fn read_entire_sqlite_db(conn: Connection, call_span: Span) -> Result<Value, rusqlite::Error> {
let mut table_names: Vec<String> = Vec::new();
let mut tables: Vec<Value> = Vec::new();
let mut get_table_names =
conn.prepare("SELECT name FROM sqlite_master WHERE type = 'table'")?;
let rows = get_table_names.query_map([], |row| row.get(0))?;
for row in rows {
let table_name: String = row?;
table_names.push(table_name.clone());
let mut rows = Vec::new();
let mut table_stmt = conn.prepare(&format!("select * from [{}]", table_name))?;
let mut table_rows = table_stmt.query([])?;
while let Some(table_row) = table_rows.next()? {
rows.push(convert_sqlite_row_to_nu_value(table_row, call_span))
}
let table_record = Value::List {
vals: rows,
span: call_span,
};
tables.push(table_record);
}
Ok(Value::Record {
cols: table_names,
vals: tables,
span: call_span,
})
}
pub fn convert_sqlite_row_to_nu_value(row: &Row, span: Span) -> Value {
let mut vals = Vec::new();
let colnamestr = row.as_ref().column_names().to_vec();
let colnames = colnamestr.iter().map(|s| s.to_string()).collect();
for (i, c) in row.as_ref().column_names().iter().enumerate() {
let _column = c.to_string();
let val = convert_sqlite_value_to_nu_value(row.get_ref_unwrap(i), span);
vals.push(val);
}
Value::Record {
cols: colnames,
vals,
span,
}
}
pub fn convert_sqlite_value_to_nu_value(value: ValueRef, span: Span) -> Value {
match value {
ValueRef::Null => Value::Nothing { span },
ValueRef::Integer(i) => Value::Int { val: i, span },
ValueRef::Real(f) => Value::Float { val: f, span },
ValueRef::Text(buf) => {
let s = match std::str::from_utf8(buf) {
Ok(v) => v,
Err(_) => {
return Value::Error {
error: ShellError::NonUtf8(span),
}
}
};
Value::String {
val: s.to_string(),
span,
}
}
ValueRef::Blob(u) => Value::Binary {
val: u.to_vec(),
span,
},
}
}
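The SQLite-to-Nu mapping above can be pinned down with a short assertion-style sketch. This is illustrative only and not part of the diff; the module and test names are invented, and it relies solely on items already in scope in this file.

#[cfg(test)]
mod value_mapping_sketch {
    use super::*;

    // Sketch only: spells out two cases of the SQLite -> Nu mapping above
    // (INTEGER -> Value::Int, NULL -> Value::Nothing).
    #[test]
    fn integer_becomes_int_and_null_becomes_nothing() {
        let span = Span::test_data();
        assert_eq!(
            convert_sqlite_value_to_nu_value(ValueRef::Integer(42), span),
            Value::Int { val: 42, span }
        );
        assert_eq!(
            convert_sqlite_value_to_nu_value(ValueRef::Null, span),
            Value::Nothing { span }
        );
    }
}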
#[cfg(test)]
mod test {
use super::*;
#[test]
fn can_read_empty_db() {
let db = open_connection_in_memory().unwrap();
let converted_db = read_entire_sqlite_db(db, Span::test_data()).unwrap();
let expected = Value::Record {
cols: vec![],
vals: vec![],
span: Span::test_data(),
};
assert_eq!(converted_db, expected);
}
#[test]
fn can_read_empty_table() {
let db = open_connection_in_memory().unwrap();
db.execute(
"CREATE TABLE person (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
data BLOB
)",
[],
)
.unwrap();
let converted_db = read_entire_sqlite_db(db, Span::test_data()).unwrap();
let expected = Value::Record {
cols: vec!["person".to_string()],
vals: vec![Value::List {
vals: vec![],
span: Span::test_data(),
}],
span: Span::test_data(),
};
assert_eq!(converted_db, expected);
}
#[test]
fn can_read_null_and_non_null_data() {
let span = Span::test_data();
let db = open_connection_in_memory().unwrap();
db.execute(
"CREATE TABLE item (
id INTEGER PRIMARY KEY,
name TEXT
)",
[],
)
.unwrap();
db.execute("INSERT INTO item (id, name) VALUES (123, NULL)", [])
.unwrap();
db.execute("INSERT INTO item (id, name) VALUES (456, 'foo bar')", [])
.unwrap();
let converted_db = read_entire_sqlite_db(db, span).unwrap();
let expected = Value::Record {
cols: vec!["item".to_string()],
vals: vec![Value::List {
vals: vec![
Value::Record {
cols: vec!["id".to_string(), "name".to_string()],
vals: vec![Value::Int { val: 123, span }, Value::Nothing { span }],
span,
},
Value::Record {
cols: vec!["id".to_string(), "name".to_string()],
vals: vec![
Value::Int { val: 456, span },
Value::String {
val: "foo bar".to_string(),
span,
},
],
span,
},
],
span,
}],
span,
};
assert_eq!(converted_db, expected);
}
}
pub fn open_connection_in_memory() -> Result<Connection, ShellError> {
let db = match Connection::open_in_memory() {
Ok(conn) => conn,
Err(err) => {
return Err(ShellError::GenericError(
"Failed to open SQLite connection in memory".into(),
err.to_string(),
Some(Span::test_data()),
None,
Vec::new(),
))
}
};
Ok(db)
}

@ -43,17 +43,21 @@ impl Operation {
"last" => Ok(Operation::Last),
"nunique" => Ok(Operation::Nunique),
"quantile" => match quantile {
None => Err(ShellError::SpannedLabeledError(
None => Err(ShellError::GenericError(
"Quantile value not fount".into(),
"Quantile operation requires quantile value".into(),
name.span,
Some(name.span),
None,
Vec::new(),
)),
Some(value) => {
if (value.item < 0.0) | (value.item > 1.0) {
Err(ShellError::SpannedLabeledError(
Err(ShellError::GenericError(
"Inappropriate quantile".into(),
"Quantile value should be between 0.0 and 1.0".into(),
value.span,
Some(value.span),
None,
Vec::new(),
))
} else {
Ok(Operation::Quantile(value.item))
@ -82,11 +86,12 @@ impl Operation {
match did_you_mean(&possibilities, selection) {
Some(suggestion) => Err(ShellError::DidYouMean(suggestion, name.span)),
None => Err(ShellError::SpannedLabeledErrorHelp(
None => Err(ShellError::GenericError(
"Operation not fount".into(),
"Operation does not exist".into(),
name.span,
"Perhaps you want: mean, sum, min, max, first, last, nunique, quantile, median, var, std, or count".into(),
Some(name.span),
Some("Perhaps you want: mean, sum, min, max, first, last, nunique, quantile, median, var, std, or count".into()),
Vec::new(),
))
}
}
@ -239,17 +244,21 @@ fn command(
None,
))
}
_ => Err(ShellError::SpannedLabeledError(
_ => Err(ShellError::GenericError(
"Incorrect datatype".into(),
"no groupby or dataframe found in input stream".into(),
call.head,
Some(call.head),
None,
Vec::new(),
)),
}
}
_ => Err(ShellError::SpannedLabeledError(
_ => Err(ShellError::GenericError(
"Incorrect datatype".into(),
"no groupby or dataframe found in input stream".into(),
call.head,
Some(call.head),
None,
Vec::new(),
)),
}
}
@ -283,7 +292,13 @@ fn perform_groupby_aggregation(
_ => operation_span,
};
ShellError::SpannedLabeledError("Error calculating aggregation".into(), e.to_string(), span)
ShellError::GenericError(
"Error calculating aggregation".into(),
e.to_string(),
Some(span),
None,
Vec::new(),
)
})?;
if !explicit {
@ -335,10 +350,12 @@ fn perform_dataframe_aggregation(
Operation::Quantile(quantile) => dataframe
.quantile(quantile, QuantileInterpolOptions::default())
.map_err(|e| {
ShellError::SpannedLabeledError(
ShellError::GenericError(
"Error calculating quantile".into(),
e.to_string(),
operation_span,
Some(operation_span),
None,
Vec::new(),
)
}),
Operation::Median => Ok(dataframe.median()),
@ -358,11 +375,15 @@ fn perform_dataframe_aggregation(
match did_you_mean(&possibilities, operation.to_str()) {
Some(suggestion) => Err(ShellError::DidYouMean(suggestion, operation_span)),
None => Err(ShellError::SpannedLabeledErrorHelp(
None => Err(ShellError::GenericError(
"Operation not fount".into(),
"Operation does not exist".into(),
operation_span,
"Perhaps you want: mean, sum, min, max, quantile, median, var, or std".into(),
Some(operation_span),
Some(
"Perhaps you want: mean, sum, min, max, quantile, median, var, or std"
.into(),
),
Vec::new(),
)),
}
}
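The hunks in this file and the ones that follow apply a single mechanical change: ShellError::SpannedLabeledError and SpannedLabeledErrorHelp constructions become ShellError::GenericError, which carries the span and help text as Options plus a list of related errors. A minimal sketch of that shape, not part of the diff (the helper name labeled_error is invented):

use nu_protocol::{ShellError, Span};

// Sketch only: the GenericError shape used throughout these hunks is
// (message, label, optional span, optional help text, related errors).
fn labeled_error(msg: &str, label: &str, span: Span, help: Option<String>) -> ShellError {
    ShellError::GenericError(msg.into(), label.into(), Some(span), help, Vec::new())
}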

@ -62,7 +62,13 @@ fn command(
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
let res = df.as_ref().column(&column.item).map_err(|e| {
ShellError::SpannedLabeledError("Error selecting column".into(), e.to_string(), column.span)
ShellError::GenericError(
"Error selecting column".into(),
e.to_string(),
Some(column.span),
None,
Vec::new(),
)
})?;
NuDataFrame::try_from_series(vec![res.clone()], call.head)

@ -121,18 +121,22 @@ fn command(
if (&0.0..=&1.0).contains(&val) {
Ok(*val)
} else {
Err(ShellError::SpannedLabeledError(
Err(ShellError::GenericError(
"Incorrect value for quantile".to_string(),
"value should be between 0 and 1".to_string(),
*span,
Some(*span),
None,
Vec::new(),
))
}
}
_ => match value.span() {
Ok(span) => Err(ShellError::SpannedLabeledError(
Ok(span) => Err(ShellError::GenericError(
"Incorrect value for quantile".to_string(),
"value should be a float".to_string(),
span,
Some(span),
None,
Vec::new(),
)),
Err(e) => Err(e),
},
@ -242,7 +246,13 @@ fn command(
DataFrame::new(res)
.map_err(|e| {
ShellError::SpannedLabeledError("Dataframe Error".into(), e.to_string(), call.head)
ShellError::GenericError(
"Dataframe Error".into(),
e.to_string(),
Some(call.head),
None,
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

@ -66,18 +66,22 @@ fn command(
let new_df = col_string
.get(0)
.ok_or_else(|| {
ShellError::SpannedLabeledError(
ShellError::GenericError(
"Empty names list".into(),
"No column names where found".into(),
col_span,
Some(col_span),
None,
Vec::new(),
)
})
.and_then(|col| {
df.as_ref().drop(&col.item).map_err(|e| {
ShellError::SpannedLabeledError(
ShellError::GenericError(
"Error dropping column".into(),
e.to_string(),
col.span,
Some(col.span),
None,
Vec::new(),
)
})
})?;
@ -89,10 +93,12 @@ fn command(
.skip(1)
.try_fold(new_df, |new_df, col| {
new_df.drop(&col.item).map_err(|e| {
ShellError::SpannedLabeledError(
ShellError::GenericError(
"Error dropping column".into(),
e.to_string(),
col.span,
Some(col.span),
None,
Vec::new(),
)
})
})

@ -97,10 +97,12 @@ fn command(
df.as_ref()
.distinct(subset_slice, keep_strategy)
.map_err(|e| {
ShellError::SpannedLabeledError(
ShellError::GenericError(
"Error dropping duplicates".into(),
e.to_string(),
col_span,
Some(col_span),
None,
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))

@ -112,7 +112,13 @@ fn command(
df.as_ref()
.drop_nulls(subset_slice)
.map_err(|e| {
ShellError::SpannedLabeledError("Error dropping nulls".into(), e.to_string(), col_span)
ShellError::GenericError(
"Error dropping nulls".into(),
e.to_string(),
Some(col_span),
None,
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}

@ -115,11 +115,12 @@ fn command(
df.as_ref()
.to_dummies()
.map_err(|e| {
ShellError::SpannedLabeledErrorHelp(
ShellError::GenericError(
"Error calculating dummies".into(),
e.to_string(),
call.head,
"The only allowed column types for dummies are String or Int".into(),
Some(call.head),
Some("The only allowed column types for dummies are String or Int".into()),
Vec::new(),
)
})
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))

Some files were not shown because too many files have changed in this diff.