Compare commits

...

324 Commits

Author SHA1 Message Date
b2eecfb110 Bump to 0.14 (#1766) 2020-05-13 04:32:51 +12:00
b0aa142542 Add examples for some more commands (#1765) 2020-05-13 03:54:29 +12:00
247d8b00f8 doc: fix prepend example definition (#1761)
It seems that the description was copy-pasted by mistake from the
append command.
2020-05-12 19:46:21 +12:00
0b520eeaf0 Add a batch of help examples (#1759) 2020-05-12 17:17:17 +12:00
c3535b5c67 It-expansion fixes (#1757)
* It-expansion fixes

* fix clippy
2020-05-12 15:58:16 +12:00
8b9a8daa1d Add a batch of help examples (#1755) 2020-05-12 13:00:55 +12:00
c5ea4a31bd Adding coloring to help examples (#1754) 2020-05-12 11:06:40 +12:00
2275575575 Improve list view and ranges (#1753) 2020-05-12 08:06:09 +12:00
c3a066eeb4 Add examples to commands (#1752)
* Pass &dyn WholeStreamCommand to get_help

* Add an optional example to the WholeStreamCommand trait

* Add an example to the alias command
2020-05-12 08:05:44 +12:00
42eb658c37 Add a simplified list view (#1749) 2020-05-11 14:47:27 +12:00
a2e9bbd358 Improve duration math and printing (#1748)
* Improve duration math and printing

* Fix test
2020-05-11 13:44:49 +12:00
8951a01e58 update cal documentation (#1746) 2020-05-11 13:19:14 +12:00
f702aae72f Don't include year and month by default, adds an option to display th… (#1745)
* Don't include year and month by default, adds an option to display the quarters of the year

* Add a test for cal that checks that year requested is in the return

* rustfmt the cal tests
2020-05-11 12:35:24 +12:00
f5e03aaf1c Add cal command (#1739)
* Add cal command

* Fix documentation to show commands on two lines

* Use bullet points on flag documentation for cal

* Dereference day before calling string method

* Silence Clippy warning regarding a function with too many arguments

* Update cal flag descriptions and documentation

* Add some tests for the cal command
2020-05-10 11:05:48 +12:00
0f0847e45b Move 'start' to use ShellError (#1743)
* Move 'start' to use ShellError

* Remove unnecessary changes

* Add missing macOS change

* Add default

* More fixes

* More fixes
2020-05-10 08:08:53 +12:00
ccd5d69fd1 Bug fix start (#1738)
* fix bug on linux; added start to the stable list

* add to stable and fix clippy lint
2020-05-10 05:28:57 +12:00
55374ee54f Fix help text for alias command. (#1742)
* Fix help text for alias command.

* Rust fmt
2020-05-09 12:16:14 -05:00
f93ff9ec33 Make grouping more flexible. (#1741) 2020-05-09 12:15:47 -05:00
9a94b3c656 start command in nushell (#1727)
* mvp for start command

* modified the signature of the start command

* parse filenames

* working model for macos is done

* refactored to read from pipes

* start command works well on macos; manual testing reveals need of --args flag support

* implemented start error; color printing of warning and errors

* ran clippy and fixed warnings

* fix a clippy lint that was caught in pipeline

* fix dead code clippy lint for windows

* add cfg annotation to import
2020-05-09 06:19:48 +12:00
e04b89f747 [Gitpod] Refactor Gitpod configuration and add full Debugging support (#1728) 2020-05-09 05:41:24 +12:00
180c1204f3 Use playground instead of depending on fixture format files. (#1726) 2020-05-07 06:58:35 -05:00
96e5fc05a3 Pick->Select rename. Integration tests changes. (#1725)
Pick->Select rename. Integration tests changes.
2020-05-07 06:03:43 -05:00
c3efdf2689 Rename edit command to update. (#1724)
Rename edit command to update.
2020-05-07 00:33:30 -05:00
27fdef5479 Read exit status before failing in failed read from stdout pipe (#1723) 2020-05-07 13:42:01 +12:00
7ce8026916 Ignore empty arguments passed to externals (#1722) 2020-05-07 09:18:56 +12:00
8a9fc6a721 Fix changing to a new Windows drive (#1721)
* Fix changing to a new Windows drive

* Update cli.rs
2020-05-07 05:51:03 +12:00
c06a692709 Bash-like shorthand with-env (#1718)
* Bash-like shorthand with-env

* fix clippy warning
2020-05-06 18:57:37 +12:00
b37e420c7c Add with-env command (#1717) 2020-05-06 15:56:31 +12:00
22e70478a4 docs/commands: add to.md, update subcommands (#1715)
This adds a top-level document for the new `to` command, with a list (of links) of all the subcommands.

All the to-* subcommands keep their filename, but the content is updated to use the new subcommand syntax.

Since not all subcommands have documentation, some items in the list are just text without a link. Also filled the list for the undocumented from* commands in the same style.

Fixes #1709
2020-05-05 20:05:23 +12:00
8ab2b92405 docs/commands: add from.md, update subcommands (#1712)
This adds a top-level document for the new `from` command, with a list of links of all the subcommands.

All the from-* subcommands keep their filename, but the content is updated to use the new subcommand syntax.

Needs matching update for to*

Ref #1709
2020-05-05 09:01:31 +12:00
3201c90647 Extend to/from usage text to indicate subcommands (#1711)
Both to and from without a subcommand only print the help text. Expand the usage line a bit, so a glance at `help commands` indicates the existence of the subcommands and mentions some common formats.

Ref a9968046ed
Ref #1708
2020-05-05 09:00:29 +12:00
454f560eaa Properly deserialize history args (#1710) 2020-05-05 07:50:10 +12:00
d2ac506de3 Changes to allow plugins to be loaded in a multi-threaded manner (#1694)
* Changes to allow plugins to be loaded in a multi-threaded manner in order to decrease startup time.

* Ran rust fmt and clippy to find and fix first pass errors.
Updated launch.json to make debugging easier in VS Code.
Also added tasks.json so tasks like clippy can be run easily.

* ran fmt again

* Delete launch.json

Remove IDE settings file

* Remove IDE settings file

* Ignore vscode IDE settings

* Cloned the context instead of Arc/Mutexing it.

Co-authored-by: Darren Schroeder <fdncred@hotmail.com>
Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
2020-05-05 06:15:24 +12:00
a9968046ed Add subcommands. Switch from-* and to-* to them (#1708) 2020-05-04 20:44:33 +12:00
453087248a Properly drain iterating pipe so we can see errors (#1707) 2020-05-04 15:29:32 +12:00
81ff598d6c Fix column bugs associated with previous refactoring (#1705)
* Fix: the symlink target column will only display if either the `full` or `with_symlink_targets` options are given

* If the metadata for every item in the size column is None, do not show the size column

* Do not show the symlink target column if the metadata is None for all the items in the table
2020-05-04 14:58:11 +12:00
d7d487de73 Basic documentation for the wrap command (#1704) 2020-05-04 04:54:21 +12:00
8d69c77989 Display either dir metadata size or dir apparent size in ls (#1696)
* Show dir size in ls command

* Add the option to show the apparent directory size via `ls --du`
2020-05-03 17:09:17 +12:00
0779a46179 docs/commands: add alias.md (#1697)
* docs/commands: add alias.md

* docs/commands/alias: drop reference to bash
2020-05-03 16:49:27 +12:00
ada92f41b4 Parse file size (byte) units in all mixes of case (#1693) 2020-05-02 14:14:07 -04:00
ef3049f5a1 Reduce some repetitive code in files.rs (#1692) 2020-05-02 11:14:29 -04:00
1dab82ffa1 Gitignore JetBrains' IDE items (#1691) 2020-05-01 09:43:53 +12:00
e9e3fac59d Remove bin.is_file() because it's expensive (#1689)
This one change takes the startup time from 2.8 seconds to 1.2 seconds in my testing on Windows.
2020-05-01 06:43:59 +12:00
7d403a6cc7 Escape some symbols in external args (#1687)
* Escape some symbols in external args

* Don't escape on Windows, which does its own

* fix warning
2020-04-30 16:54:07 +12:00
cf53264438 Table operating commands. (#1686)
* Table operating commands.

* Updated merge test for clarity.

* More clarity.

* Better like this..
2020-04-29 23:18:24 -05:00
d834708be8 Finish remove the last traces of per-item (#1685) 2020-04-30 14:23:40 +12:00
f8b4784891 Add .DS_Store to .gitignore (#1684) 2020-04-30 13:12:18 +12:00
789b28ac8a Convert if expression to match (#1683) 2020-04-30 12:59:50 +12:00
db8219e798 extend it-expansion to externals (#1682)
* extend it-expansion to externals

* trim the carriage return for external strings
2020-04-30 07:09:14 +12:00
73d5310c9c make it-expansion work through blocks when necessary (#1681) 2020-04-29 19:51:46 +12:00
8d197e1b2f Show symlink sizes in ls (#1680)
* Show symlink sizes in ls

* Reduce redundancy in the size code of ls
2020-04-29 19:23:26 +12:00
c704157bc8 Replace ichwh with which and some fixes for auto-cd (canonicalization) (#1672)
* fix: absolutize path against its parent if it was a symlink.

On Linux this happens because Rust calls readlink but doesn't canonicalize the resultant path.

* feat: playground function to create symlinks

* fix: use playground dirs

* feat: test for #1631, shift tests names

* Creation of FilesystemShell with custom location may fail

* Replace ichwh with which

* Creation of FilesystemShell with custom location may fail

* Replace ichwh with which

* fix: add ichwh again since it cannot be completely replaced

* fix: replace one more use of which
2020-04-28 05:49:53 +12:00
6abb9181d5 bump to 0.13.1 (#1670) 2020-04-27 18:50:14 +12:00
006171d669 Fix per-item run_alias (#1669)
* Fix per-item run_alias

* Fix 1609 also
2020-04-27 18:10:34 +12:00
8bd3cedce1 It expansion (#1668)
* First step in it-expansion

* Fix tests

* fix clippy warnings
2020-04-27 14:04:54 +12:00
6f2ef05195 Resolves https://github.com/nushell/nushell/issues/1658 (#1660)
For example, when running the following:

    crates/nu-cli/src

nushell currently parses this as an external command. Before running the command, we check to see if
it's a directory. If it is, we "auto cd" into that directory, otherwise we go through normal
external processing.

If we put a trailing slash on it though, shells typically interpret that as "user is explicitly
referencing directory". So

    crates/nu-cli/src/

should not be interpreted as "run an external command". We intercept a trailing slash in the head
position of a command in a pipeline as such, and inject a `cd` internal command.
2020-04-27 13:22:01 +12:00
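A minimal sketch of the check described in the commit above, using simplified names that are not the actual nushell parser code: if the head word of a pipeline ends in a path separator or names an existing directory, it is rewritten into an internal `cd` instead of being run as an external command.

```
use std::path::Path;

/// Simplified illustration: decide whether the head word of a pipeline
/// should become an internal `cd` or be run as an external command.
fn interpret_head(word: &str) -> String {
    let explicit_dir = word.ends_with('/') || word.ends_with('\\');
    if explicit_dir || Path::new(word).is_dir() {
        format!("cd {}", word)
    } else {
        format!("run external: {}", word)
    }
}

fn main() {
    // Trailing slash: the user is explicitly referencing a directory.
    println!("{}", interpret_head("crates/nu-cli/src/"));
    // No slash and not an existing directory: treated as an external command.
    println!("{}", interpret_head("cargo"));
}
```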
80025ea684 Rows and values can be checked for emptiness. Allows to set a value if desired. (#1665) 2020-04-26 12:30:52 -05:00
a62745eefb make trim apply to strings contained in tables (also at deeper nesting (#1664)
* make trim apply to strings contained in tables (also at deeper nesting
levels), not just top-level strings

* remove unnecessary clone (thanks clippy)
2020-04-27 05:26:02 +12:00
2ac501f88e Adds drop number of rows command (#1663)
* Fix access to columns with quoted names

* Add a drop number of rows from end of table
2020-04-26 18:34:45 +12:00
df90d9e4b6 Fix access to columns with quoted names (#1662) 2020-04-26 18:01:55 +12:00
ad7a3fd908 Add not-in: operator (#1661) 2020-04-26 17:32:17 +12:00
ad8ab5b04d Add from-eml command (#1656)
* from-eml initial ver

* Adding tests for `from-eml`

* Add eml to prepares_and_decorates_filesystem_source_files

* Sort the file order

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
2020-04-26 16:26:35 +12:00
e7767ab7b3 suppress the parser error for an insufficient number of required arguments if the named keyword argument 'help' is given (#1659) 2020-04-26 04:45:45 +12:00
846a779516 Fix cd'ing to symlinked directories (#1651)
* fix: absolutize path against its parent if it was a symlink.

On Linux this happens because Rust calls readlink but doesn't canonicalize the resultant path.

* feat: playground function to create symlinks

* fix: use playground dirs

* feat: test for #1631, shift tests names
2020-04-25 18:09:00 +12:00
e7a4f31b38 Docs: Mention how to use str --find-replace in the docs. (#1653)
Co-authored-by: Christoph Siedentop <christoph@siedentop.name>
2020-04-25 18:07:38 +12:00
10768b6ecf str plugin can capitalize and trim strings. (#1652)
* Str plugin can capitalize.

* Str plugin can trim.
2020-04-24 16:37:58 -05:00
716c4def03 Add key_timeout config option (#1649) 2020-04-25 05:28:38 +12:00
0e510ad42b Values can be used as keys for grouping. (#1648) 2020-04-23 20:55:18 -05:00
3c222916c6 [docs]: How to run tests. (#1647)
Co-authored-by: Christoph Siedentop <christoph@siedentop.name>
2020-04-24 12:20:55 +12:00
6887554888 [docs/enter] Warn about enter opening multiple shells (#1645)
Opening a JSON with a top-level list opens one shell per list element. This can be extremely confusing to users who don't expect it.
2020-04-24 12:17:11 +12:00
d7bd77829f Bump rustyline (#1644)
* Bump rustyline

* Fix new clippy warnings

* Add pipeline command
2020-04-24 08:00:49 +12:00
9e8434326d Update azure-pipelines.yml 2020-04-24 07:23:56 +12:00
27bff35c79 Update azure-pipelines.yml 2020-04-24 07:21:27 +12:00
e2fae63a42 Update azure-pipelines.yml 2020-04-24 07:04:10 +12:00
701711eada Be more resilient with startup lines (#1642) 2020-04-23 17:22:01 +12:00
9ec2aca86f Support completion for paths with multiple dots (#1640)
* refactor: expand_path and expand_ndots now work for any string.

* refactor: refactor test and add new ones.

* refactor: convert expanded to owned string

* feat: pub export of expand_ndots

* feat: add completion for ndots in fs-shell
2020-04-23 16:17:38 +12:00
818171cc2c Make default completion mode OS dependent (#1636) 2020-04-23 10:53:40 +12:00
b3c623396f Fix to-csv command (#1635)
* Fix --headerless option of to-csv and to-tsv

Previously, to-csv --headerless split the "headerful" output into lines,
skipped the first, and then concatenated the rest together. That meant
all remaining output was put on the same line, without any delimiter,
making it unusable. Now we replace the range of the first line with the
empty string, leaving the rest of the output unchanged.

* Remove extra space in indentation of to_delimited_data

* Add --separator <string> argument to to-csv

This functionality was already present, but wasn't exposed
to the command.
2020-04-23 05:08:53 +12:00
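A rough sketch of the "replace the first line" approach described above (illustrative only, not the actual to_delimited_data code): strip everything up to and including the first newline, leaving the remaining rows and their line breaks untouched.

```
/// Drop the header row from delimited output while leaving the remaining
/// rows (and their newlines) exactly as they were.
fn drop_header_line(output: &str) -> &str {
    match output.find('\n') {
        Some(newline) => &output[newline + 1..],
        None => "",
    }
}

fn main() {
    let csv = "name,size\nfoo,10\nbar,20\n";
    assert_eq!(drop_header_line(csv), "foo,10\nbar,20\n");
}
```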
88f06c81b2 Update Cargo.toml 2020-04-22 06:34:32 +12:00
e6b315f05b Bump ichwh to version 0.3.4 (#1627)
Fixes #1626 and [this ichwh issue](https://gitlab.com/avandesa/ichwh-rs/-/issues/3)
2020-04-22 05:37:14 +12:00
01ef6b0732 Add config to disable table index column (#1623) 2020-04-21 18:09:22 +12:00
c7e11a5a28 bump to 0.13.0 (#1625) 2020-04-21 17:01:03 +12:00
ce0231049e Fix the panic running a bad alias (#1624) 2020-04-21 16:11:34 +12:00
0f7b270740 Add exit code verification (#1622) 2020-04-21 15:14:18 +12:00
72cf57dd99 Add booleans and fix semicolon shortcircuit (#1620) 2020-04-21 12:30:01 +12:00
e4fdb36511 External vars (#1615)
* fix empty table and missing spans

* wip

* WIP

* WIP

* working version with vars

* tidying

* WIP

* Fix external quoting issue
2020-04-21 09:45:11 +12:00
2ffb14c7d0 fix empty table and missing spans (#1614) 2020-04-20 19:44:19 +12:00
eec94e4016 Semicolon (#1613)
* WIP on blocks

* Getting further

* add some tests
2020-04-20 18:41:51 +12:00
6412bfd58d Move alias arguments to optional arguments (#1612)
This will allow people to use more variables to capture more, if necessary, without having to implement required/optional/rest
2020-04-20 16:04:40 +12:00
522a828687 Move external closer to internal (#1611)
* Refactor InputStream and affected commands.

First, making `values` private and leaning on the `Stream` implementation makes
consumers of `InputStream` less likely to have to change in the future, if we
change what an `InputStream` is internally.

Second, we're dropping `Option<InputStream>` as the input to pipelines,
internals, and externals. Instead, `InputStream.is_empty` can be used to check
for "emptiness". Empty streams are typically only ever used as the first input
to a pipeline.

* Add run_external internal command.

We want to push external commands closer to internal commands, eventually
eliminating the concept of "external" completely. This means we can consolidate
a couple of things:

- Variable evaluation (for example, `$it`, `$nu`, alias vars)
- Behaviour of whole stream vs per-item external execution

It should also make it easier for us to start introducing argument signatures
for external commands.

* Update run_external.rs

* Update run_external.rs

* Update run_external.rs

* Update run_external.rs

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
2020-04-20 15:30:44 +12:00
6b8c6dec0e Bump ichwh minimum version to 0.3.3 (#1607)
The new version includes a fix for [this issue]

[this issue]: https://gitlab.com/avandesa/ichwh-rs/-/issues/2
2020-04-19 13:38:14 +12:00
2b0212880e Simplify cp command and allow multiple recursive copying (#1580)
* Better error message

* Use custom canonicalize in FileStructure build

* Better glob error in ls

* Use custom canonicalize, remove some duplicate code in cd.

* Enable recursive copying with patterns.

* Change test to fit new error message

* Test recursive with glob pattern

* Show that no matches were found in cp

* Fix typo in error message

* Change old canonicalize usage, follow newest changes
2020-04-19 11:05:24 +12:00
a16a91ede8 Fix precedence parsing of parens. Limit use (#1606) 2020-04-19 06:39:06 +12:00
c2a9bc3bf4 Add better docs to parser (#1604) 2020-04-19 05:30:40 +12:00
e5a79d09df Fix the versions up a bit to prevent breakage (#1602)
* Fix the versions up a bit to prevent breakage

* fix up the quickcheck test to not fail on parse error
2020-04-18 15:31:57 +12:00
7974e09eeb Math operators (#1601)
* Add some math operations

* WIP for adding compound expressions

* precedence parsing

* paren expressions

* better lhs handling

* add compound comparisons and shorthand lefthand parsing

* Add or comparison and shorthand paths
2020-04-18 13:50:58 +12:00
52d2d2b888 A random set of fixes (#1600) 2020-04-17 18:19:49 +12:00
ee778d2b03 WIP fix for the error bubbling (#1597) 2020-04-16 16:25:24 +12:00
928188b18e Redesign custom canonicalize, enable multiple dots expansion earlier. (#1588)
* Expand n dots early where tilde was also expanded.

* Remove normalize, not needed.
New function absolutize: doesn't follow links nor check existence.
Renamed canonicalize_existing to canonicalize, works as expected.

* Remove normalize usages, change canonicalize.

* Treat strings as paths
2020-04-16 11:29:22 +12:00
59d516064c Add alias support to scripts and -c (#1593) 2020-04-16 05:50:35 +12:00
bd5836e25d Aliases (#1589)
* WIP getting scopes right

* finish adding initial support

* Finish with alias and add startup commands
2020-04-15 17:43:23 +12:00
e3da037b80 Canonical expr block (#1584)
* Add the canonical form for an expression block

* Remove commented out code
2020-04-14 06:59:33 +12:00
08a09e2273 Pipeline blocks (#1579)
* Making Commands match what UntaggedValue needs

* WIP

* WIP

* WIP

* Moved to expressions for conditions

* Add 'each' command to use command blocks

* More cleanup

* Add test for 'each'

* Instead use an expression block
2020-04-13 19:59:57 +12:00
85d6b24be3 Add more cmd builtins (#1583) 2020-04-13 13:46:59 +12:00
ed583bd79b Bump to latest ichwh (#1582) 2020-04-13 08:01:23 +12:00
e0fc09ac52 move rustyline to 6.1.1 to fix windows crash (#1581) 2020-04-13 07:01:44 +12:00
38b2846024 Split canonicalize function in two for missing and existing behavior (#1576)
* Split allow missing logic in two functions

* Replace use of old canonicalize
2020-04-12 20:33:38 +12:00
57c62de66f Remove erroneous GPL license header (#1578) 2020-04-12 20:16:50 +12:00
dd4935fb23 Add quickcheck (#1574)
* Move uptime to being a duration value

* Adds our first quickcheck test
2020-04-12 07:05:59 +12:00
18dd009ca8 Unified path expansion under new module and better canonicalize (#1571)
* New 'path' module under nu-cli.
Added normalize and canonicalize method.
Added some unit tests.

* Replace old usages of normalize and canonicalize.

* Fix reading symlinks and existence logic.

* Better explained
2020-04-12 07:05:29 +12:00
c0dda36217 Update CONTRIBUTING.md 2020-04-12 06:54:16 +12:00
75b72f844e Create CONTRIBUTING.md (#998)
* Create CONTRIBUTING.md

* mention gitpod

* Update CONTRIBUTING.md

* mention community supports

* fix capitalization issues
2020-04-12 06:52:53 +12:00
fbddc12c02 Move uptime to being a duration value (#1573) 2020-04-11 19:40:56 +12:00
8e7e8c17e1 Make trash support optional (#1572) 2020-04-11 18:53:53 +12:00
8ac9d781fd Remove source text where not needed (#1567) 2020-04-10 19:56:48 +12:00
c86cf31aac some minor improvements and removing dead code (#1563) 2020-04-10 07:48:10 +12:00
2c513d1883 More dep bumps (#1562) 2020-04-09 10:28:20 +12:00
04702530a3 Bump a lot of deps (#1560) 2020-04-07 19:51:17 +12:00
c9f424977e actually bump version (#1559) 2020-04-07 07:18:47 +12:00
183c8407de fix nu variable. tweak shells (#1558) 2020-04-07 05:30:54 +12:00
d0618b0b32 Enable the use of multiple dots in FS Shell commands (#1547)
Every dot after `..` means another parent directory.
2020-04-06 07:28:56 -04:00
c4daa2e40f Add experimental new parser (#1554)
Move to an experimental new parser
2020-04-06 19:16:14 +12:00
0a198b9bd0 Have FilesystemShell#rm correctly delete symlinks (#1550) 2020-04-05 17:17:52 -04:00
6a604491f5 ci(docker-publish): force remove non-executable (#1540) 2020-04-02 04:20:18 +13:00
791f7dd9c3 Bump to 0.12.0 (#1538) 2020-04-01 06:25:21 +13:00
a4c1b092ba Add configurations for table headers (#1537)
* Add configurations for table headers

* lint

Co-authored-by: Amanita-Muscaria <nope>
2020-03-31 12:19:48 +13:00
6e71c1008d Change get to remove blanks (#1534)
Remove blank values when getting a column of values
2020-03-30 15:36:21 +13:00
906d0b920f A few improvements to du implementation: (#1533)
1. Fixed a bug where `--all` wasn't showing files at the root directories.
2. More use of `Result`'s `map` and `map_err` methods.
3. Making tables be homogeneous so one can, for example, `get directories`.
2020-03-29 21:16:09 -04:00
efbf4f48c6 Fix poor message for executable that user doesn't have permissi… (#1535)
Previously, if the user didn't have the appropriate permissions to execute the
binary/script, they would see "command not found", which is confusing.

This commit eliminates the `which` crate in favour of `ichwh`, which deals
better with permissions by not dealing with them at all! This is closer to the
behaviour of `which` in many shells. Permission checks are then left up to the
caller to deal with.
2020-03-29 21:15:55 -04:00
2ddab3e8ce Some small improvements to du readability.
Mostly, making more use of `map` and `map_err` in `Result`. One benefit
is that at least one location had duplicated logic for how to map the
error, which is no longer the case after this commit.
2020-03-29 17:03:01 -04:00
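For illustration, the kind of `Result` combinator usage the du commits above refer to; `file_len` is a made-up example, not a function from the du implementation.

```
use std::fs;

/// `map` transforms the success value and `map_err` converts the error in one
/// expression, so the error-mapping logic is written once instead of being
/// duplicated in match arms at every call site.
fn file_len(path: &str) -> Result<u64, String> {
    fs::metadata(path)
        .map(|meta| meta.len())
        .map_err(|err| format!("{}: {}", path, err))
}

fn main() {
    match file_len("Cargo.toml") {
        Ok(len) => println!("{} bytes", len),
        Err(msg) => eprintln!("{}", msg),
    }
}
```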
35dc7438a5 Make use of interruptible stream in various places 2020-03-29 17:03:01 -04:00
2a54ee0c54 Introduce InterruptibleStream type.
An interruptible stream can query an `AtomicBool`. If that bool is true,
the stream will no longer produce any values.

Also introducing the `Interruptible` trait, which extends any `Stream`
with the `interruptible` function, to simplify the construction and
allow chaining.
2020-03-29 17:03:01 -04:00
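A minimal iterator-based analogue of the idea (the real type wraps a `Stream`, and the names below are illustrative): once a shared `AtomicBool` is set, the wrapper stops producing values.

```
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

/// Wraps an inner iterator and stops yielding once the shared flag is set,
/// e.g. by a Ctrl-C handler on another thread.
struct Interruptible<I> {
    inner: I,
    interrupt: Arc<AtomicBool>,
}

impl<I: Iterator> Iterator for Interruptible<I> {
    type Item = I::Item;

    fn next(&mut self) -> Option<I::Item> {
        if self.interrupt.load(Ordering::SeqCst) {
            None
        } else {
            self.inner.next()
        }
    }
}

fn main() {
    let interrupt = Arc::new(AtomicBool::new(false));
    let mut values = Interruptible {
        inner: 0u64..,
        interrupt: Arc::clone(&interrupt),
    };
    assert_eq!(values.next(), Some(0));
    interrupt.store(true, Ordering::SeqCst); // simulate an interrupt
    assert_eq!(values.next(), None);
}
```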
cad2741e9e Split input and output streams into separate modules 2020-03-29 17:03:01 -04:00
ae5f3c8210 WIP: 1486/first row as headers (#1530)
* headers plugin

* Remove plugin

* Add non-functioning headers command

* Add ability to extract headers from first row

* Refactor header extraction

* Rebuild indexmap with proper headers

* Rebuild result properly

* Compiling, probably wrapped too much?

* Refactoring

* Deal with case of empty header cell

* Deal with case of empty header cell

* Fix formatting

* Fix linting, attempt 2.

* Move whole_stream_command(Headers) to more appropriate section

* ... more linting

* Return Err(ShellError...) instead of panic, yield each row instead of entire table

* Insert Column[index] if no header info is found.

* Update error description

* Add initial test

* Add tests for headers command

* Lint test cases in headers

* Change ShellError for headers, Add sample_headers file to utils.rs

* Add empty sheet to test file

* Revert "Add empty sheet to test file"

This reverts commit a4bf38a31d.

* Show error message when given empty table
2020-03-29 15:05:57 +13:00
a5e97ca549 Respect CARGO_TARGET_DIR when set (#1528)
This makes the `binaries` function respect the `CARGO_TARGET_DIR` environment variable when set. If it's not present, it falls back to the regular target directory used by Cargo.
2020-03-27 17:13:59 -04:00
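A small sketch of that fallback (illustrative, not the actual test helper): use `CARGO_TARGET_DIR` when it is set, otherwise Cargo's default `target` directory.

```
use std::env;
use std::path::PathBuf;

/// Resolve the build output directory: honour CARGO_TARGET_DIR when set,
/// otherwise fall back to Cargo's default `target` directory.
fn target_dir() -> PathBuf {
    env::var_os("CARGO_TARGET_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from("target"))
}

fn main() {
    println!("test binaries live under {}", target_dir().display());
}
```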
06f87cfbe8 Add support for removing multiple files at once (#1526) 2020-03-25 16:19:01 -04:00
d4e78c6f47 Improve the rotated row wrap (#1524) 2020-03-25 06:27:16 +13:00
3653400ebc testing fix to matrix to define all variables (#1522)
There is currently a bug with invalid syntax for some of the
Docker build steps, and I think this is because there are build
variables in the matrix that are not defined. This PR will
attempt to resolve this issue by defining all missing variables
for each row in the matrix.

Signed-off-by: vsoch <vsochat@stanford.edu>
2020-03-24 16:20:39 +13:00
81a48d6d0e Fix '/' and '..' not being valid mv targets (#1519)
* Fix '/' and '..' not being valid mv targets

If `/` or `../` is specified as the destination for `mv`, it will fail with an error message saying it's not a valid destination. This fixes it to account for the fact that `Path::file_name` returns `None` when the file name evaluates to `/` or `..`. It will only take the slow(er) path if `Path::file_name` returns `None` in its initial check.

Fixes #1291

* Add test
2020-03-24 14:00:48 +13:00
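The standard-library behaviour the fix works around can be seen directly; a destination check built on `Path::file_name` needs a separate branch for these cases.

```
use std::path::Path;

fn main() {
    // `file_name` is None for a bare root and for paths ending in `..`,
    // so `/` and `../` need their own fallback when validating an `mv` target.
    assert_eq!(Path::new("/").file_name(), None);
    assert_eq!(Path::new("../").file_name(), None);
    assert_eq!(
        Path::new("docs/readme.md").file_name(),
        Some(std::ffi::OsStr::new("readme.md"))
    );
}
```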
f030ab3f12 Add experimental auto-rotate (#1516) 2020-03-23 09:55:30 +13:00
0dc0c6a10a Add quickstart option to Docker section in README (#1515) 2020-03-23 09:18:50 +13:00
53c8185af3 Fixes the crash for ps --full in Windows (#1514)
* Fixes the crash for `ps --full` in Windows

* Update ps.rs
2020-03-23 08:28:02 +13:00
36b5d063c1 Simplify and improve listing for which. (#1510)
* Simplified implementation
* Show executables, even if the current user doesn't have permissions to
  execute them.
2020-03-22 09:11:39 -04:00
a7ec00a037 Add documentation for from-ics and from-vcf (#1509) 2020-03-21 14:50:13 +13:00
918822ae0d Fix numeric comparison with nothing (#1508) 2020-03-21 11:02:49 +13:00
ab5e24a0e7 WIP: Add vcard/ical support (#1504)
* Initial from-ical implementation

* Initial from-vcard implementation

* Rename from-ics and from-vcf for autoconvert

* Remove redundant clones

* Add from-vcf and from-ics tests

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
2020-03-21 08:35:09 +13:00
b5ea522f0e Add a --full mode to ps (#1507)
* Add a --full mode to ps

* Use a slightly older heim
2020-03-20 20:53:49 +13:00
afa963fd50 Add is_dir check to auto-cd (#1506)
* Add markdown output

* Add is_dir() check
2020-03-20 16:57:36 +13:00
1e343ff00c Add markdown output (#1503) 2020-03-20 08:18:24 +13:00
21a543a901 Make sum plugin as internal command. (#1501) 2020-03-18 18:46:00 -05:00
390deb4ff7 Windows needs to remember auto-cd paths when changing drives (#1500)
* Windows needs to remember auto-cd paths when changing drives

* Windows needs to remember auto-cd paths when changing drives
2020-03-18 15:10:45 +13:00
1c4cb30d64 Add documentation for skip and skip-while (#1499) 2020-03-18 14:22:35 +13:00
1ec2ec72b5 Add automatic change directory (#1496)
* Allow automatic cd in cli mode

* Set correct priority for auto-cd and add test
2020-03-18 07:13:38 +13:00
0d244a9701 Open fails silently, fix #1493 (#1495)
* Fix #1493

The error was wrongfully discarded

* Run cargo fmt
2020-03-17 17:40:04 +13:00
b36d21e76f Infer types from regular delimited plain text unstructured files. (#1494)
* Infer types from regular delimited plain text unstructured files.

* Nothing resolves to an empty string.
2020-03-16 15:50:45 -05:00
d8c4565413 Csv errors (#1490)
* Add error message for csv parsing failures

* Add csv error prettyfier

* Improve readability of the error

Line 2: error is easier to understand than:
Line 2, error

* Remove unnecessary use of the format! macro

Replacing it with .to_string() fixes a clippy warning

* Improve consistency with JSON parsing errors
2020-03-16 12:32:02 -05:00
22ba4c2a2f Add svg support to to-html (#1492) 2020-03-16 20:19:18 +13:00
8d19b21b9f Custom canonicalize method on Filesystem Shell. (#1485)
* Custom canonicalize method for FilesystemShell.

* Use custom canonicalize method.
Fixed missing import.

* Move function body to already impl body.

* Create test that aims to resolve.
2020-03-16 19:28:18 +13:00
45a3afdc79 Update Cargo.toml 2020-03-16 06:12:28 +13:00
2d078849cb Add simple to-html output and bump version (#1487) 2020-03-15 16:04:44 +13:00
b6363f3ce1 Added new flag '--all/-a' for Ls, also refactor some code (#1483)
* Utility function to detect hidden folders.
Implemented for Unix and Windows.

* Rename function argument.

* Revert "Rename function argument."

This reverts commit e7ab70f0f0.

* Add flag '--all/-a' to Ls

* Rename function argument.

* Check if flag '--all/-a' is present and path is hidden.
Replace match with map_err for glob result.
Remove redundancy in stream body.
Included comments on new stream body.
Replace async_stream::stream with async_stream::try_stream.
Minor tweaks to is_empty_dir.
Fix and refactor is_hidden_dir.

* Fix "implicit" bool coerse

* Fixed clippy errors
2020-03-14 06:27:04 +13:00
5ca9e12b7f Fix whitespace and typos (#1481)
* Remove EOL whitespace in files other than docs

* Break paragraphs into lines

See http://rhodesmill.org/brandon/2012/one-sentence-per-line/ for the rationale

* Fix various typos

* Remove EOL whitespace in docs/commands/*.md
2020-03-14 06:23:41 +13:00
5b0b2f1ddd Fixes #1204 : sys | get host.users displays the same user (#1480)
account twice while only one exists (macOS)

- renamed host.users to host.sessions
2020-03-12 14:01:55 +13:00
3afb53b8ce fix typo in calc command documentation (#1477)
minimumum -> minimum
2020-03-11 11:20:22 -04:00
b40d16310c More relaxed file modes for now. (#1476) 2020-03-11 13:19:15 +13:00
d3718d00db Merge shuffle nu plugin as core command. (#1475) 2020-03-10 17:00:08 -05:00
f716f61fc1 Update Cargo.lock 2020-03-11 08:53:26 +13:00
b2ce669791 Update Cargo.toml 2020-03-11 08:51:53 +13:00
cd155f63e1 Update Cargo.toml 2020-03-11 08:51:17 +13:00
9eaa6877f3 Update Cargo.toml 2020-03-11 08:50:51 +13:00
a6b6afbca9 Update Cargo.toml 2020-03-11 08:08:13 +13:00
62666bebc9 Bump to 0.11.0 (#1474) 2020-03-11 06:34:19 +13:00
d1fcce0cd3 Fixes #1427: Prints help message with -h switch (#1454)
For some commands like `which`, the -h flag would trigger an error asking for
missing required parameters instead of printing the help message as it
does with --help. This commit adds a check in the command parser to
avoid that.
2020-03-11 05:59:50 +13:00
a2443fbe02 Remove unused parsing logic. (#1473)
* Remove unused parsing logic.

* Run tokens iteration tests baseline.

* Pass lint.

* lifetimes can be elided without being explicit.
2020-03-10 04:31:42 -05:00
db16b56fe1 Columnpath support when passing fields for formatting. (#1472) 2020-03-10 01:55:03 -05:00
54bf671a50 Fix deleting / showing ls named pipes and other fs objects no… (#1461)
* Fix deleting named pipes
* Use std::os::unix::fs::FileTypeExt to show correct type for unix-specific fs objects; Fix formatting

Co-authored-by: Linards Kalvāns <linards.kalvans@twino.eu>
2020-03-09 09:02:53 -04:00
755d0e648b Eliminate some compiler warnings (#1468)
- Unnecessary parentheses
- Deprecated `description()` method
2020-03-09 08:19:07 +13:00
e440d8c939 Bump some deps (#1467) 2020-03-09 08:18:44 +13:00
01dd358a18 Don't emit a newline in autoview. (#1466)
The extra newline character makes it hard to use nu as part of an
external processing pipeline, since the extra character could taint the
results. For example:

```
$ nu -c 'echo test | xxd'
00000000: 7465 7374                                test
```

versus

```
nu -c 'echo test' | xxd
00000000: 7465 7374 0a                             test.
```
2020-03-09 08:18:24 +13:00
50fb97f6b7 Merge env into $nu and simplify table/get (#1463) 2020-03-08 18:33:30 +13:00
ebf139f5e5 Auto-detect string / binary in save command (#1459)
* Auto-detect string / binary in save command

* Linter
2020-03-08 07:33:29 +13:00
8925ca5da3 Move to bytes/string hybrid codec (#1457)
* WIP: move to bytes codec

* Progress on adding collect helpers

* Progress on adding collect helpers

* Add in line splitting back to lines

* Lines outputting line primitives

* Close to ready?

* Finish fixing lines

* clippy fixes

* fmt fixes

* removed unused code

* Cleanup a few bits

* Cleanup a few bits

* Cleanup a few more bits

* Fix failing test with corrected test case
2020-03-07 05:06:39 +13:00
287652573b Fix and refactor cd for Filesystem Shell. (#1452)
* Fix and refactor cd for Filesystem Shell.
Reorder check conditions, don't check existence twice.
If building for unix check exec bit on folder.

* Import PermissionsExt only on unix target.

* It seems that this is the correct way?
2020-03-06 20:13:47 +13:00
db24ad8f36 Add --num parameter to limit the number of output lines (#1455)
Add `--num` parameter to limit the number of returned elements
2020-03-05 05:26:46 -05:00
f88674f353 Nu internals are logged under nu filter. (#1451) 2020-03-05 05:18:53 -05:00
59cb0ba381 Color commands appropriately. (#1453) 2020-03-04 23:22:42 -05:00
c4cfab5e16 Make feature options available downstream to nu-cli subcrate. (#1450) 2020-03-04 15:31:12 -05:00
b2c5af457e Move most of the root package into a subcrate. (#1445)
This improves incremental build time when working on what was previously
the root package. For example, previously all plugins would be rebuilt
with a change to `src/commands/classified/external.rs`, but now only
`nu-cli` will have to be rebuilt (and anything that depends on it).
2020-03-04 13:58:20 -05:00
c731a5b628 Columns can be renamed. (#1447) 2020-03-03 16:01:24 -05:00
f97f9d4af3 Update deps lockfile (#1446)
Update deps lockfile
2020-03-03 15:34:22 -05:00
ed7d3fed66 Add shuffle plugin (#1443)
* Add shuffle plugin

see #1437

* Change plugin to integrate into nu structure and build system
2020-03-03 08:44:12 +13:00
7304d06c0b Use threads to avoid blocking reads/writes in externals. (#1440)
In particular, one thing that we can't (properly) do before this commit
is consuming an infinite input stream. For example:

```
yes | grep y | head -n10
```

will give 10 "y"s in most shells, but blocks indefinitely in nu. This PR
resolves that by doing blocking I/O in threads, and reducing the `await`
calls we currently have in our pipeline code.
2020-03-02 06:19:09 +13:00
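A simplified standalone sketch of the technique described above (not nushell's pipeline code; it assumes a Unix system with `yes` on the PATH): the blocking read happens on its own thread, and the consumer pulls lines from a channel and can stop early.

```
use std::io::{BufRead, BufReader};
use std::process::{Command, Stdio};
use std::sync::mpsc;
use std::thread;

fn main() -> std::io::Result<()> {
    // Spawn an external command that never stops producing output.
    let mut child = Command::new("yes").stdout(Stdio::piped()).spawn()?;
    let stdout = child.stdout.take().expect("stdout was piped");
    let (tx, rx) = mpsc::channel();

    // Do the blocking reads on a dedicated thread, forwarding lines over a channel.
    thread::spawn(move || {
        for line in BufReader::new(stdout).lines() {
            if tx.send(line).is_err() {
                break; // the consumer went away; stop reading
            }
        }
    });

    // The consumer takes just what it needs, like `yes | head -n10`.
    for line in rx.iter().take(10) {
        println!("{}", line?);
    }
    child.kill()?;
    Ok(())
}
```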
ca615d9389 Bump to 0.10.1 (#1442) 2020-03-01 20:59:13 +13:00
6d096206b6 Add support for compound shorthand flags (#1414)
* Break multicharacter shorthand flags into single character flags

* Remove shorthand flag test
2020-03-01 13:20:42 +13:00
2a8cb24309 Add support for downloading unsupported mime types (#1441) 2020-03-01 13:14:36 +13:00
8d38743e27 Add docs for debug (#1438)
* Add docs for `debug`

* Put debug docs in right folder
Also fixed minor spacing problem
2020-03-01 04:09:28 +13:00
eabfa2de54 Let ls ignore permission errors (#1435)
* Create a function to create an empty directory entry

* Print an empty directory entry if permission is denied

* Fix rustfmt whitespace issues.

* Made metadata optional for `dir_entry_dict`.

Removed `empty_dir_entry_dict` as it's not needed anymore.
2020-02-29 14:33:52 +13:00
a86a0abb90 Plugin documentation (#1431)
* Add very basic documentation. Need to play with rest of the api to figure out what it does

* Add some documentation to more of the Plugin API methods

* fmt
2020-02-24 15:28:46 +13:00
adcda450d5 Update LICENSE 2020-02-21 10:49:46 +13:00
147b9d4436 Add Better-TOML (#1417) 2020-02-19 16:59:42 -05:00
c43a58d9d6 Fix incorrect display for zero-size files (#1422) 2020-02-19 09:57:58 -05:00
e38442782e Command documentation for du (#1416) 2020-02-19 09:55:22 +13:00
b98f893217 add a touch command (#1399) 2020-02-19 09:54:32 +13:00
bd6556eee1 Use proper file extension for uniq command docs (#1411) 2020-02-18 09:37:46 -05:00
18d988d4c8 Restrict short-hand flag detection to exact match. (#1406) 2020-02-18 01:58:30 -05:00
0f7c723672 Bump version to 0.10.0 (#1403) 2020-02-18 16:56:09 +13:00
afce2fd0f9 Revert "Display rows in the same table regardless of their column order given they are equal. (#1392)" (#1401)
This reverts commit 4fd9974204.
2020-02-17 17:34:37 -08:00
4fd9974204 Display rows in the same table regardless of their column order given they are equal. (#1392) 2020-02-16 20:35:01 -05:00
71615f77a7 Fix minor typo in calc command error (#1395) 2020-02-16 16:02:41 -05:00
9bc5022c9c Force a \n at the end of a stdout stream (#1391)
* Force a \n at the end of a stdout stream

* clippy
2020-02-14 18:15:32 -08:00
552848b8b9 Leave raw mode correctly. (#1388)
Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
2020-02-14 17:31:21 -05:00
8ae8ebd107 Add support for multiline script files (#1386)
* Add support for multiline script files

* clippy
2020-02-13 21:24:18 -08:00
473e9f9422 Tiny improvement to sys (#1385) 2020-02-13 08:33:55 -08:00
96985aa692 Fix invalid shorthand flag (#1384) 2020-02-13 07:47:34 -08:00
0961da406d Add string to datetime to str plugin (#1381)
* Add string to datetime to str plugin

* Test string to date/time conversion
2020-02-13 07:47:04 -08:00
84927d52b5 Refuse internal command execution given unexpected arguments. (#1383) 2020-02-13 02:34:43 -05:00
73312b506f Finer grained parsing and coloring command tail. (#1382) 2020-02-12 20:20:19 -05:00
c1bec3b443 Return error on a divide by zero (#1376)
Return error on a divide by zero
2020-02-12 08:38:04 -05:00
c0be02a434 Short-hand flags (#1378)
* typo fixes

* Change signature to take in short-hand flags

* update help information

* Parse short-hand flags as their long counterparts

* lints

* Modified a couple tests to use shorthand flags
2020-02-11 18:24:31 -08:00
2ab8d035e6 External it and nu variable column path fetch support. (#1379) 2020-02-11 18:25:56 -05:00
24094acee9 Allow switch flags anywhere in the pipeline. (#1375) 2020-02-11 03:49:00 -05:00
0b2be52bb5 Only add quotes if not in Windows (which adds its own?) (#1374)
* Only add quotes if not in Windows (which adds its own?)

* Only add quotes if not in Windows (which adds its own?)
2020-02-10 23:07:44 -08:00
6a371802b4 Add block size to du (#1341)
* Add block size to du

* Change blocks to physical size

* Use path instead of strings for file/directory names

* Why don't I just use paths instead of strings anyway?

* shorten physical size and apparent size to physical and apparent resp.
2020-02-10 12:32:18 -08:00
29ccb9f5cd Ensure stable plugins get installed. (#1373) 2020-02-10 15:32:10 -05:00
20ab125861 bump version (#1370) 2020-02-10 09:18:00 -08:00
fb532f3f4e Prototype shebang support (#1368)
* Add shebang support to nu.

* Move test file

* Add test for scripts

Co-authored-by: Jason Gedge <jason.gedge@shopify.com>
2020-02-10 08:49:45 -08:00
a29d52158e Do not panic when failing to decode lines from external stdout (#1364) 2020-02-10 07:37:48 -08:00
dc50e61f26 Switch stdin redirect to manual. Add test (#1367) 2020-02-09 22:55:07 -08:00
a2668e3327 Add some nu_source docs for meta.rs (#1366)
* Add some docs for meta.rs

* add better explanation for Span merging

* Add some doc tests - not sure how to get them to run

* get rid of doc comments for the temporary method

* add doc test for is_unknown

* fmt
2020-02-09 18:08:14 -08:00
e606407d79 Add error codes to -c (#1361) 2020-02-08 20:04:53 -08:00
5f4fae5b06 Pipeline sink refactor (#1359)
* Refactor pipeline ahead of block changes. Add '-c' commandline option

* Update pipelining an error value

* Fmt

* Clippy

* Add stdin redirect for -c flag

* Add stdin redirect for -c flag
2020-02-08 18:24:33 -08:00
3687603799 Only spawn external once when no $it argument (#1358) 2020-02-08 17:57:05 -08:00
643b532537 Fixed mv not throwing error when the source path was invalid (#1351)
* Fixed mv not throwing error when the source path was invalid

* Fixed failing test

* Fixed another lint error

* Fix $PATH conflicts in .gitpod.Dockerfile (#1349)

- Use the correct user for gitpod Dockerfile.
- Remove unneeded packages (curl, rustc) from gitpod Dockerfile.

* Added test to check for the error

* Fixed linting error

* Fixed mv not moving files on Windows. (#1342)

Move files correctly in windows.

* Fixed mv not throwing error when the source path was invalid

* Fixed failing test

* Fixed another lint error

* Added test to check for the error

* Fixed linting error

* Changed error message

* Typo and fixed test

Co-authored-by: Sean Hellum <seanhellum45@gmail.com>
2020-02-07 12:40:48 -05:00
ed86b1fbe8 Fixed mv not moving files on Windows. (#1342)
Move files correctly in windows.
2020-02-07 11:24:01 -05:00
44a114111e Fix $PATH conflicts in .gitpod.Dockerfile (#1349)
- Use the correct user for gitpod Dockerfile.
- Remove unneeded packages (curl, rustc) from gitpod Dockerfile.
2020-02-06 15:20:18 -05:00
812a76d588 Update more futures-preview to futures (#1346) 2020-02-05 20:28:42 -08:00
e3be849c2a Futures v0.3 upgrade (#1344)
* Upgrade futures, async-stream, and futures_codec

These were the last three dependencies on futures-preview. `nu` itself
is now fully dependent on `futures@0.3`, as opposed to `futures-preview`
alpha.

Because the update to `futures` from `0.3.0-alpha.19` to `0.3.0` removed
the `Stream` implementation of `VecDeque` ([changelog][changelog]), most
commands that convert a `VecDeque` to an `OutputStream` broke and had to
be fixed.

The current solution is to now convert `VecDeque`s to a `Stream` via
`futures::stream::iter`. However, it may be useful for `futures` to
create an `IntoStream` trait, implemented on the `std::collections` (or
really any `IntoIterator`). If something like this happens, it may be
worthwhile to update the trait implementations on `OutputStream` and
refactor these commands again.

While upgrading `futures_codec`, we remove a custom implementation of
`LinesCodec`, as one has been added to the library. There's also a small
refactor to make the stream output more idiomatic.

[changelog]: https://github.com/rust-lang/futures-rs/blob/master/CHANGELOG.md#030---2019-11-5

* Upgrade sys & ps plugin dependencies

They were previously dependent on `futures-preview`, and `nu_plugin_ps`
was dependent on an old version of `futures-timer`.

* Remove dependency on futures-timer from nu

* Update Cargo.lock

* Fix formatting

* Revert fmt regressions

CI is still on 1.40.0, but the latest rustfmt v1.41.0 has changes to the
`val @ pattern` syntax, causing the linting job to fail.

* Fix clippy warnings
2020-02-05 19:46:48 -08:00
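For reference, the `VecDeque`-to-`Stream` conversion the commit describes looks roughly like this (a sketch assuming a `futures = "0.3"` dependency, not the actual nushell code):

```
use futures::executor::block_on;
use futures::stream::{self, StreamExt};
use std::collections::VecDeque;

fn main() {
    // futures 0.3 removed the Stream impl for VecDeque, so wrap any
    // IntoIterator with stream::iter to get a Stream back.
    let values: VecDeque<i32> = (1..=3).collect();
    let doubled: Vec<i32> = block_on(stream::iter(values).map(|v| v * 2).collect());
    assert_eq!(doubled, vec![2, 4, 6]);
}
```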
ba1b67c072 Attempt rustup update on each PR (#1345)
* Attempt update on each PR

* Update fmt
2020-02-05 19:28:49 -08:00
fa910b95b7 Have from-ssv not fail for header-only inputs (#1334) 2020-02-05 11:54:14 -08:00
427bde83f7 Allow cp to overwrite existing files (#1339) 2020-02-05 01:54:05 -05:00
7a0bc6bc46 Opt-out unused heim features from sys/ps plugins. (#1335) 2020-02-04 01:51:14 -05:00
c6da56949c Add support for plugin names containing numbers (#1321)
* Add ability to have numbers in plugin name. Plugin must start with alphabetic char

* remove the first character as alphabetic requirement

* Update cli.rs

Going ahead and changing to plus to prevent issue notryanb found

* Update cli.rs

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
2020-02-01 22:08:38 -08:00
5b398d2ed2 Adding cross-platform kill command (#1326)
* Adding kill command, unclean code

* Removing old comments

* Added quiet option, supports variable number of ids

* Made it per_item_command, calling commands directly without the shell
2020-02-01 10:46:28 -08:00
dcdfa2a866 Improve tests and labeling in FilesystemShell (#1305)
Additional `ls` command tests and better FilesystemShell error and label messages.
2020-02-01 03:34:34 -05:00
9474fa1ea5 Improved code in du command (#1320)
Made the code a little easier to read
2020-02-01 03:32:06 -05:00
49a1385543 Make tests work from directory names with spaces (#1325) 2020-01-31 22:12:56 -08:00
6427ea2331 Update Cargo.lock for ichwh fix (#1312)
`ichwh@0.3.1` fixes a bug that causes path searches to fail. We update
`Cargo.lock` to fix this.

Resolves #1207
2020-01-31 22:11:42 -08:00
3610baa227 Default plugins are independent and called from Nu. (#1322) 2020-01-31 17:45:33 -05:00
4e201d20ca Paths from Nu config take priority over external paths. (#1319) 2020-01-31 14:19:47 -05:00
1fa21ff056 Exclude images to reduce crate by 3MB (#1316)
Maybe there are more candidates for exclusion, but 'images/'
seemed obviously unnecessary.

Something I started realizing lately is that cargo puts most
of the root directory into the crate archive, causing huge
crates to appear on crates.io.

Now that I am in China, I do seem to notice every kilobyte.
2020-01-31 10:38:26 -05:00
0bbd12e37f Improve the default help message (#1313) 2020-01-30 20:13:14 -08:00
7df8fdfb28 Rename the now-deprecated add command docs into insert comm… (#1307) 2020-01-30 08:15:20 -05:00
6a39cd8546 Add docs for the calc command (#1290) 2020-01-29 08:34:54 -05:00
dc3370b103 Make a calc command (#1280) 2020-01-29 08:34:36 -05:00
ac5ad45783 Pretty Nu print default, pretty print regular secondary as raw flag. (#1302) 2020-01-29 02:46:54 -05:00
8ef5c47515 Update cargo flags (#1295)
* Update cargo flags

See https://github.com/nushell/nushell.github.io/issues/29

* Update to suggested flag
2020-01-28 22:44:49 -08:00
5b19bebe7d Isolate environment state changes into Host. (#1296)
Moves the state changes for setting and removing environment variables
into the context's host as opposed to calling `std::env::*` directly
from anywhere else.

Introduced FakeHost to prevent environment state changes leaking
between unit tests and causing random test failures.
2020-01-29 00:40:06 -05:00
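The pattern described above, in miniature (the names are illustrative, not nushell's actual Host API): environment changes go through a trait, so tests can swap in an in-memory map and nothing leaks into the real process environment.

```
use std::collections::HashMap;

trait Host {
    fn env_set(&mut self, key: &str, value: &str);
    fn env_get(&self, key: &str) -> Option<String>;
}

/// Production host: talks to the real process environment.
struct RealHost;

impl Host for RealHost {
    fn env_set(&mut self, key: &str, value: &str) {
        std::env::set_var(key, value);
    }
    fn env_get(&self, key: &str) -> Option<String> {
        std::env::var(key).ok()
    }
}

/// Test host: keeps variables in memory, isolated per test.
#[derive(Default)]
struct FakeHost {
    vars: HashMap<String, String>,
}

impl Host for FakeHost {
    fn env_set(&mut self, key: &str, value: &str) {
        self.vars.insert(key.to_string(), value.to_string());
    }
    fn env_get(&self, key: &str) -> Option<String> {
        self.vars.get(key).cloned()
    }
}

fn main() {
    let mut host = FakeHost::default();
    host.env_set("NU_DEMO_VAR", "value");
    assert_eq!(host.env_get("NU_DEMO_VAR").as_deref(), Some("value"));
    // The real process environment is untouched.
    assert!(std::env::var("NU_DEMO_VAR").is_err());
}
```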
2c529cd849 Fix bug where --with-symlink-targets would not display the targets column (#1300) 2020-01-28 21:36:20 -08:00
407f36af29 Remove unused dep (#1298) 2020-01-29 16:44:03 +13:00
763fcbc137 Bump to 0.9.0 (#1297) 2020-01-29 15:17:02 +13:00
7061af712e ls will return error if no files/folders match path/pattern (#1286)
* `ls` will return error if no files/folders match path/pattern

* Revert changes to src/data/files.rs

* Add a name_only flag to dir_entry_dict

Add name_only flag to indicate if the caller only cares about filenames
or wants the whole path

* Update ls changes from feedback

* Little cleanup

* Resolve merge conflicts

* lints
2020-01-29 05:58:31 +13:00
9b4ba09c95 Nu env vars from config have higher priority. (#1294) 2020-01-28 02:10:15 -05:00
9ec6d0c90e Add --with-symlink-targets option for the ls command that displays a new column for the target files of symlinks (#1292)
* Add `--with-symlink-targets` option for the `ls` command that displays a new column for the target files of symlinks

* Fix clippy warning

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
2020-01-28 19:48:41 +13:00
f20a4a42e8 Let's the expander look for tokens from start. (#1293) 2020-01-28 01:03:28 -05:00
caa6830184 Baseline environment and configuration work. (#1287) 2020-01-27 22:13:22 -05:00
f8be1becf2 Updated rustyline to 6.0.0. Added completion_mode config (#1289)
* Updated rustyline to 6.0.0. Added completion_mode config

* Formatted completion_mode config
2020-01-27 16:41:17 +13:00
af51a0e6f0 Update motto 2020-01-27 16:32:02 +13:00
23d11d5e84 Nu source overview (#1282)
* add some notes into README for more elaboration

* rewrite the overview

* remove unused first line

* add last part about tracing and debugging

* change the wording to make it easier to read

* Add example of metadata system

* Add contact information as other helpful links
2020-01-27 15:55:02 +13:00
6da9e2aced Upgrade crossterm (#1288)
* WIP

* Finish porting to new crossterm

* Fmt
2020-01-27 15:51:46 +13:00
32dfb32741 Switch from subprocess crate to the builtin std::process (#1284)
* Switch from subprocess crate to the builtin std::process

* Update external.rs

* Update external.rs

* Update external.rs

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
2020-01-26 16:03:21 +13:00
d48f99cb0e compute directory sizes from contained files and directories (#1250)
* compute directory sizes from contained files and directories

* De-lint

* Revert "De-lint"

This reverts commit 9df9fc07d777014fef8f5749a84b4e52e1ee652a.

* Revert "compute directory sizes from contained files and directories"

This reverts commit d43583e9aa20438bd613f78a36e641c9fd48cae3.

* Nu du command

* Nu du for you

* Add async support

* Lints

* so much bug fixing
2020-01-26 15:43:29 +13:00
35359cbc22 Buffer tables until a timeout or threshold is met (#1283) 2020-01-26 09:09:51 +13:00
b52dbcc8ef Separate dissimilar tables into separate tables (#1281)
* Allow the table command to stream

* Next part of table view refactor
2020-01-26 07:10:20 +13:00
4429a75e17 Make ls show only the file name (#1276)
* Make ls show only the file name

* Refactor and remove unwraps

* Put functionality in separate flag
2020-01-26 05:20:33 +13:00
583f27dc41 Added attributes to from-xml command (#1272)
* Added attributes to from-xml command

* Added attributes as their own rows

* Removed unnecessary lifetime declarations

* from-xml now has children and attributes side by side

* Fixed tests and linting

* Fixed lint-problem
2020-01-26 05:16:40 +13:00
83db5c34c3 Add docs for the from-ods and from-xlsx commands (#1279) 2020-01-26 04:31:20 +13:00
cdbfdf282f Allow the table command to stream (#1278) 2020-01-25 16:13:12 +13:00
a5e1372bc2 RM error on bad filename (#1244)
* rm error on bad filename

* De-lint

* Fix error message in test
2020-01-25 08:16:41 +13:00
798a24eda5 Soften restrictions for external parameters (#1277)
* Soften restrictions for external parameters

* Add test
2020-01-25 08:14:49 +13:00
a2bb23d78c Update README.md 2020-01-25 06:51:24 +13:00
d38a63473b Improve shelling out (#1273)
Improvements to shelling out
2020-01-24 08:24:31 +13:00
2b37ae3e81 Switch to using subprocess::shell (#1264)
* Switch to using `shell`

Switch to using the shell for subprocess to enable more natural shelling out.

* Update external.rs

* This is a test with .shell() for external

* El pollo loco's PR

* co co co

* Attempt to fix windows

* Fmt

* Less is more?

Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
2020-01-24 05:21:05 +13:00
bc5a969562 [Gitpod] Add some VSCode extensions (#1268)
VSCode extensions for productive work.
2020-01-23 00:51:08 -05:00
fe4ad5f77e Color named type help special case. (#1263)
Refactored out help named type as switch.
2020-01-22 19:36:48 -05:00
07191754bf Update ichwh to 3.0 (#1267)
The [newest update for ichwh][changes] introduced some breaking changes.
This PR bumps the version and refactors the `which` command to take
these into account.

[changes]: https://gitlab.com/avandesa/ichwh-rs/blob/master/CHANGELOG.md#030-2020-01-22
2020-01-23 12:26:49 +13:00
66bd331ba9 Make futures-timer a non-optional dependency (#1265)
Originally, it was only brought in with the `ps` feature enabled.
However, commit #ba7a17 made the crate used unconditionally in
`src/commands/classified/external.rs`, causing the build to fail when
built without the `ps` feature.

This commit fixes the problem by making it a non-optional dependency.
2020-01-23 10:56:29 +13:00
762c798670 It ls test setup rewrite. (#1260) 2020-01-21 22:56:12 -05:00
3c01526869 Test binaries no longer belong to stable or default features. (#1259) 2020-01-21 22:00:27 -05:00
7efb31a4e4 Restructure and streamline token expansion (#1123)
Restructure and streamline token expansion

The purpose of this commit is to streamline the token expansion code, by
removing aspects of the code that are no longer relevant, removing
pointless duplication, and eliminating the need to pass the same
arguments to `expand_syntax`.

The first big-picture change in this commit is that instead of a handful
of `expand_` functions, which take a TokensIterator and ExpandContext, a
smaller number of methods on the `TokensIterator` do the same job.

The second big-picture change in this commit is fully eliminating the
coloring traits, making coloring a responsibility of the base expansion
implementations. This also means that the coloring tracer is merged into
the expansion tracer, so you can follow a single expansion and see how
the expansion process produced colored tokens.

One side effect of this change is that the expander itself is marginally
more error-correcting. The error correction works by switching from
structured expansion to `BackoffColoringMode` when an unexpected token
is found, which guarantees that all spans of the source are colored, but
may not be the most optimal error recovery strategy.

That said, because `BackoffColoringMode` only extends as far as a
closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in
a fairly granular correction strategy.

The current code still produces an `Err` (plus a complete list of
colored shapes) from the parsing process if any errors are encountered,
but this could easily be addressed now that the underlying expansion is
error-correcting.

This commit also colors any spans that are syntax errors in red, and
causes the parser to include some additional information about what
tokens were expected at any given point where an error was encountered,
so that completions and hinting could be more robust in the future.

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
2020-01-21 17:45:03 -05:00
c8dd7838a8 Bump Pipeline images (#1255)
* Bump Pipeline images

* Update azure-pipelines.yml

* Update azure-pipelines.yml
2020-01-22 10:39:31 +13:00
3b57ee5dda Add internal clear command (#1249)
* Add clear.rs

* update

* update

* cross-platformify

* update

* fix

* format

* fix warnings

* update implementation

* remove return

* remove semicolon

* change from `.output()` to `.status()`

* format
2020-01-20 20:05:32 +13:00
fb977ab941 add automated setup badge and add .gitpod.yml patch (#1246)
* add automated setup badge and add .gitpod.yml patch

* Update .gitpod.yml
2020-01-20 14:40:04 +13:00
e059c74a06 Add support for primitive values to sort-by (#1241)
* Remove redundant clone

* Add support for primitive values to sort-by #1238
2020-01-20 08:08:36 +13:00
47d987d37f Add ctrl_c to RunnablePerItemContext. (#1239)
Also, this commit makes `ls` a per-item command.

A command that processes things item by item may still take some time to stream
out the results from a single item. For example, `ls` on a directory with a lot
of files could be interrupted in the middle of showing all of these files.
2020-01-19 15:25:07 +13:00
3abfefc025 More docs and random fixes (#1237) 2020-01-19 08:42:36 +13:00
a5c5b4e711 Add --help for commands (#1226)
* WIP --help works for PerItemCommands.

* De-linting

* Add more comments (#1228)

* Add some more docs

* More docs

* More docs

* More docs (#1229)

* Add some more docs

* More docs

* More docs

* Add more docs

* External commands: wrap values that contain spaces in quotes (#1214) (#1220)

* External commands: wrap values that contain spaces in quotes (#1214)

* Add fn's argument_contains_whitespace & add_quotes (#1214)

*  Fix formatting with cargo fmt

* Don't wrap argument in quotes when $it is already quoted (#1214)

* Implement --help for internal commands

* Externals now spawn independently. (#1230)

This commit changes the way we shell out externals when using the `"$it"` argument. Also pipes per row to an external's stdin if no `"$it"` argument is present for external commands. 

Further separation of logic (preparing the external's command arguments, getting the data for piping, emitting values, spawning processes) will give us a better idea of the lower-level details regarding external commands until we can find the right abstractions for making them more generic and unifying them within the pipeline calling logic of Nu internals and externals.

* Poll externals quicker. (#1231)

* WIP --help works for PerItemCommands.

* De-linting

* Implement --help for internal commands

* Make having --help the default

* Update test to include new default switch

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Koenraad Verheyden <mail@koenraadverheyden.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
2020-01-18 11:46:18 +13:00
ba9cb753d5 Bump some of our dependencies (#1234) 2020-01-18 09:35:48 +13:00
ba7a1752db Poll externals quicker. (#1231) 2020-01-16 06:27:12 -05:00
29431e73c2 Externals now spawn independently. (#1230)
This commit changes the way we shell out externals when using the `"$it"` argument. It also pipes each row to an external's stdin when no `"$it"` argument is present for external commands.

Further separation of logic (preparing the external's command arguments, getting the data for piping, emitting values, spawning processes) will give us a better handle on the lower-level details of external commands until we can find the right abstractions for making them more generic and unifying them within the pipeline-calling logic of Nu's internals and externals.
2020-01-16 04:05:53 -05:00
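As a rough illustration of the two input modes described in this commit message (the external commands and column used here are only examples, not part of this changeset):

```
# With a "$it" argument, the external is re-invoked per row, with the row's
# value substituted into its arguments:
ls | get name | ^echo $it

# Without a "$it" argument, each row is written to the external's stdin instead:
ls | get name | ^grep toml
```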
d29fe6f6de External commands: wrap values that contain spaces in quotes (#1214) (#1220)
* External commands: wrap values that contain spaces in quotes (#1214)

* Add fn's argument_contains_whitespace & add_quotes (#1214)

*  Fix formatting with cargo fmt

* Don't wrap argument in quotes when $it is already quoted (#1214)
2020-01-16 13:38:16 +13:00
e2e9abab0a More docs (#1229)
* Add some more docs

* More docs

* More docs

* Add more docs
2020-01-16 07:32:46 +13:00
2956b0b087 Add more comments (#1228)
* Add some more docs

* More docs

* More docs
2020-01-16 05:28:31 +13:00
b32eceffb3 Add some comments (#1225) 2020-01-14 20:38:56 +13:00
3adf52b1c4 update .gitignore to exclude target directories in the crate directory (#1221) 2020-01-14 20:14:24 +13:00
78a644da2b Restrict Nu with a cleaned environment. (#1222) 2020-01-13 23:17:20 -05:00
98028433ad $it: add conversion from Int for external commands (#1218) 2020-01-13 18:57:44 -05:00
2ab5803f00 $it: add conversion from Path for external commands (#1210)
* $it: add conversion from Path for external commands (#1203)

* Replace PathBuf::to_str with to_string_lossy
2020-01-14 05:41:18 +13:00
65980c7beb Revert 8cadc5a4 (#1211) 2020-01-13 19:38:58 +13:00
29fd8b55fb Keep dummies in default features for convenience. (#1212) 2020-01-13 01:17:56 -05:00
2f039b3abc Fix crash when attempting to enter help shell (#1201)
`enter help` would result in a crash
2020-01-13 17:27:00 +13:00
d3dae05714 Groundwork for coverage with Nu internals. (#1205) 2020-01-12 16:44:22 -05:00
5fd3191d91 Fix randomly failing test (#1200)
* Fix randomly failing test

* Fix randomly failing test
2020-01-13 06:03:28 +13:00
0dcd90cb8f Silence stdout for test runs. (#1198) 2020-01-12 04:14:10 -05:00
02d0a4107e A few ls improvements. New welcome message (#1195) 2020-01-12 09:49:20 +13:00
63885c4ee6 Change black to other colors (#1194) 2020-01-12 06:21:59 +13:00
147bfefd7e Sort ls case-insensitively by default (#1192) 2020-01-11 20:59:55 +13:00
60043df917 Allow ColumnPaths when picking tables. (#1191) 2020-01-11 01:45:09 -05:00
6d3a30772d Get error message improvements. (#1185)
More specific "get" command error messages + test refactoring.
2020-01-10 10:44:24 -05:00
347f91ab53 Have internal/external/pipelines taken an optional InputStream. (#1182)
Primarily, this fixes an issue where we always open a stdin pipe for
external commands, which can break various interactive commands (e.g.,
editors).
2020-01-09 22:31:44 -08:00
5692a08e7f Update README.md 2020-01-10 09:40:30 +13:00
515a3b33f8 Thin-lines for tables for better rendering (#1181)
The thick lines are pretty subtle and some fonts have issues with it. Seems keeping the lines consistent works better across fonts.
2020-01-09 12:33:02 -08:00
c3e466e464 Make debug command always pretty-print (Resolves #1178) (#1180) 2020-01-09 11:24:21 -08:00
00c0327031 Support more Values to plain string. (#1169)
* Support more Values to plain string.

* Continue converting to delimited data for simple values.
2020-01-08 06:12:59 -05:00
7451414b9e Eliminate ClassifiedInputStream in favour of InputStream. (#1056) 2020-01-07 13:00:01 -08:00
565 changed files with 27237 additions and 24752 deletions


@ -4,25 +4,25 @@ trigger:
strategy:
matrix:
linux-stable:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'unflagged'
macos-stable:
image: macos-10.14
style: 'unflagged'
windows-stable:
image: vs2017-win2016
image: windows-2019
style: 'unflagged'
linux-nightly-canary:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'canary'
macos-nightly-canary:
image: macos-10.14
style: 'canary'
windows-nightly-canary:
image: vs2017-win2016
image: windows-2019
style: 'canary'
fmt:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'fmt'
pool:
@ -35,20 +35,24 @@ steps:
then
sudo apt-get -y install libxcb-composite0-dev libx11-dev
fi
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
export PATH=$HOME/.cargo/bin:$PATH
if [ "$(uname)" == "Darwin" ]; then
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
echo "Installing clippy"
rustup component add clippy --toolchain stable-x86_64-apple-darwin
export PATH=$HOME/.cargo/bin:$PATH
fi
rustup update
rustc -Vv
echo "##vso[task.prependpath]$HOME/.cargo/bin"
rustup component add rustfmt --toolchain "stable"
rustup component add rustfmt
displayName: Install Rust
- bash: RUSTFLAGS="-D warnings" cargo test --all --features=stable
- bash: RUSTFLAGS="-D warnings" cargo test --all --features stable,test-bins
condition: eq(variables['style'], 'unflagged')
displayName: Run tests
- bash: RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
condition: eq(variables['style'], 'unflagged')
displayName: Check clippy lints
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=stable
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features stable,test-bins
condition: eq(variables['style'], 'canary')
displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used


@ -1,3 +1,3 @@
[build]
#rustflags = ["--cfg", "coloring_in_tokens"]
#rustflags = ["--cfg", "data_processing_primitives"]


@ -38,7 +38,7 @@ workflows:
extra_build_args: --cache-from=quay.io/nushell/nu-base:devel
filters:
branches:
ignore:
ignore:
- master
before_build:
- pull_cache

.dockerignore (new file, 1 change)

@ -0,0 +1 @@
target


@ -24,7 +24,7 @@ jobs:
run: |
cross build --target ${{ matrix.arch }} --release
# leave only the executable file
rm -rd target/${{ matrix.arch }}/release/{*/*,*.d,*.rlib,.fingerprint}
rm -frd target/${{ matrix.arch }}/release/{*/*,*.d,*.rlib,.fingerprint}
find . -empty -delete
- uses: actions/upload-artifact@master
with:
@ -52,15 +52,15 @@ jobs:
- glibc
- musl
include:
- { tag: alpine, base-image: alpine, arch: x86_64-unknown-linux-musl, plugin: true }
- { tag: slim, base-image: 'debian:stable-slim', arch: x86_64-unknown-linux-gnu, plugin: true }
- { tag: debian, base-image: debian, arch: x86_64-unknown-linux-gnu, plugin: true }
- { tag: glibc-busybox, base-image: 'busybox:glibc', arch: x86_64-unknown-linux-gnu, use-patch: true }
- { tag: musl-busybox, base-image: 'busybox:musl', arch: x86_64-unknown-linux-musl, }
- { tag: musl-distroless, base-image: 'gcr.io/distroless/static', arch: x86_64-unknown-linux-musl, }
- { tag: glibc-distroless, base-image: 'gcr.io/distroless/cc', arch: x86_64-unknown-linux-gnu, use-patch: true }
- { tag: glibc, base-image: scratch, arch: x86_64-unknown-linux-gnu, }
- { tag: musl, base-image: scratch, arch: x86_64-unknown-linux-musl, }
- { tag: alpine, base-image: alpine, arch: x86_64-unknown-linux-musl, plugin: true, use-patch: false}
- { tag: slim, base-image: 'debian:stable-slim', arch: x86_64-unknown-linux-gnu, plugin: true, use-patch: false}
- { tag: debian, base-image: debian, arch: x86_64-unknown-linux-gnu, plugin: true, use-patch: false}
- { tag: glibc-busybox, base-image: 'busybox:glibc', arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: true }
- { tag: musl-busybox, base-image: 'busybox:musl', arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
- { tag: musl-distroless, base-image: 'gcr.io/distroless/static', arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
- { tag: glibc-distroless, base-image: 'gcr.io/distroless/cc', arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: true }
- { tag: glibc, base-image: scratch, arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: false}
- { tag: musl, base-image: scratch, arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
steps:
- uses: actions/checkout@v1
- uses: actions/download-artifact@master

.gitignore (vendored, 10 changes)

@ -3,6 +3,7 @@
**/*.rs.bk
history.txt
tests/fixtures/nuplayground
crates/*/target
# Debian/Ubuntu
debian/.debhelper/
@ -10,3 +11,12 @@ debian/debhelper-build-stamp
debian/files
debian/nu.substvars
debian/nu/
# macOS junk
.DS_Store
# JetBrains' IDE items
.idea/*
# VSCode's IDE items
.vscode/*

.gitpod.Dockerfile (vendored, 19 changes)

@ -1,7 +1,14 @@
FROM gitpod/workspace-full
USER root
RUN apt-get update && apt-get install -y libssl-dev \
libxcb-composite0-dev \
pkg-config \
curl \
rustc
USER gitpod
RUN sudo apt-get update && \
sudo apt-get install -y \
libssl-dev \
libxcb-composite0-dev \
pkg-config \
libpython3.6 \
rust-lldb \
&& sudo rm -rf /var/lib/apt/lists/*
ENV RUST_LLDB=/usr/bin/lldb-8


@ -1,21 +1,25 @@
image:
file: .gitpod.Dockerfile
tasks:
- init: cargo install nu --features=stable
- name: Clippy
init: cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
- name: Testing
init: cargo test --all --features=stable,test-bins
- name: Build
init: cargo build --features=stable
- name: Nu
init: cargo install --path . --features=stable
command: nu
github:
prebuilds:
# enable for the master/default branch (defaults to true)
master: true
# enable for all branches in this repo (defaults to false)
branches: true
# enable for pull requests coming from this repo (defaults to true)
pullRequests: true
# enable for pull requests coming from forks (defaults to false)
pullRequestsFromForks: true
# add a "Review in Gitpod" button as a comment to pull requests (defaults to true)
addComment: true
# add a "Review in Gitpod" button to pull requests (defaults to false)
addBadge: false
# add a label once the prebuild is ready to pull requests (defaults to false)
addLabel: prebuilt-in-gitpod
vscode:
extensions:
- hbenl.vscode-test-explorer@2.15.0:koqDUMWDPJzELp/hdS/lWw==
- Swellaby.vscode-rust-test-adapter@0.11.0:Xg+YeZZQiVpVUsIkH+uiiw==
- serayuzgur.crates@0.4.7:HMkoguLcXp9M3ud7ac3eIw==
- belfz.search-crates-io@1.2.1:kSLnyrOhXtYPjQpKnMr4eQ==
- bungcip.better-toml@0.3.2:3QfgGxxYtGHfJKQU7H0nEw==
- webfreak.debug@0.24.0:1zVcRsAhewYEX3/A9xjMNw==

.theia/launch.json (new file, 14 changes)

@ -0,0 +1,14 @@
{
"version": "0.2.0",
"configurations": [
{
"type": "gdb",
"request": "launch",
"name": "Debug Rust Code",
"preLaunchTask": "cargo",
"target": "${workspaceFolder}/target/debug/nu",
"cwd": "${workspaceFolder}",
"valuesFormatting": "parseText"
}
]
}

.theia/tasks.json (new file, 12 changes)

@ -0,0 +1,12 @@
{
"tasks": [
{
"command": "cargo",
"args": [
"build"
],
"type": "process",
"label": "cargo",
}
],
}

CONTRIBUTING.md (new file, 28 changes)

@ -0,0 +1,28 @@
Welcome to nushell!
*Note: for a more complete guide see [The nu contributor book](https://github.com/nushell/contributor-book)*
For speedy contributions, open it in Gitpod: nu will be pre-installed with the latest build in a VSCode-like editor, all from your browser.
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/nushell/nushell)
To get live support from the community see our [Discord](https://discordapp.com/invite/NtAbbGn), [Twitter](https://twitter.com/nu_shell) or file an issue or feature request here on [GitHub](https://github.com/nushell/nushell/issues/new/choose)!
<!--WIP-->
# Developing
## Set up
This is no different than other Rust projects.
```shell
git clone https://github.com/nushell/nushell
cd nushell
cargo build
```
## Tests
Run tests with:
```shell
cargo test --all --features=stable,test-bins
```
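The CI configuration earlier in this diff also runs clippy with the unwrap lints denied; running the same check locally before pushing (optional, but it matches what the pipeline enforces) looks like:

```shell
cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
```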

Cargo.lock (generated, 1810 changes): file diff suppressed because it is too large


@ -1,8 +1,8 @@
[package]
name = "nu"
version = "0.8.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
description = "A shell for the GitHub era"
version = "0.14.0"
authors = ["The Nu Project Contributors"]
description = "A new type of shell"
license = "MIT"
edition = "2018"
readme = "README.md"
@ -10,141 +10,66 @@ default-run = "nu"
repository = "https://github.com/nushell/nushell"
homepage = "https://www.nushell.sh"
documentation = "https://www.nushell.sh/book/"
exclude = ["images"]
[workspace]
members = [
"crates/nu-macros",
"crates/nu-errors",
"crates/nu-source",
"crates/nu_plugin_average",
"crates/nu_plugin_binaryview",
"crates/nu_plugin_fetch",
"crates/nu_plugin_inc",
"crates/nu_plugin_match",
"crates/nu_plugin_post",
"crates/nu_plugin_ps",
"crates/nu_plugin_str",
"crates/nu_plugin_sum",
"crates/nu_plugin_sys",
"crates/nu_plugin_textview",
"crates/nu_plugin_tree",
"crates/nu-protocol",
"crates/nu-plugin",
"crates/nu-parser",
"crates/nu-value-ext",
"crates/nu-build"
]
members = ["crates/*/"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
nu-source = { version = "0.8.0", path = "./crates/nu-source" }
nu-plugin = { version = "0.8.0", path = "./crates/nu-plugin" }
nu-protocol = { version = "0.8.0", path = "./crates/nu-protocol" }
nu-errors = { version = "0.8.0", path = "./crates/nu-errors" }
nu-parser = { version = "0.8.0", path = "./crates/nu-parser" }
nu-value-ext = { version = "0.8.0", path = "./crates/nu-value-ext" }
nu_plugin_average = {version = "0.8.0", path = "./crates/nu_plugin_average", optional=true}
nu_plugin_binaryview = {version = "0.8.0", path = "./crates/nu_plugin_binaryview", optional=true}
nu_plugin_fetch = {version = "0.8.0", path = "./crates/nu_plugin_fetch", optional=true}
nu_plugin_inc = {version = "0.8.0", path = "./crates/nu_plugin_inc", optional=true}
nu_plugin_match = {version = "0.8.0", path = "./crates/nu_plugin_match", optional=true}
nu_plugin_post = {version = "0.8.0", path = "./crates/nu_plugin_post", optional=true}
nu_plugin_ps = {version = "0.8.0", path = "./crates/nu_plugin_ps", optional=true}
nu_plugin_str = {version = "0.8.0", path = "./crates/nu_plugin_str", optional=true}
nu_plugin_sum = {version = "0.8.0", path = "./crates/nu_plugin_sum", optional=true}
nu_plugin_sys = {version = "0.8.0", path = "./crates/nu_plugin_sys", optional=true}
nu_plugin_textview = {version = "0.8.0", path = "./crates/nu_plugin_textview", optional=true}
nu_plugin_tree = {version = "0.8.0", path = "./crates/nu_plugin_tree", optional=true}
nu-macros = { version = "0.8.0", path = "./crates/nu-macros" }
nu-cli = { version = "0.14.0", path = "./crates/nu-cli" }
nu-source = { version = "0.14.0", path = "./crates/nu-source" }
nu-plugin = { version = "0.14.0", path = "./crates/nu-plugin" }
nu-protocol = { version = "0.14.0", path = "./crates/nu-protocol" }
nu-errors = { version = "0.14.0", path = "./crates/nu-errors" }
nu-parser = { version = "0.14.0", path = "./crates/nu-parser" }
nu-value-ext = { version = "0.14.0", path = "./crates/nu-value-ext" }
nu_plugin_average = { version = "0.14.0", path = "./crates/nu_plugin_average", optional=true }
nu_plugin_binaryview = { version = "0.14.0", path = "./crates/nu_plugin_binaryview", optional=true }
nu_plugin_fetch = { version = "0.14.0", path = "./crates/nu_plugin_fetch", optional=true }
nu_plugin_inc = { version = "0.14.0", path = "./crates/nu_plugin_inc", optional=true }
nu_plugin_match = { version = "0.14.0", path = "./crates/nu_plugin_match", optional=true }
nu_plugin_post = { version = "0.14.0", path = "./crates/nu_plugin_post", optional=true }
nu_plugin_ps = { version = "0.14.0", path = "./crates/nu_plugin_ps", optional=true }
nu_plugin_start = { version = "0.1.0", path = "./crates/nu_plugin_start", optional=true }
nu_plugin_str = { version = "0.14.0", path = "./crates/nu_plugin_str", optional=true }
nu_plugin_sys = { version = "0.14.0", path = "./crates/nu_plugin_sys", optional=true }
nu_plugin_textview = { version = "0.14.0", path = "./crates/nu_plugin_textview", optional=true }
nu_plugin_tree = { version = "0.14.0", path = "./crates/nu_plugin_tree", optional=true }
crossterm = { version = "0.17.2", optional = true }
semver = { version = "0.9.0", optional = true }
syntect = { version = "4.1", default-features = false, features = ["default-fancy"], optional = true}
url = { version = "2.1.1", optional = true }
query_interface = "0.3.5"
typetag = "0.1.4"
rustyline = "5.0.6"
chrono = { version = "0.4.10", features = ["serde"] }
derive-new = "0.5.8"
prettytable-rs = "0.8.0"
itertools = "0.8.2"
ansi_term = "0.12.1"
nom = "5.0.1"
dunce = "1.0.0"
indexmap = { version = "1.3.0", features = ["serde-1"] }
chrono-humanize = "0.0.11"
byte-unit = "3.0.3"
base64 = "0.11"
futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
async-stream = "0.1.2"
futures_codec = "0.2.5"
num-traits = "0.2.10"
term = "0.5.2"
bytes = "0.4.12"
log = "0.4.8"
pretty_env_logger = "0.3.1"
serde = { version = "1.0.103", features = ["derive"] }
bson = { version = "0.14.0", features = ["decimal128"] }
serde_json = "1.0.44"
serde-hjson = "0.9.1"
serde_yaml = "0.8"
serde_bytes = "0.11.3"
getset = "0.0.9"
language-reporting = "0.4.0"
app_dirs = "1.2.1"
csv = "1.1"
toml = "0.5.5"
clap = "2.33.0"
git2 = { version = "0.10.2", default_features = false }
dirs = "2.0.2"
glob = "0.3.0"
ctrlc = "3.1.3"
roxmltree = "0.7.3"
nom_locate = "1.0.0"
nom-tracable = "0.4.1"
unicode-xid = "0.2.0"
serde_ini = "0.2.0"
subprocess = "0.1.18"
pretty-hex = "0.1.1"
hex = "0.4"
tempfile = "3.1.0"
ichwh = "0.2"
textwrap = {version = "0.11.0", features = ["term_size"]}
shellexpand = "1.0.0"
pin-utils = "0.1.0-alpha.4"
num-bigint = { version = "0.2.3", features = ["serde"] }
bigdecimal = { version = "0.1.0", features = ["serde"] }
serde_urlencoded = "0.6.1"
trash = "1.0.0"
regex = "1"
cfg-if = "0.1"
strip-ansi-escapes = "0.1.0"
calamine = "0.16"
umask = "0.1"
futures-util = "0.3.1"
termcolor = "1.0.5"
natural = "0.3.0"
parking_lot = "0.10.0"
ctrlc = "3.1.4"
dunce = "1.0.0"
futures = { version = "0.3", features = ["compat", "io-compat"] }
log = "0.4.8"
pretty_env_logger = "0.4.0"
clipboard = {version = "0.5", optional = true }
ptree = {version = "0.2" }
starship = { version = "0.28", optional = true}
heim = {version = "0.0.9", optional = true}
battery = {version = "0.7.5", optional = true}
syntect = {version = "3.2.0", optional = true }
onig_sys = {version = "=69.1.0", optional = true }
crossterm = {version = "0.10.2", optional = true}
futures-timer = {version = "1.0.2", optional = true}
url = {version = "2.1.0", optional = true}
[dev-dependencies]
nu-test-support = { version = "0.14.0", path = "./crates/nu-test-support" }
[build-dependencies]
toml = "0.5.6"
serde = { version = "1.0.106", features = ["derive"] }
nu-build = { version = "0.14.0", path = "./crates/nu-build" }
[features]
# Test executables
test-bins = []
default = ["sys", "ps", "textview", "inc", "str"]
stable = ["sys", "ps", "textview", "inc", "str", "starship-prompt", "binaryview", "match", "tree", "average", "sum", "post", "fetch", "clipboard"]
stable = ["default", "starship-prompt", "binaryview", "match", "tree", "average", "post", "fetch", "clipboard-cli", "trash-support", "start"]
# Default
sys = ["heim", "battery"]
ps = ["heim", "futures-timer"]
textview = ["crossterm", "syntect", "onig_sys", "url"]
inc = ["nu_plugin_inc"]
textview = ["crossterm", "syntect", "url", "nu_plugin_textview"]
sys = ["nu_plugin_sys"]
ps = ["nu_plugin_ps"]
inc = ["semver", "nu_plugin_inc"]
str = ["nu_plugin_str"]
# Stable
@ -153,28 +78,38 @@ binaryview = ["nu_plugin_binaryview"]
fetch = ["nu_plugin_fetch"]
match = ["nu_plugin_match"]
post = ["nu_plugin_post"]
starship-prompt = ["starship"]
sum = ["nu_plugin_sum"]
trace = ["nu-parser/trace"]
tree = ["nu_plugin_tree"]
start = ["nu_plugin_start"]
[dependencies.rusqlite]
version = "0.20.0"
features = ["bundled", "blob"]
clipboard-cli = ["nu-cli/clipboard-cli"]
starship-prompt = ["nu-cli/starship-prompt"]
trash-support = ["nu-cli/trash-support"]
[dev-dependencies]
pretty_assertions = "0.6.1"
nu-test-support = { version = "0.8.0", path = "./crates/nu-test-support" }
[[bin]]
name = "fail"
path = "crates/nu-test-support/src/bins/fail.rs"
required-features = ["test-bins"]
[build-dependencies]
toml = "0.5.5"
serde = { version = "1.0.103", features = ["derive"] }
nu-build = { version = "0.8.0", path = "./crates/nu-build" }
[[bin]]
name = "chop"
path = "crates/nu-test-support/src/bins/chop.rs"
required-features = ["test-bins"]
[lib]
name = "nu"
doctest = false
path = "src/lib.rs"
[[bin]]
name = "cococo"
path = "crates/nu-test-support/src/bins/cococo.rs"
required-features = ["test-bins"]
[[bin]]
name = "nonu"
path = "crates/nu-test-support/src/bins/nonu.rs"
required-features = ["test-bins"]
[[bin]]
name = "iecho"
path = "crates/nu-test-support/src/bins/iecho.rs"
required-features = ["test-bins"]
# Core plugins that ship with `cargo install nu` by default
# Currently, Cargo limits us to installing only one binary
@ -204,6 +139,42 @@ name = "nu_plugin_core_sys"
path = "src/plugins/nu_plugin_core_sys.rs"
required-features = ["sys"]
# Stable plugins
[[bin]]
name = "nu_plugin_stable_average"
path = "src/plugins/nu_plugin_stable_average.rs"
required-features = ["average"]
[[bin]]
name = "nu_plugin_stable_fetch"
path = "src/plugins/nu_plugin_stable_fetch.rs"
required-features = ["fetch"]
[[bin]]
name = "nu_plugin_stable_binaryview"
path = "src/plugins/nu_plugin_stable_binaryview.rs"
required-features = ["binaryview"]
[[bin]]
name = "nu_plugin_stable_match"
path = "src/plugins/nu_plugin_stable_match.rs"
required-features = ["match"]
[[bin]]
name = "nu_plugin_stable_post"
path = "src/plugins/nu_plugin_stable_post.rs"
required-features = ["post"]
[[bin]]
name = "nu_plugin_stable_tree"
path = "src/plugins/nu_plugin_stable_tree.rs"
required-features = ["tree"]
[[bin]]
name = "nu_plugin_stable_start"
path = "src/plugins/nu_plugin_stable_start.rs"
required-features = ["start"]
# Main nu binary
[[bin]]
name = "nu"


@ -1,6 +1,6 @@
MIT License
Copyright (c) 2019 Yehuda Katz, Jonathan Turner
Copyright (c) 2019 - 2020 Yehuda Katz, Jonathan Turner
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

README.md (205 changes)

@ -1,3 +1,4 @@
[![Gitpod Ready-to-Code](https://img.shields.io/badge/Gitpod-Ready--to--Code-blue?logo=gitpod)](https://gitpod.io/#https://github.com/nushell/nushell)
[![Crates.io](https://img.shields.io/crates/v/nu.svg)](https://crates.io/crates/nu)
[![Build Status](https://dev.azure.com/nushell/nushell/_apis/build/status/nushell.nushell?branchName=master)](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master)
[![Discord](https://img.shields.io/discord/601130461678272522.svg?logo=discord)](https://discord.gg/NtAbbGn)
@ -6,24 +7,33 @@
# Nu Shell
A modern shell for the GitHub era.
A new type of shell.
![Example of nushell](images/nushell-autocomplete.gif "Example of nushell")
# Status
This project has reached a minimum-viable product level of quality. While contributors dogfood it as their daily driver, it may be unstable for some commands. Future releases will work to fill out missing features and improve stability. Its design is also subject to change as it matures.
This project has reached a minimum-viable product level of quality.
While contributors dogfood it as their daily driver, it may be unstable for some commands.
Future releases will work to fill out missing features and improve stability.
Its design is also subject to change as it matures.
Nu comes with a set of built-in commands (listed below). If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and macOS), correctly passing through stdin, stdout, and stderr, so things like your daily git workflows and even `vim` will work just fine.
Nu comes with a set of built-in commands (listed below).
If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and macOS), correctly passing through stdin, stdout, and stderr, so things like your daily git workflows and even `vim` will work just fine.
# Learning more
There are a few good resources to learn about Nu. There is a [book](https://www.nushell.sh/book/) about Nu that is currently in progress. The book focuses on using Nu and its core concepts.
There are a few good resources to learn about Nu.
There is a [book](https://www.nushell.sh/book/) about Nu that is currently in progress.
The book focuses on using Nu and its core concepts.
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://www.nushell.sh/contributor-book/) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://www.nushell.sh/contributor-book/) to help you get started.
There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
You can also find more learning resources in our [documentation](https://www.nushell.sh/documentation.html) site.
Try it in Gitpod.
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/nushell/nushell)
@ -55,11 +65,21 @@ cargo install nu
You can also build Nu yourself with all the bells and whistles (be sure to have installed the [dependencies](https://www.nushell.sh/book/en/installation.html#dependencies) for your platform), once you have checked out this repo with git:
```
cargo build --all --features=stable
cargo build --workspace --features=stable
```
## Docker
### Quickstart
Want to try Nu right away? Execute the following to get started.
```bash
docker run -it quay.io/nushell/nu:latest
```
### Guide
If you want to pull a pre-built container, you can browse tags for the [nushell organization](https://quay.io/organization/nushell)
on Quay.io. Pulling a container would come down to:
@ -104,11 +124,18 @@ The second container is a bit smaller if the size is important to you.
# Philosophy
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools. Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure. For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory. These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools.
Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure.
For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory.
These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
## Pipelines
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps. Nu takes this a step further and builds heavily on the idea of _pipelines_. Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin. Additionally, commands can output structured data (you can think of this as a third kind of stream). Commands that work in the pipeline fit into one of three categories:
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps.
Nu takes this a step further and builds heavily on the idea of _pipelines_.
Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin.
Additionally, commands can output structured data (you can think of this as a third kind of stream).
Commands that work in the pipeline fit into one of three categories:
* Commands that produce a stream (eg, `ls`)
* Commands that filter a stream (eg, `where type == "Directory"`)
@ -118,7 +145,7 @@ Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing lef
```
/home/jonathan/Source/nushell(master)> ls | where type == "Directory" | autoview
━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
────┬───────────┬───────────┬──────────┬────────┬──────────────┬────────────────
# │ name │ type │ readonly │ size │ accessed │ modified
────┼───────────┼───────────┼──────────┼────────┼──────────────┼────────────────
0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago
@ -129,59 +156,61 @@ Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing lef
5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago
7 │ docs │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago
━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
────┴───────────┴───────────┴──────────┴────────┴──────────────┴────────────────
```
Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed. We could have also written the above:
Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed.
We could have also written the above:
```
/home/jonathan/Source/nushell(master)> ls | where type == Directory
```
Being able to use the same commands and compose them differently is an important philosophy in Nu. For example, we could use the built-in `ps` command as well to get a list of the running processes, using the same `where` as above.
Being able to use the same commands and compose them differently is an important philosophy in Nu.
For example, we could use the built-in `ps` command as well to get a list of the running processes, using the same `where` as above.
```text
/home/jonathan/Source/nushell(master)> ps | where cpu > 0
━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━
───┬───────┬─────────────────┬──────────┬──────────
# │ pid │ name │ status │ cpu
───┼───────┼─────────────────┼──────────┼──────────
0 │ 992 │ chrome │ Sleeping │ 6.988768
1 │ 4240 │ chrome │ Sleeping │ 5.645982
2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551
3 │ 15746 │ nu │ Sleeping │ 84.59905
━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━
───┴───────┴─────────────────┴──────────┴──────────
```
## Opening files
Nu can load file and URL contents as raw text or as structured data (if it recognizes the format). For example, you can load a .toml file as structured data and explore it:
Nu can load file and URL contents as raw text or as structured data (if it recognizes the format).
For example, you can load a .toml file as structured data and explore it:
```
/home/jonathan/Source/nushell(master)> open Cargo.toml
━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
──────────────────┬────────────────┬──────────────────
bin │ dependencies │ dev-dependencies
──────────────────┼────────────────┼──────────────────
[table: 12 rows] │ [table: 1 row] │ [table: 1 row]
━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
──────────────────┴────────────────┴──────────────────
```
We can pipeline this into a command that gets the contents of one of the columns:
```
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package
━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━
─────────────────┬────────────────────────────┬─────────┬─────────┬──────┬─────────
authors │ description │ edition │ license │ name │ version
─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼─────────
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.6.1
━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.9.0
─────────────────┴────────────────────────────┴─────────┴─────────┴──────┴─────────
```
Finally, we can use commands outside of Nu once we have the data we want:
```
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it
0.6.1
0.9.0
```
Here we use the variable `$it` to refer to the value being piped to the external command.
@ -190,13 +219,14 @@ Here we use the variable `$it` to refer to the value being piped to the external
Nu has early support for configuring the shell. It currently supports the following settings:
| Variable | Type | Description |
| ------------- | ------------- | ----- |
| path | table of strings | PATH to use to find binaries |
| env | row | the environment variables to pass to external commands |
| ctrlc_exit | boolean | whether or not to exit Nu after multiple ctrl-c presses |
| table_mode | "light" or other | enable lightweight or normal tables |
| edit_mode | "vi" or "emacs" | changes line editing to "vi" or "emacs" mode |
| Variable | Type | Description |
| --------------- | -------------------- | -------------------------------------------------------------- |
| path | table of strings | PATH to use to find binaries |
| env | row | the environment variables to pass to external commands |
| ctrlc_exit | boolean | whether or not to exit Nu after multiple ctrl-c presses |
| table_mode | "light" or other | enable lightweight or normal tables |
| edit_mode | "vi" or "emacs" | changes line editing to "vi" or "emacs" mode |
| completion_mode | "circular" or "list" | changes completion type to "circular" (default) or "list" mode |
To set one of these variables, you can use `config --set`. For example:
@ -207,19 +237,26 @@ To set one of these variables, you can use `config --set`. For example:
## Shells
Nu will work inside of a single directory and allow you to navigate around your filesystem by default. Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories at the same time.
Nu will work inside of a single directory and allow you to navigate around your filesystem by default.
Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories at the same time.
To do so, use the `enter` command, which will allow you to create a new "shell" and enter it at the specified path. You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells. Once you're done with a shell, you can `exit` it and remove it from the ring buffer.
To do so, use the `enter` command, which will allow you to create a new "shell" and enter it at the specified path.
You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells.
Once you're done with a shell, you can `exit` it and remove it from the ring buffer.
Finally, to get a list of all the current shells, you can use the `shells` command.
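As a short illustrative session of the ring buffer described above (the path is only an example):

```
enter crates/nu-cli
shells
n
p
exit
```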
## Plugins
Nu supports plugins that offer additional functionality to the shell and follow the same structured data model that built-in commands use. This allows you to extend nu for your needs.
Nu supports plugins that offer additional functionality to the shell and follow the same structured data model that built-in commands use.
This allows you to extend nu for your needs.
There are a few examples in the `plugins` directory.
Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free rein over stdin/stdout to use as it pleases.
Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention.
These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use.
If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout.
If the plugin is a sink, it is given the full vector of final data and is given free rein over stdin/stdout to use as it pleases.
# Goals
@ -236,108 +273,14 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
* Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.
# Commands
## Initial commands
| command | description |
| ------------- | ------------- |
| cd path | Change to a new path |
| cp source path | Copy files |
| date (--utc) | Get the current datetime |
| fetch url | Fetch contents from a url and retrieve data as a table if possible |
| help | Display help information about commands |
| ls (path) | View the contents of the current or given path |
| mkdir path | Make directories, creates intermediary directories as required. |
| mv source target | Move files or directories. |
| open filename | Load a file into a cell, convert to table if possible (avoid by appending '--raw') |
| post url body (--user <user>) (--password <password>) | Post content to a url and retrieve data as a table if possible |
| ps | View current processes |
| sys | View information about the current system |
| which filename | Finds a program file. |
| rm {file or directory} | Remove a file, (for removing directory append '--recursive') |
| version | Display Nu version |
## Shell commands
| command | description |
| ------- | ----------- |
| exit (--now) | Exit the current shell (or all shells) |
| enter (path) | Create a new shell and begin at this path |
| p | Go to previous shell |
| n | Go to next shell |
| shells | Display the list of current shells |
You can find a list of Nu commands, complete with documentation, in [quick command references](https://www.nushell.sh/documentation.html#quick-command-references).
## Filters on tables (structured data)
| command | description |
| ------------- | ------------- |
| append row-data | Append a row to the end of the table |
| compact ...columns | Remove rows where given columns are empty |
| count | Show the total number of rows |
| default column row-data | Sets a default row's column if missing |
| edit column-or-column-path value | Edit an existing column to have a new value |
| embed column | Creates a new table of one column with the given name, and places the current table inside of it |
| first amount | Show only the first number of rows |
| format pattern | Format table row data as a string following the given pattern |
| get column-or-column-path | Open column and get data from the corresponding cells |
| group-by column | Creates a new table with the data from the table rows grouped by the column given |
| histogram column ...column-names | Creates a new table with a histogram based on the column name passed in, optionally give the frequency column name
| inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table |
| insert column-or-column-path value | Insert a new column to the table |
| last amount | Show only the last number of rows |
| nth ...row-numbers | Return only the selected rows |
| pick ...columns | Down-select table to only these columns |
| pivot --header-row <headers> | Pivot the tables, making columns into rows and vice versa |
| prepend row-data | Prepend a row to the beginning of the table |
| reject ...columns | Remove the given columns from the table |
| reverse | Reverses the table. |
| skip amount | Skip a number of rows |
| skip-while condition | Skips rows while the condition matches |
| split-by column | Creates a new table with the data from the inner tables splitted by the column given |
| sort-by ...columns | Sort by the given columns |
| str (column) | Apply string function. Optionally use the column of a table |
| sum | Sum a column of values |
| tags | Read the tags (metadata) for values |
| to-bson | Convert table into .bson binary data |
| to-csv | Convert table into .csv text |
| to-json | Convert table into .json text |
| to-sqlite | Convert table to sqlite .db binary data |
| to-toml | Convert table into .toml text |
| to-tsv | Convert table into .tsv text |
| to-url | Convert table to a urlencoded string |
| to-yaml | Convert table into .yaml text |
| where condition | Filter table to match the condition |
# Contributing
## Filters on text (unstructured data)
| command | description |
| ------------- | ------------- |
| from-bson | Parse binary data as .bson and create table |
| from-csv | Parse text as .csv and create table |
| from-ini | Parse text as .ini and create table |
| from-json | Parse text as .json and create table |
| from-sqlite | Parse binary data as sqlite .db and create table |
| from-ssv --minimum-spaces <minimum number of spaces to count as a separator> | Parse text as space-separated values and create table |
| from-toml | Parse text as .toml and create table |
| from-tsv | Parse text as .tsv and create table |
| from-url | Parse urlencoded string and create a table |
| from-xml | Parse text as .xml and create a table |
| from-yaml | Parse text as a .yaml/.yml and create a table |
| lines | Split single string into rows, one per line |
| parse pattern | Convert text to a table by matching the given pattern |
| size | Gather word count statistics on the text |
| split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names |
| split-row sep | Split row contents over multiple rows via the separator |
| trim | Trim leading and following whitespace from text data |
| {external-command} $it | Run external command with given arguments, replacing $it with each row text |
## Consuming commands
| command | description |
| ------------- | ------------- |
| autoview | View the contents of the pipeline as a table or list |
| binaryview | Autoview of binary data (optional feature) |
| clip | Copy the contents of the pipeline to the copy/paste buffer (optional feature) |
| save filename | Save the contents of the pipeline to a file |
| table | View the contents of the pipeline as a table |
| textview | Autoview of text data |
| tree | View the contents of the pipeline as a tree (optional feature) |
See [Contributing](CONTRIBUTING.md) for details.
# License
The project is made available under the MIT license. See "LICENSE" for more information.
The project is made available under the MIT license. See the `LICENSE` file for more information.


@ -50,3 +50,11 @@ textview in own crate
Combine atomic and atomic_parse in parser
at_end_possible_ws needs to be comment and separator sensitive
Eliminate unnecessary `nodes` parser
#[derive(HasSpan)]
Figure out a solution for the duplication in stuff like NumberShape vs. NumberExpressionShape
use `struct Expander` from signature.rs


@ -1,7 +1,7 @@
[package]
name = "nu-build"
version = "0.8.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
version = "0.14.0"
authors = ["The Nu Project Contributors"]
edition = "2018"
description = "Core build system for nushell"
license = "MIT"
@ -10,7 +10,7 @@ license = "MIT"
doctest = false
[dependencies]
serde = { version = "1.0.103", features = ["derive"] }
serde = { version = "1.0.106", features = ["derive"] }
lazy_static = "1.4.0"
serde_json = "1.0.44"
toml = "0.5.5"
serde_json = "1.0.51"
toml = "0.5.6"

crates/nu-cli/Cargo.toml (new file, 112 changes)

@ -0,0 +1,112 @@
[package]
name = "nu-cli"
version = "0.14.0"
authors = ["The Nu Project Contributors"]
description = "CLI for nushell"
edition = "2018"
license = "MIT"
[lib]
doctest = false
[dependencies]
nu-source = { version = "0.14.0", path = "../nu-source" }
nu-plugin = { version = "0.14.0", path = "../nu-plugin" }
nu-protocol = { version = "0.14.0", path = "../nu-protocol" }
nu-errors = { version = "0.14.0", path = "../nu-errors" }
nu-parser = { version = "0.14.0", path = "../nu-parser" }
nu-value-ext = { version = "0.14.0", path = "../nu-value-ext" }
nu-test-support = { version = "0.14.0", path = "../nu-test-support" }
ansi_term = "0.12.1"
app_dirs = "1.2.1"
async-stream = "0.2"
base64 = "0.12.0"
bigdecimal = { version = "0.1.0", features = ["serde"] }
bson = { version = "0.14.1", features = ["decimal128"] }
byte-unit = "3.0.3"
bytes = "0.5.4"
calamine = "0.16"
cfg-if = "0.1"
chrono = { version = "0.4.11", features = ["serde"] }
clap = "2.33.0"
csv = "1.1"
ctrlc = "3.1.4"
derive-new = "0.5.8"
dirs = "2.0.2"
dunce = "1.0.0"
eml-parser = "0.1.0"
filesize = "0.2.0"
futures = { version = "0.3", features = ["compat", "io-compat"] }
futures-util = "0.3.4"
futures_codec = "0.4"
getset = "0.1.0"
git2 = { version = "0.13.1", default_features = false }
glob = "0.3.0"
hex = "0.4"
htmlescape = "0.3.1"
ical = "0.6.*"
ichwh = "0.3.4"
indexmap = { version = "1.3.2", features = ["serde-1"] }
itertools = "0.9.0"
language-reporting = "0.4.0"
log = "0.4.8"
meval = "0.2"
natural = "0.5.0"
num-bigint = { version = "0.2.6", features = ["serde"] }
num-traits = "0.2.11"
parking_lot = "0.10.0"
pin-utils = "0.1.0-alpha.4"
pretty-hex = "0.1.1"
pretty_env_logger = "0.4.0"
prettytable-rs = "0.8.0"
ptree = {version = "0.2" }
query_interface = "0.3.5"
rand = "0.7"
regex = "1"
roxmltree = "0.10.1"
rustyline = "6.1.2"
serde = { version = "1.0.106", features = ["derive"] }
serde-hjson = "0.9.1"
serde_bytes = "0.11.3"
serde_ini = "0.2.0"
serde_json = "1.0.51"
serde_urlencoded = "0.6.1"
serde_yaml = "0.8"
shellexpand = "2.0.0"
strip-ansi-escapes = "0.1.0"
tempfile = "3.1.0"
term = "0.5.2"
termcolor = "1.1.0"
textwrap = {version = "0.11.0", features = ["term_size"]}
toml = "0.5.6"
typetag = "0.1.4"
umask = "0.1"
unicode-xid = "0.2.0"
which = "3"
trash = { version = "1.0.0", optional = true }
clipboard = { version = "0.5", optional = true }
starship = { version = "0.39.0", optional = true }
rayon = "1.3.0"
[target.'cfg(unix)'.dependencies]
users = "0.10.0"
[dependencies.rusqlite]
version = "0.22.0"
features = ["bundled", "blob"]
[build-dependencies]
nu-build = { version = "0.14.0", path = "../nu-build" }
[dev-dependencies]
quickcheck = "0.9"
quickcheck_macros = "0.9"
[features]
stable = []
starship-prompt = ["starship"]
clipboard-cli = ["clipboard"]
trash-support = ["trash"]

crates/nu-cli/src/cli.rs (new file, 938 changes)

@ -0,0 +1,938 @@
use crate::commands::classified::block::run_block;
use crate::commands::classified::external::{MaybeTextCodec, StringOrBinary};
use crate::commands::plugin::JsonRpc;
use crate::commands::plugin::{PluginCommand, PluginSink};
use crate::commands::whole_stream_command;
use crate::context::Context;
#[cfg(not(feature = "starship-prompt"))]
use crate::git::current_branch;
use crate::path::canonicalize;
use crate::prelude::*;
use futures_codec::FramedRead;
use nu_errors::ShellError;
use nu_protocol::hir::{ClassifiedCommand, Expression, InternalCommand, Literal, NamedArguments};
use nu_protocol::{Primitive, ReturnSuccess, Scope, Signature, UntaggedValue, Value};
use log::{debug, trace};
use rustyline::error::ReadlineError;
use rustyline::{
self, config::Configurer, config::EditMode, At, Cmd, ColorMode, CompletionType, Config, Editor,
KeyPress, Movement, Word,
};
use std::error::Error;
use std::io::{BufRead, BufReader, Write};
use std::iter::Iterator;
use std::path::{Path, PathBuf};
use std::sync::atomic::Ordering;
use rayon::prelude::*;
fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), ShellError> {
let mut child = std::process::Command::new(path)
.stdin(std::process::Stdio::piped())
.stdout(std::process::Stdio::piped())
.spawn()
.expect("Failed to spawn child process");
let stdin = child.stdin.as_mut().expect("Failed to open stdin");
let stdout = child.stdout.as_mut().expect("Failed to open stdout");
let mut reader = BufReader::new(stdout);
let request = JsonRpc::new("config", Vec::<Value>::new());
let request_raw = serde_json::to_string(&request)?;
stdin.write_all(format!("{}\n", request_raw).as_bytes())?;
let path = dunce::canonicalize(path)?;
let mut input = String::new();
let result = match reader.read_line(&mut input) {
Ok(count) => {
trace!("processing response ({} bytes)", count);
trace!("response: {}", input);
let response = serde_json::from_str::<JsonRpc<Result<Signature, ShellError>>>(&input);
match response {
Ok(jrpc) => match jrpc.params {
Ok(params) => {
let fname = path.to_string_lossy();
trace!("processing {:?}", params);
let name = params.name.clone();
let fname = fname.to_string();
if context.get_command(&name).is_some() {
trace!("plugin {:?} already loaded.", &name);
} else if params.is_filter {
context.add_commands(vec![whole_stream_command(PluginCommand::new(
name, fname, params,
))]);
} else {
context.add_commands(vec![whole_stream_command(PluginSink::new(
name, fname, params,
))]);
}
Ok(())
}
Err(e) => Err(e),
},
Err(e) => {
trace!("incompatible plugin {:?}", input);
Err(ShellError::untagged_runtime_error(format!(
"Error: {:?}",
e
)))
}
}
}
Err(e) => Err(ShellError::untagged_runtime_error(format!(
"Error: {:?}",
e
))),
};
let _ = child.wait();
result
}
fn search_paths() -> Vec<std::path::PathBuf> {
use std::env;
let mut search_paths = Vec::new();
// Automatically add path `nu` is in as a search path
if let Ok(exe_path) = env::current_exe() {
if let Some(exe_dir) = exe_path.parent() {
search_paths.push(exe_dir.to_path_buf());
}
}
#[cfg(not(debug_assertions))]
{
match env::var_os("PATH") {
Some(paths) => {
search_paths.extend(env::split_paths(&paths).collect::<Vec<_>>());
}
None => println!("PATH is not defined in the environment."),
}
}
search_paths
}
pub fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
let opts = glob::MatchOptions {
case_sensitive: false,
require_literal_separator: false,
require_literal_leading_dot: false,
};
for path in search_paths() {
let mut pattern = path.to_path_buf();
pattern.push(std::path::Path::new("nu_plugin_[a-z0-9][a-z0-9]*"));
let plugs: Vec<_> = glob::glob_with(&pattern.to_string_lossy(), opts)?
.filter_map(|x| x.ok())
.collect();
let _failures: Vec<_> = plugs
.par_iter()
.map(|path| {
let bin_name = {
if let Some(name) = path.file_name() {
match name.to_str() {
Some(raw) => raw,
None => "",
}
} else {
""
}
};
let is_valid_name = {
#[cfg(windows)]
{
bin_name
.chars()
.all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '.')
}
#[cfg(not(windows))]
{
bin_name
.chars()
.all(|c| c.is_ascii_alphanumeric() || c == '_')
}
};
let is_executable = {
#[cfg(windows)]
{
bin_name.ends_with(".exe") || bin_name.ends_with(".bat")
}
#[cfg(not(windows))]
{
true
}
};
if is_valid_name && is_executable {
trace!("Trying {:?}", path.display());
// we are ok if this plugin load fails
let _ = load_plugin(&path, &mut context.clone());
}
})
.collect();
}
Ok(())
}
pub struct History;
impl History {
pub fn path() -> PathBuf {
const FNAME: &str = "history.txt";
config::user_data()
.map(|mut p| {
p.push(FNAME);
p
})
.unwrap_or_else(|_| PathBuf::from(FNAME))
}
}
#[allow(dead_code)]
fn create_default_starship_config() -> Option<toml::Value> {
let mut map = toml::value::Table::new();
map.insert("add_newline".into(), toml::Value::Boolean(false));
let mut git_branch = toml::value::Table::new();
git_branch.insert("symbol".into(), toml::Value::String("📙 ".into()));
map.insert("git_branch".into(), toml::Value::Table(git_branch));
let mut git_status = toml::value::Table::new();
git_status.insert("disabled".into(), toml::Value::Boolean(true));
map.insert("git_status".into(), toml::Value::Table(git_status));
Some(toml::Value::Table(map))
}
pub fn create_default_context(
syncer: &mut crate::EnvironmentSyncer,
interactive: bool,
) -> Result<Context, Box<dyn Error>> {
syncer.load_environment();
let mut context = Context::basic()?;
syncer.sync_env_vars(&mut context);
syncer.sync_path_vars(&mut context);
{
use crate::commands::*;
context.add_commands(vec![
// System/file operations
whole_stream_command(Pwd),
whole_stream_command(Ls),
whole_stream_command(Du),
whole_stream_command(Cd),
whole_stream_command(Remove),
whole_stream_command(Open),
whole_stream_command(Config),
whole_stream_command(Help),
whole_stream_command(History),
whole_stream_command(Save),
whole_stream_command(Touch),
whole_stream_command(Cpy),
whole_stream_command(Date),
whole_stream_command(Cal),
whole_stream_command(Calc),
whole_stream_command(Mkdir),
whole_stream_command(Move),
whole_stream_command(Kill),
whole_stream_command(Version),
whole_stream_command(Clear),
whole_stream_command(What),
whole_stream_command(Which),
whole_stream_command(Debug),
whole_stream_command(Alias),
whole_stream_command(WithEnv),
// Statistics
whole_stream_command(Size),
whole_stream_command(Count),
// Metadata
whole_stream_command(Tags),
// Shells
whole_stream_command(Next),
whole_stream_command(Previous),
whole_stream_command(Shells),
whole_stream_command(Enter),
whole_stream_command(Exit),
// Viewers
whole_stream_command(Autoview),
whole_stream_command(Table),
// Text manipulation
whole_stream_command(SplitColumn),
whole_stream_command(SplitRow),
whole_stream_command(Lines),
whole_stream_command(Trim),
whole_stream_command(Echo),
whole_stream_command(Parse),
// Column manipulation
whole_stream_command(Reject),
whole_stream_command(Select),
whole_stream_command(Get),
whole_stream_command(Update),
whole_stream_command(Insert),
whole_stream_command(SplitBy),
// Row manipulation
whole_stream_command(Reverse),
whole_stream_command(Append),
whole_stream_command(Prepend),
whole_stream_command(SortBy),
whole_stream_command(GroupBy),
whole_stream_command(First),
whole_stream_command(Last),
whole_stream_command(Nth),
whole_stream_command(Drop),
whole_stream_command(Format),
whole_stream_command(Where),
whole_stream_command(Compact),
whole_stream_command(Default),
whole_stream_command(Skip),
whole_stream_command(SkipUntil),
whole_stream_command(SkipWhile),
whole_stream_command(Keep),
whole_stream_command(KeepUntil),
whole_stream_command(KeepWhile),
whole_stream_command(Range),
whole_stream_command(Rename),
whole_stream_command(Uniq),
whole_stream_command(Each),
whole_stream_command(IsEmpty),
// Table manipulation
whole_stream_command(Merge),
whole_stream_command(Shuffle),
whole_stream_command(Wrap),
whole_stream_command(Pivot),
whole_stream_command(Headers),
// Data processing
whole_stream_command(Histogram),
whole_stream_command(Sum),
// File format output
whole_stream_command(To),
whole_stream_command(ToBSON),
whole_stream_command(ToCSV),
whole_stream_command(ToHTML),
whole_stream_command(ToJSON),
whole_stream_command(ToSQLite),
whole_stream_command(ToDB),
whole_stream_command(ToMarkdown),
whole_stream_command(ToTOML),
whole_stream_command(ToTSV),
whole_stream_command(ToURL),
whole_stream_command(ToYAML),
// File format input
whole_stream_command(From),
whole_stream_command(FromCSV),
whole_stream_command(FromEML),
whole_stream_command(FromTSV),
whole_stream_command(FromSSV),
whole_stream_command(FromINI),
whole_stream_command(FromBSON),
whole_stream_command(FromJSON),
whole_stream_command(FromODS),
whole_stream_command(FromDB),
whole_stream_command(FromSQLite),
whole_stream_command(FromTOML),
whole_stream_command(FromURL),
whole_stream_command(FromXLSX),
whole_stream_command(FromXML),
whole_stream_command(FromYAML),
whole_stream_command(FromYML),
whole_stream_command(FromIcs),
whole_stream_command(FromVcf),
// "Private" commands (not intended to be accessed directly)
whole_stream_command(RunExternalCommand { interactive }),
]);
cfg_if::cfg_if! {
if #[cfg(data_processing_primitives)] {
context.add_commands(vec![
whole_stream_command(ReduceBy),
whole_stream_command(EvaluateBy),
whole_stream_command(TSortBy),
whole_stream_command(MapMaxBy),
]);
}
}
#[cfg(feature = "clipboard")]
{
context.add_commands(vec![whole_stream_command(
crate::commands::clip::clipboard::Clip,
)]);
}
}
Ok(context)
}
pub async fn run_vec_of_pipelines(
pipelines: Vec<String>,
redirect_stdin: bool,
) -> Result<(), Box<dyn Error>> {
let mut syncer = crate::EnvironmentSyncer::new();
let mut context = create_default_context(&mut syncer, false)?;
let _ = crate::load_plugins(&mut context);
let cc = context.ctrl_c.clone();
ctrlc::set_handler(move || {
cc.store(true, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
if context.ctrl_c.load(Ordering::SeqCst) {
context.ctrl_c.store(false, Ordering::SeqCst);
}
// before we start up, let's run our startup commands
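// The "startup" key is expected to hold a table (list) of pipeline strings. Illustrative
// config.toml entry (any valid pipeline strings work here; this alias is taken from the
// alias command's own examples):
//   startup = ["alias say-hi [] { echo 'Hello!' }"]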
if let Ok(config) = crate::data::config::config(Tag::unknown()) {
if let Some(commands) = config.get("startup") {
match commands {
Value {
value: UntaggedValue::Table(pipelines),
..
} => {
for pipeline in pipelines {
if let Ok(pipeline_string) = pipeline.as_string() {
let _ = run_pipeline_standalone(
pipeline_string,
false,
&mut context,
false,
)
.await;
}
}
}
_ => {
println!("warning: expected a table of pipeline strings as startup commands");
}
}
}
}
for pipeline in pipelines {
run_pipeline_standalone(pipeline, redirect_stdin, &mut context, true).await?;
}
Ok(())
}
pub async fn run_pipeline_standalone(
pipeline: String,
redirect_stdin: bool,
context: &mut Context,
exit_on_error: bool,
) -> Result<(), Box<dyn Error>> {
let line = process_line(Ok(pipeline), context, redirect_stdin, false).await;
match line {
LineResult::Success(line) => {
let error_code = {
let errors = context.current_errors.clone();
let errors = errors.lock();
if errors.len() > 0 {
1
} else {
0
}
};
context.maybe_print_errors(Text::from(line));
if error_code != 0 && exit_on_error {
std::process::exit(error_code);
}
}
LineResult::Error(line, err) => {
context.with_host(|host| {
print_err(err, host, &Text::from(line.clone()));
});
context.maybe_print_errors(Text::from(line));
if exit_on_error {
std::process::exit(1);
}
}
_ => {}
}
Ok(())
}
/// The entry point for the CLI. Will register all known internal commands, load experimental commands, load plugins, then prepare the prompt and line reader for input.
pub async fn cli() -> Result<(), Box<dyn Error>> {
#[cfg(windows)]
const DEFAULT_COMPLETION_MODE: CompletionType = CompletionType::Circular;
#[cfg(not(windows))]
const DEFAULT_COMPLETION_MODE: CompletionType = CompletionType::List;
let mut syncer = crate::EnvironmentSyncer::new();
let mut context = create_default_context(&mut syncer, true)?;
let _ = load_plugins(&mut context);
let config = Config::builder().color_mode(ColorMode::Forced).build();
let mut rl: Editor<_> = Editor::with_config(config);
// add key bindings to move over a whole word with Ctrl+ArrowLeft and Ctrl+ArrowRight
rl.bind_sequence(
KeyPress::ControlLeft,
Cmd::Move(Movement::BackwardWord(1, Word::Vi)),
);
rl.bind_sequence(
KeyPress::ControlRight,
Cmd::Move(Movement::ForwardWord(1, At::AfterEnd, Word::Vi)),
);
#[cfg(windows)]
{
let _ = ansi_term::enable_ansi_support();
}
// we are ok if history does not exist
let _ = rl.load_history(&History::path());
let cc = context.ctrl_c.clone();
ctrlc::set_handler(move || {
cc.store(true, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
let mut ctrlcbreak = false;
// before we start up, let's run our startup commands
if let Ok(config) = crate::data::config::config(Tag::unknown()) {
if let Some(commands) = config.get("startup") {
match commands {
Value {
value: UntaggedValue::Table(pipelines),
..
} => {
for pipeline in pipelines {
if let Ok(pipeline_string) = pipeline.as_string() {
let _ = run_pipeline_standalone(
pipeline_string,
false,
&mut context,
false,
)
.await;
}
}
}
_ => {
println!("warning: expected a table of pipeline strings as startup commands");
}
}
}
}
loop {
if context.ctrl_c.load(Ordering::SeqCst) {
context.ctrl_c.store(false, Ordering::SeqCst);
continue;
}
let cwd = context.shell_manager.path();
rl.set_helper(Some(crate::shell::Helper::new(context.clone())));
let edit_mode = config::config(Tag::unknown())?
.get("edit_mode")
.map(|s| match s.value.expect_string() {
"vi" => EditMode::Vi,
"emacs" => EditMode::Emacs,
_ => EditMode::Emacs,
})
.unwrap_or(EditMode::Emacs);
rl.set_edit_mode(edit_mode);
let key_timeout = config::config(Tag::unknown())?
.get("key_timeout")
.map(|s| s.value.expect_int())
.unwrap_or(1);
rl.set_keyseq_timeout(key_timeout as i32);
let completion_mode = config::config(Tag::unknown())?
.get("completion_mode")
.map(|s| match s.value.expect_string() {
"list" => CompletionType::List,
"circular" => CompletionType::Circular,
_ => DEFAULT_COMPLETION_MODE,
})
.unwrap_or(DEFAULT_COMPLETION_MODE);
rl.set_completion_type(completion_mode);
let colored_prompt = {
#[cfg(feature = "starship-prompt")]
{
std::env::set_var("STARSHIP_SHELL", "");
let mut starship_context =
starship::context::Context::new_with_dir(clap::ArgMatches::default(), cwd);
match starship_context.config.config {
None => {
starship_context.config.config = create_default_starship_config();
}
Some(toml::Value::Table(t)) if t.is_empty() => {
starship_context.config.config = create_default_starship_config();
}
_ => {}
};
starship::print::get_prompt(starship_context)
}
#[cfg(not(feature = "starship-prompt"))]
{
format!(
"\x1b[32m{}{}\x1b[m> ",
cwd,
match current_branch() {
Some(s) => format!("({})", s),
None => "".to_string(),
}
)
}
};
let prompt = {
if let Ok(bytes) = strip_ansi_escapes::strip(&colored_prompt) {
String::from_utf8_lossy(&bytes).to_string()
} else {
"> ".to_string()
}
};
rl.helper_mut().expect("No helper").colored_prompt = colored_prompt;
let mut initial_command = Some(String::new());
let mut readline = Err(ReadlineError::Eof);
while let Some(ref cmd) = initial_command {
readline = rl.readline_with_initial(&prompt, (&cmd, ""));
initial_command = None;
}
let line = process_line(readline, &mut context, false, true).await;
// Check the config to see if we need to update the path
// TODO: make sure config is cached so we don't repeat this load on every call
// FIXME: we probably want to be a bit more graceful if we can't set the environment
syncer.reload();
syncer.sync_env_vars(&mut context);
syncer.sync_path_vars(&mut context);
match line {
LineResult::Success(line) => {
rl.add_history_entry(line.clone());
let _ = rl.save_history(&History::path());
context.maybe_print_errors(Text::from(line));
}
LineResult::Error(line, err) => {
rl.add_history_entry(line.clone());
let _ = rl.save_history(&History::path());
context.with_host(|host| {
print_err(err, host, &Text::from(line.clone()));
});
context.maybe_print_errors(Text::from(line.clone()));
}
LineResult::CtrlC => {
let config_ctrlc_exit = config::config(Tag::unknown())?
.get("ctrlc_exit")
.map(|s| match s.value.expect_string() {
"true" => true,
_ => false,
})
.unwrap_or(false); // default behavior is to allow CTRL-C spamming similar to other shells
if !config_ctrlc_exit {
continue;
}
if ctrlcbreak {
let _ = rl.save_history(&History::path());
std::process::exit(0);
} else {
context.with_host(|host| host.stdout("CTRL-C pressed (again to quit)"));
ctrlcbreak = true;
continue;
}
}
LineResult::Break => {
break;
}
}
ctrlcbreak = false;
}
// we are ok if we cannot save history
let _ = rl.save_history(&History::path());
Ok(())
}
fn chomp_newline(s: &str) -> &str {
if s.ends_with('\n') {
&s[..s.len() - 1]
} else {
s
}
}
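// A minimal test sketch for chomp_newline (not part of the original file), showing that only a
// single trailing newline is removed.
#[cfg(test)]
mod chomp_newline_tests {
    use super::chomp_newline;

    #[test]
    fn strips_a_single_trailing_newline() {
        assert_eq!(chomp_newline("ls | get name\n"), "ls | get name");
        assert_eq!(chomp_newline("ls | get name"), "ls | get name");
        assert_eq!(chomp_newline("ls\n\n"), "ls\n");
    }
}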
enum LineResult {
Success(String),
Error(String, ShellError),
CtrlC,
Break,
}
/// Process the line: parse the text into commands, classify those commands so we know what is being called in the pipeline, and then run the resulting pipeline.
async fn process_line(
readline: Result<String, ReadlineError>,
ctx: &mut Context,
redirect_stdin: bool,
cli_mode: bool,
) -> LineResult {
match &readline {
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
Ok(line) => {
let line = chomp_newline(line);
let result = match nu_parser::lite_parse(&line, 0) {
Err(err) => {
return LineResult::Error(line.to_string(), err.into());
}
Ok(val) => val,
};
debug!("=== Parsed ===");
debug!("{:#?}", result);
let mut classified_block = nu_parser::classify_block(&result, ctx.registry());
debug!("{:#?}", classified_block);
//println!("{:#?}", pipeline);
if let Some(failure) = classified_block.failed {
return LineResult::Error(line.to_string(), failure.into());
}
// There's a special case to check before we process the pipeline:
// If we're giving a path by itself
// ...and it's not a command in the path
// ...and it doesn't have any arguments
// ...and we're in the CLI
// ...then change to this directory
if cli_mode
&& classified_block.block.block.len() == 1
&& classified_block.block.block[0].list.len() == 1
{
if let ClassifiedCommand::Internal(InternalCommand {
ref name, ref args, ..
}) = classified_block.block.block[0].list[0]
{
let internal_name = name;
let name = args
.positional
.as_ref()
.and_then(|positionals| {
positionals.get(0).map(|e| {
if let Expression::Literal(Literal::String(ref s)) = e.expr {
&s
} else {
""
}
})
})
.unwrap_or("");
if internal_name == "run_external"
&& args
.positional
.as_ref()
.map(|ref v| v.len() == 1)
.unwrap_or(true)
&& args
.named
.as_ref()
.map(NamedArguments::is_empty)
.unwrap_or(true)
&& canonicalize(ctx.shell_manager.path(), name).is_ok()
&& Path::new(&name).is_dir()
&& which::which(&name).is_err()
{
// Here we work differently if we're in Windows because of the expected Windows behavior
#[cfg(windows)]
{
if name.ends_with(':') {
// This looks like a drive shortcut. We need to a) switch drives and b) go back to the previous directory we were viewing on that drive
// But first, we need to save where we are now
let current_path = ctx.shell_manager.path();
let split_path: Vec<_> = current_path.split(':').collect();
if split_path.len() > 1 {
ctx.windows_drives_previous_cwd
.lock()
.insert(split_path[0].to_string(), current_path);
}
let name = name.to_uppercase();
let new_drive: Vec<_> = name.split(':').collect();
if let Some(val) =
ctx.windows_drives_previous_cwd.lock().get(new_drive[0])
{
ctx.shell_manager.set_path(val.to_string());
return LineResult::Success(line.to_string());
} else {
ctx.shell_manager
.set_path(format!("{}\\", name.to_string()));
return LineResult::Success(line.to_string());
}
} else {
ctx.shell_manager.set_path(name.to_string());
return LineResult::Success(line.to_string());
}
}
#[cfg(not(windows))]
{
ctx.shell_manager.set_path(name.to_string());
return LineResult::Success(line.to_string());
}
}
}
}
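// If stdin has been redirected into nu, wrap the raw bytes from stdin in a stream of
// string/binary values so the first command in the pipeline can consume them; otherwise the
// pipeline starts with an empty input stream.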
let input_stream = if redirect_stdin {
let file = futures::io::AllowStdIo::new(std::io::stdin());
let stream = FramedRead::new(file, MaybeTextCodec).map(|line| {
if let Ok(line) = line {
match line {
StringOrBinary::String(s) => Ok(Value {
value: UntaggedValue::Primitive(Primitive::String(s)),
tag: Tag::unknown(),
}),
StringOrBinary::Binary(b) => Ok(Value {
value: UntaggedValue::Primitive(Primitive::Binary(
b.into_iter().collect(),
)),
tag: Tag::unknown(),
}),
}
} else {
panic!("Internal error: could not read lines of text from stdin")
}
});
stream.to_input_stream()
} else {
InputStream::empty()
};
classified_block.block.expand_it_usage();
trace!("{:#?}", classified_block);
let env = ctx.get_env();
match run_block(&classified_block.block, ctx, input_stream, &Scope::env(env)).await {
Ok(input) => {
// Running a pipeline gives us back a stream that we can then
// work through. At the top level, we just want to pull on the
// values to compute them.
use futures::stream::TryStreamExt;
let context = RunnableContext {
input,
shell_manager: ctx.shell_manager.clone(),
host: ctx.host.clone(),
ctrl_c: ctx.ctrl_c.clone(),
registry: ctx.registry.clone(),
name: Tag::unknown(),
};
if let Ok(mut output_stream) = crate::commands::autoview::autoview(context) {
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Value {
value: UntaggedValue::Error(e),
..
}))) => return LineResult::Error(line.to_string(), e),
Ok(Some(_item)) => {
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
Ok(None) => break,
Err(e) => return LineResult::Error(line.to_string(), e),
}
}
}
LineResult::Success(line.to_string())
}
Err(err) => LineResult::Error(line.to_string(), err),
}
}
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
Err(ReadlineError::Eof) => LineResult::Break,
Err(err) => {
outln!("Error: {:?}", err);
LineResult::Break
}
}
}
pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
if let Some(diag) = err.into_diagnostic() {
let writer = host.err_termcolor();
let mut source = source.to_string();
source.push(' ');
let files = nu_parser::Files::new(source);
let _ = std::panic::catch_unwind(move || {
let _ = language_reporting::emit(
&mut writer.lock(),
&files,
&diag,
&language_reporting::DefaultConfig,
);
});
}
}
#[cfg(test)]
mod tests {
#[quickcheck]
fn quickcheck_parse(data: String) -> bool {
if let Ok(lite_block) = nu_parser::lite_parse(&data, 0) {
let context = crate::context::Context::basic().unwrap();
let _ = nu_parser::classify_block(&lite_block, context.registry());
}
true
}
}

View File

@ -4,9 +4,12 @@ pub(crate) mod macros;
mod from_delimited_data;
mod to_delimited_data;
pub(crate) mod alias;
pub(crate) mod append;
pub(crate) mod args;
pub(crate) mod autoview;
pub(crate) mod cal;
pub(crate) mod calc;
pub(crate) mod cd;
pub(crate) mod classified;
pub(crate) mod clip;
@ -18,17 +21,21 @@ pub(crate) mod cp;
pub(crate) mod date;
pub(crate) mod debug;
pub(crate) mod default;
pub(crate) mod drop;
pub(crate) mod du;
pub(crate) mod each;
pub(crate) mod echo;
pub(crate) mod edit;
pub(crate) mod enter;
pub(crate) mod env;
#[allow(unused)]
pub(crate) mod evaluate_by;
pub(crate) mod exit;
pub(crate) mod first;
pub(crate) mod format;
pub(crate) mod from;
pub(crate) mod from_bson;
pub(crate) mod from_csv;
pub(crate) mod from_eml;
pub(crate) mod from_ics;
pub(crate) mod from_ini;
pub(crate) mod from_json;
pub(crate) mod from_ods;
@ -37,27 +44,33 @@ pub(crate) mod from_ssv;
pub(crate) mod from_toml;
pub(crate) mod from_tsv;
pub(crate) mod from_url;
pub(crate) mod from_vcf;
pub(crate) mod from_xlsx;
pub(crate) mod from_xml;
pub(crate) mod from_yaml;
pub(crate) mod get;
pub(crate) mod group_by;
pub(crate) mod headers;
pub(crate) mod help;
pub(crate) mod histogram;
pub(crate) mod history;
pub(crate) mod insert;
pub(crate) mod is_empty;
pub(crate) mod keep;
pub(crate) mod keep_until;
pub(crate) mod keep_while;
pub(crate) mod last;
pub(crate) mod lines;
pub(crate) mod ls;
#[allow(unused)]
pub(crate) mod map_max_by;
pub(crate) mod merge;
pub(crate) mod mkdir;
pub(crate) mod mv;
pub(crate) mod next;
pub(crate) mod nth;
pub(crate) mod open;
pub(crate) mod parse;
pub(crate) mod pick;
pub(crate) mod pivot;
pub(crate) mod plugin;
pub(crate) mod prepend;
@ -67,24 +80,34 @@ pub(crate) mod range;
#[allow(unused)]
pub(crate) mod reduce_by;
pub(crate) mod reject;
pub(crate) mod rename;
pub(crate) mod reverse;
pub(crate) mod rm;
pub(crate) mod run_alias;
pub(crate) mod run_external;
pub(crate) mod save;
pub(crate) mod select;
pub(crate) mod shells;
pub(crate) mod shuffle;
pub(crate) mod size;
pub(crate) mod skip;
pub(crate) mod skip_until;
pub(crate) mod skip_while;
pub(crate) mod sort_by;
pub(crate) mod split_by;
pub(crate) mod split_column;
pub(crate) mod split_row;
pub(crate) mod sum;
#[allow(unused)]
pub(crate) mod t_sort_by;
pub(crate) mod table;
pub(crate) mod tags;
pub(crate) mod to;
pub(crate) mod to_bson;
pub(crate) mod to_csv;
pub(crate) mod to_html;
pub(crate) mod to_json;
pub(crate) mod to_md;
pub(crate) mod to_sqlite;
pub(crate) mod to_toml;
pub(crate) mod to_tsv;
@ -92,20 +115,24 @@ pub(crate) mod to_url;
pub(crate) mod to_yaml;
pub(crate) mod trim;
pub(crate) mod uniq;
pub(crate) mod update;
pub(crate) mod version;
pub(crate) mod what;
pub(crate) mod where_;
pub(crate) mod which_;
pub(crate) mod with_env;
pub(crate) mod wrap;
pub(crate) use autoview::Autoview;
pub(crate) use cd::Cd;
pub(crate) use command::{
per_item_command, whole_stream_command, Command, PerItemCommand, RawCommandArgs,
UnevaluatedCallInfo, WholeStreamCommand,
whole_stream_command, Command, Example, UnevaluatedCallInfo, WholeStreamCommand,
};
pub(crate) use alias::Alias;
pub(crate) use append::Append;
pub(crate) use cal::Cal;
pub(crate) use calc::Calc;
pub(crate) use compact::Compact;
pub(crate) use config::Config;
pub(crate) use count::Count;
@ -113,17 +140,28 @@ pub(crate) use cp::Cpy;
pub(crate) use date::Date;
pub(crate) use debug::Debug;
pub(crate) use default::Default;
pub(crate) use drop::Drop;
pub(crate) use du::Du;
pub(crate) use each::Each;
pub(crate) use echo::Echo;
pub(crate) use edit::Edit;
pub(crate) use is_empty::IsEmpty;
pub(crate) use update::Update;
pub(crate) mod kill;
pub(crate) use kill::Kill;
pub(crate) mod clear;
pub(crate) use clear::Clear;
pub(crate) mod touch;
pub(crate) use enter::Enter;
pub(crate) use env::Env;
#[allow(unused_imports)]
pub(crate) use evaluate_by::EvaluateBy;
pub(crate) use exit::Exit;
pub(crate) use first::First;
pub(crate) use format::Format;
pub(crate) use from::From;
pub(crate) use from_bson::FromBSON;
pub(crate) use from_csv::FromCSV;
pub(crate) use from_eml::FromEML;
pub(crate) use from_ics::FromIcs;
pub(crate) use from_ini::FromINI;
pub(crate) use from_json::FromJSON;
pub(crate) use from_ods::FromODS;
@ -133,28 +171,33 @@ pub(crate) use from_ssv::FromSSV;
pub(crate) use from_toml::FromTOML;
pub(crate) use from_tsv::FromTSV;
pub(crate) use from_url::FromURL;
pub(crate) use from_vcf::FromVcf;
pub(crate) use from_xlsx::FromXLSX;
pub(crate) use from_xml::FromXML;
pub(crate) use from_yaml::FromYAML;
pub(crate) use from_yaml::FromYML;
pub(crate) use get::Get;
pub(crate) use group_by::GroupBy;
pub(crate) use headers::Headers;
pub(crate) use help::Help;
pub(crate) use histogram::Histogram;
pub(crate) use history::History;
pub(crate) use insert::Insert;
pub(crate) use keep::Keep;
pub(crate) use keep_until::KeepUntil;
pub(crate) use keep_while::KeepWhile;
pub(crate) use last::Last;
pub(crate) use lines::Lines;
pub(crate) use ls::Ls;
#[allow(unused_imports)]
pub(crate) use map_max_by::MapMaxBy;
pub(crate) use merge::Merge;
pub(crate) use mkdir::Mkdir;
pub(crate) use mv::Move;
pub(crate) use next::Next;
pub(crate) use nth::Nth;
pub(crate) use open::Open;
pub(crate) use parse::Parse;
pub(crate) use pick::Pick;
pub(crate) use pivot::Pivot;
pub(crate) use prepend::Prepend;
pub(crate) use prev::Previous;
@ -163,34 +206,45 @@ pub(crate) use range::Range;
#[allow(unused_imports)]
pub(crate) use reduce_by::ReduceBy;
pub(crate) use reject::Reject;
pub(crate) use rename::Rename;
pub(crate) use reverse::Reverse;
pub(crate) use rm::Remove;
pub(crate) use run_external::RunExternalCommand;
pub(crate) use save::Save;
pub(crate) use select::Select;
pub(crate) use shells::Shells;
pub(crate) use shuffle::Shuffle;
pub(crate) use size::Size;
pub(crate) use skip::Skip;
pub(crate) use skip_until::SkipUntil;
pub(crate) use skip_while::SkipWhile;
pub(crate) use sort_by::SortBy;
pub(crate) use split_by::SplitBy;
pub(crate) use split_column::SplitColumn;
pub(crate) use split_row::SplitRow;
pub(crate) use sum::Sum;
#[allow(unused_imports)]
pub(crate) use t_sort_by::TSortBy;
pub(crate) use table::Table;
pub(crate) use tags::Tags;
pub(crate) use to::To;
pub(crate) use to_bson::ToBSON;
pub(crate) use to_csv::ToCSV;
pub(crate) use to_html::ToHTML;
pub(crate) use to_json::ToJSON;
pub(crate) use to_md::ToMarkdown;
pub(crate) use to_sqlite::ToDB;
pub(crate) use to_sqlite::ToSQLite;
pub(crate) use to_toml::ToTOML;
pub(crate) use to_tsv::ToTSV;
pub(crate) use to_url::ToURL;
pub(crate) use to_yaml::ToYAML;
pub(crate) use touch::Touch;
pub(crate) use trim::Trim;
pub(crate) use uniq::Uniq;
pub(crate) use version::Version;
pub(crate) use what::What;
pub(crate) use where_::Where;
pub(crate) use which_::Which;
pub(crate) use with_env::WithEnv;
pub(crate) use wrap::Wrap;

View File

@ -0,0 +1,80 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{hir::Block, CommandAction, ReturnSuccess, Signature, SyntaxShape, Value};
use nu_source::Tagged;
pub struct Alias;
#[derive(Deserialize)]
pub struct AliasArgs {
pub name: Tagged<String>,
pub args: Vec<Value>,
pub block: Block,
}
impl WholeStreamCommand for Alias {
fn name(&self) -> &str {
"alias"
}
fn signature(&self) -> Signature {
Signature::build("alias")
.required("name", SyntaxShape::String, "the name of the alias")
.required("args", SyntaxShape::Table, "the arguments to the alias")
.required(
"block",
SyntaxShape::Block,
"the block to run as the body of the alias",
)
}
fn usage(&self) -> &str {
"Define a shortcut for another command."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, alias)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "An alias without parameters",
example: "alias say-hi [] { echo 'Hello!' }",
},
Example {
description: "An alias with a single parameter",
example: "alias l [x] { ls $x }",
},
]
}
}
pub fn alias(
AliasArgs {
name,
args: list,
block,
}: AliasArgs,
_: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
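// Each alias parameter becomes a positional variable (e.g. "$x") that the stored block can
// reference when the alias is later invoked.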
let mut args: Vec<String> = vec![];
for item in list.iter() {
if let Ok(string) = item.as_string() {
args.push(format!("${}", string));
} else {
yield Err(ShellError::labeled_error("Expected a string", "expected a string", item.tag()));
}
}
yield ReturnSuccess::action(CommandAction::AddAlias(name.to_string(), args, block.clone()))
};
Ok(stream.to_output_stream())
}

View File

@ -35,6 +35,13 @@ impl WholeStreamCommand for Append {
) -> Result<OutputStream, ShellError> {
args.process(registry, append)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Add something to the end of a list or table",
example: "echo [1 2 3] | append 4",
}]
}
}
fn append(
@ -43,6 +50,7 @@ fn append(
) -> Result<OutputStream, ShellError> {
let mut after: VecDeque<Value> = VecDeque::new();
after.push_back(row);
let after = futures::stream::iter(after);
Ok(OutputStream::from_input(input.values.chain(after)))
Ok(OutputStream::from_input(input.chain(after)))
}

View File

@ -0,0 +1,334 @@
use crate::commands::UnevaluatedCallInfo;
use crate::commands::WholeStreamCommand;
use crate::data::value::format_leaf;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{hir, hir::Expression, hir::Literal, hir::SpannedExpression};
use nu_protocol::{Primitive, ReturnSuccess, Scope, Signature, UntaggedValue, Value};
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
pub struct Autoview;
impl WholeStreamCommand for Autoview {
fn name(&self) -> &str {
"autoview"
}
fn signature(&self) -> Signature {
Signature::build("autoview")
}
fn usage(&self) -> &str {
"View the contents of the pipeline as a table or list."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
autoview(RunnableContext {
input: args.input,
registry: registry.clone(),
shell_manager: args.shell_manager,
host: args.host,
ctrl_c: args.ctrl_c,
name: args.call_info.name_tag,
})
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Automatically view the results",
example: "ls | autoview",
},
Example {
description: "Autoview is also implied. The above can be written as",
example: "ls",
},
]
}
}
pub struct RunnableContextWithoutInput {
pub shell_manager: ShellManager,
pub host: Arc<parking_lot::Mutex<Box<dyn Host>>>,
pub ctrl_c: Arc<AtomicBool>,
pub registry: CommandRegistry,
pub name: Tag,
}
impl RunnableContextWithoutInput {
pub fn convert(context: RunnableContext) -> (InputStream, RunnableContextWithoutInput) {
let new_context = RunnableContextWithoutInput {
shell_manager: context.shell_manager,
host: context.host,
ctrl_c: context.ctrl_c,
registry: context.registry,
name: context.name,
};
(context.input, new_context)
}
}
pub fn autoview(context: RunnableContext) -> Result<OutputStream, ShellError> {
let binary = context.get_command("binaryview");
let text = context.get_command("textview");
let table = context.get_command("table");
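// Peek at the first two values: a stream with more than one value is rendered with the
// `table` command; a single value is dispatched below based on its type (string, binary,
// row, and so on); an empty stream prints nothing.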
Ok(OutputStream::new(async_stream! {
let (mut input_stream, context) = RunnableContextWithoutInput::convert(context);
match input_stream.next().await {
Some(x) => {
match input_stream.next().await {
Some(y) => {
let ctrl_c = context.ctrl_c.clone();
let stream = async_stream! {
yield Ok(x);
yield Ok(y);
loop {
match input_stream.next().await {
Some(z) => {
if ctrl_c.load(Ordering::SeqCst) {
break;
}
yield Ok(z);
}
_ => break,
}
}
};
let stream = stream.to_input_stream();
if let Some(table) = table {
let command_args = create_default_command_args(&context).with_input(stream);
let result = table.run(command_args, &context.registry);
result.collect::<Vec<_>>().await;
}
}
_ => {
match x {
Value {
value: UntaggedValue::Primitive(Primitive::String(ref s)),
tag: Tag { anchor, span },
} if anchor.is_some() => {
if let Some(text) = text {
let mut stream = VecDeque::new();
stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span }));
let command_args = create_default_command_args(&context).with_input(stream);
let result = text.run(command_args, &context.registry);
result.collect::<Vec<_>>().await;
} else {
out!("{}", s);
}
}
Value {
value: UntaggedValue::Primitive(Primitive::String(s)),
..
} => {
out!("{}", s);
}
Value {
value: UntaggedValue::Primitive(Primitive::Line(ref s)),
tag: Tag { anchor, span },
} if anchor.is_some() => {
if let Some(text) = text {
let mut stream = VecDeque::new();
stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span }));
let command_args = create_default_command_args(&context).with_input(stream);
let result = text.run(command_args, &context.registry);
result.collect::<Vec<_>>().await;
} else {
out!("{}\n", s);
}
}
Value {
value: UntaggedValue::Primitive(Primitive::Line(s)),
..
} => {
out!("{}\n", s);
}
Value {
value: UntaggedValue::Primitive(Primitive::Path(s)),
..
} => {
out!("{}", s.display());
}
Value {
value: UntaggedValue::Primitive(Primitive::Int(n)),
..
} => {
out!("{}", n);
}
Value {
value: UntaggedValue::Primitive(Primitive::Decimal(n)),
..
} => {
out!("{}", n);
}
Value {
value: UntaggedValue::Primitive(Primitive::Boolean(b)),
..
} => {
out!("{}", b);
}
Value {
value: UntaggedValue::Primitive(Primitive::Duration(_)),
..
} => {
let output = format_leaf(&x).plain_string(100_000);
out!("{}", output);
}
Value {
value: UntaggedValue::Primitive(Primitive::Date(d)),
..
} => {
out!("{}", d);
}
Value {
value: UntaggedValue::Primitive(Primitive::Range(_)),
..
} => {
let output = format_leaf(&x).plain_string(100_000);
out!("{}", output);
}
Value { value: UntaggedValue::Primitive(Primitive::Binary(ref b)), .. } => {
if let Some(binary) = binary {
let mut stream = VecDeque::new();
stream.push_back(x);
let command_args = create_default_command_args(&context).with_input(stream);
let result = binary.run(command_args, &context.registry);
result.collect::<Vec<_>>().await;
} else {
use pretty_hex::*;
out!("{:?}", b.hex_dump());
}
}
Value { value: UntaggedValue::Error(e), .. } => {
yield Err(e);
}
Value { value: UntaggedValue::Row(row), ..} => {
use prettytable::format::{FormatBuilder, LinePosition, LineSeparator};
use prettytable::{color, Attr, Cell, Row, Table};
use crate::data::value::{format_leaf, style_leaf};
use textwrap::fill;
let termwidth = std::cmp::max(textwrap::termwidth(), 20);
enum TableMode {
Light,
Normal,
}
let mut table = Table::new();
let table_mode = crate::data::config::config(Tag::unknown());
let table_mode = if let Some(s) = table_mode?.get("table_mode") {
match s.as_string() {
Ok(typ) if typ == "light" => TableMode::Light,
_ => TableMode::Normal,
}
} else {
TableMode::Normal
};
match table_mode {
TableMode::Light => {
table.set_format(
FormatBuilder::new()
.separator(LinePosition::Title, LineSeparator::new('─', '─', ' ', ' '))
.padding(1, 1)
.build(),
);
}
_ => {
table.set_format(
FormatBuilder::new()
.column_separator('│')
.separator(LinePosition::Top, LineSeparator::new('─', '┬', ' ', ' '))
.separator(LinePosition::Title, LineSeparator::new('─', '┼', ' ', ' '))
.separator(LinePosition::Bottom, LineSeparator::new('─', '┴', ' ', ' '))
.padding(1, 1)
.build(),
);
}
}
let mut max_key_len = 0;
for (key, _) in row.entries.iter() {
max_key_len = std::cmp::max(max_key_len, key.chars().count());
}
if max_key_len > (termwidth/2 - 1) {
max_key_len = termwidth/2 - 1;
}
let max_val_len = termwidth - max_key_len - 5;
for (key, value) in row.entries.iter() {
table.add_row(Row::new(vec![Cell::new(&fill(&key, max_key_len)).with_style(Attr::ForegroundColor(color::GREEN)).with_style(Attr::Bold),
Cell::new(&fill(&format_leaf(value).plain_string(100_000), max_val_len))]));
}
table.printstd();
// table.print_term(&mut *context.host.lock().out_terminal().ok_or_else(|| ShellError::untagged_runtime_error("Could not open terminal for output"))?)
// .map_err(|_| ShellError::untagged_runtime_error("Internal error: could not print to terminal (for unix systems check to make sure TERM is set)"))?;
}
Value { value: ref item, .. } => {
if let Some(table) = table {
let mut stream = VecDeque::new();
stream.push_back(x);
let command_args = create_default_command_args(&context).with_input(stream);
let result = table.run(command_args, &context.registry);
result.collect::<Vec<_>>().await;
} else {
out!("{:?}", item);
}
}
}
}
}
}
_ => {
//out!("<no results>");
}
}
// Needed for async_stream to type check
if false {
yield ReturnSuccess::value(UntaggedValue::nothing().into_untagged_value());
}
}))
}
fn create_default_command_args(context: &RunnableContextWithoutInput) -> RawCommandArgs {
let span = context.name.span;
RawCommandArgs {
host: context.host.clone(),
ctrl_c: context.ctrl_c.clone(),
shell_manager: context.shell_manager.clone(),
call_info: UnevaluatedCallInfo {
args: hir::Call {
head: Box::new(SpannedExpression::new(
Expression::Literal(Literal::String(String::new())),
span,
)),
positional: None,
named: None,
span,
is_last: true,
},
name_tag: context.name.clone(),
scope: Scope::empty(),
},
}
}

View File

@ -0,0 +1,300 @@
use crate::prelude::*;
use chrono::{DateTime, Datelike, Local, NaiveDate};
use nu_errors::ShellError;
use nu_protocol::Dictionary;
use crate::commands::{command::EvaluatedWholeStreamCommandArgs, WholeStreamCommand};
use indexmap::IndexMap;
use nu_protocol::{Signature, SyntaxShape, UntaggedValue, Value};
pub struct Cal;
impl WholeStreamCommand for Cal {
fn name(&self) -> &str {
"cal"
}
fn signature(&self) -> Signature {
Signature::build("cal")
.switch("year", "Display the year column", Some('y'))
.switch("quarter", "Display the quarter column", Some('q'))
.switch("month", "Display the month column", Some('m'))
.named(
"full-year",
SyntaxShape::Int,
"Display a year-long calendar for the specified year",
None,
)
.switch(
"month-names",
"Display the month names instead of integers",
None,
)
}
fn usage(&self) -> &str {
"Display a calendar."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
cal(args, registry)
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "This month's calendar",
example: "cal",
},
Example {
description: "The calendar for all of 2012",
example: "cal --full-year 2012",
},
]
}
}
pub fn cal(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let mut calendar_vec_deque = VecDeque::new();
let tag = args.call_info.name_tag.clone();
let (current_year, current_month, current_day) = get_current_date();
if args.has("full-year") {
let mut day_value: Option<u32> = Some(current_day);
let mut year_value = current_year as u64;
if let Some(year) = args.get("full-year") {
if let Ok(year_u64) = year.as_u64() {
year_value = year_u64;
}
if year_value != current_year as u64 {
day_value = None
}
}
add_year_to_table(
&mut calendar_vec_deque,
&tag,
year_value as i32,
current_year,
current_month,
day_value,
&args,
);
} else {
let (day_start_offset, number_of_days_in_month, _) =
get_month_information(current_year, current_month, current_year);
add_month_to_table(
&mut calendar_vec_deque,
&tag,
current_year,
current_month,
Some(current_day),
day_start_offset,
number_of_days_in_month as usize,
&args,
);
}
Ok(futures::stream::iter(calendar_vec_deque).to_output_stream())
}
fn get_current_date() -> (i32, u32, u32) {
let local_now: DateTime<Local> = Local::now();
let current_year: i32 = local_now.date().year();
let current_month: u32 = local_now.date().month();
let current_day: u32 = local_now.date().day();
(current_year, current_month, current_day)
}
fn add_year_to_table(
mut calendar_vec_deque: &mut VecDeque<Value>,
tag: &Tag,
mut selected_year: i32,
current_year: i32,
current_month: u32,
current_day_option: Option<u32>,
args: &EvaluatedWholeStreamCommandArgs,
) {
for month_number in 1..=12 {
let (day_start_offset, number_of_days_in_month, chosen_date_is_valid) =
get_month_information(selected_year, month_number, current_year);
if !chosen_date_is_valid {
selected_year = current_year;
}
let mut new_current_day_option: Option<u32> = None;
if let Some(current_day) = current_day_option {
if month_number == current_month {
new_current_day_option = Some(current_day)
}
}
add_month_to_table(
&mut calendar_vec_deque,
&tag,
selected_year,
month_number,
new_current_day_option,
day_start_offset,
number_of_days_in_month,
&args,
);
}
}
#[allow(clippy::too_many_arguments)]
fn add_month_to_table(
calendar_vec_deque: &mut VecDeque<Value>,
tag: &Tag,
year: i32,
month: u32,
_current_day_option: Option<u32>, // Can be used in the future to display current day
day_start_offset: usize,
number_of_days_in_month: usize,
args: &EvaluatedWholeStreamCommandArgs,
) {
let day_limit = number_of_days_in_month + day_start_offset;
let mut day_count: usize = 1;
let days_of_the_week = [
"sunday",
"monday",
"tuesday",
"wednesday",
"thurday",
"friday",
"saturday",
];
let should_show_year_column = args.has("year");
let should_show_month_column = args.has("month");
let should_show_quarter_column = args.has("quarter");
let should_show_month_names = args.has("month-names");
while day_count <= day_limit {
let mut indexmap = IndexMap::new();
if should_show_year_column {
indexmap.insert("year".to_string(), UntaggedValue::int(year).into_value(tag));
}
if should_show_quarter_column {
indexmap.insert(
"quarter".to_string(),
UntaggedValue::int(get_quarter_number(month)).into_value(tag),
);
}
if should_show_month_column {
let month_value = if should_show_month_names {
UntaggedValue::string(get_month_name(month)).into_value(tag)
} else {
UntaggedValue::int(month).into_value(tag)
};
indexmap.insert("month".to_string(), month_value);
}
for day in &days_of_the_week {
let value = if (day_count <= day_limit) && (day_count > day_start_offset) {
UntaggedValue::int(day_count - day_start_offset).into_value(tag)
} else {
UntaggedValue::nothing().into_value(tag)
};
indexmap.insert((*day).to_string(), value);
day_count += 1;
}
calendar_vec_deque
.push_back(UntaggedValue::Row(Dictionary::from(indexmap)).into_value(tag));
}
}
fn get_quarter_number(month_number: u32) -> u8 {
match month_number {
1..=3 => 1,
4..=6 => 2,
7..=9 => 3,
_ => 4,
}
}
fn get_month_name(month_number: u32) -> String {
let month_name = match month_number {
1 => "january",
2 => "february",
3 => "march",
4 => "april",
5 => "may",
6 => "june",
7 => "july",
8 => "august",
9 => "september",
10 => "october",
11 => "november",
_ => "december",
};
month_name.to_string()
}
fn get_month_information(
selected_year: i32,
month: u32,
current_year: i32,
) -> (usize, usize, bool) {
let (naive_date, chosen_date_is_valid_one) =
get_safe_naive_date(selected_year, month, current_year);
let weekday = naive_date.weekday();
let (days_in_month, chosen_date_is_valid_two) =
get_days_in_month(selected_year, month, current_year);
(
weekday.num_days_from_sunday() as usize,
days_in_month,
chosen_date_is_valid_one && chosen_date_is_valid_two,
)
}
fn get_days_in_month(selected_year: i32, month: u32, current_year: i32) -> (usize, bool) {
// Chrono does not provide a method to output the amount of days in a month
// This is a workaround taken from the example code from the Chrono docs here:
// https://docs.rs/chrono/0.3.0/chrono/naive/date/struct.NaiveDate.html#example-30
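// For example, asking for March 2020 builds 2020-04-01 and steps back one day to 2020-03-31,
// giving 31 days in the month.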
let (adjusted_year, adjusted_month) = if month == 12 {
(selected_year + 1, 1)
} else {
(selected_year, month + 1)
};
let (naive_date, chosen_date_is_valid) =
get_safe_naive_date(adjusted_year, adjusted_month, current_year);
(naive_date.pred().day() as usize, chosen_date_is_valid)
}
fn get_safe_naive_date(
selected_year: i32,
selected_month: u32,
current_year: i32,
) -> (NaiveDate, bool) {
if let Some(naive_date) = NaiveDate::from_ymd_opt(selected_year, selected_month, 1) {
return (naive_date, true);
}
(NaiveDate::from_ymd(current_year, selected_month, 1), false)
}
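// A small test sketch for the helpers above (not part of the original file).
#[cfg(test)]
mod cal_helper_tests {
    use super::{get_month_name, get_quarter_number};

    #[test]
    fn months_map_to_the_expected_quarter() {
        assert_eq!(get_quarter_number(1), 1);
        assert_eq!(get_quarter_number(6), 2);
        assert_eq!(get_quarter_number(9), 3);
        assert_eq!(get_quarter_number(12), 4);
    }

    #[test]
    fn month_numbers_map_to_names() {
        assert_eq!(get_month_name(1), "january");
        assert_eq!(get_month_name(12), "december");
    }
}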

View File

@ -0,0 +1,74 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, UntaggedValue, Value};
pub struct Calc;
#[derive(Deserialize)]
pub struct CalcArgs {}
impl WholeStreamCommand for Calc {
fn name(&self) -> &str {
"calc"
}
fn usage(&self) -> &str {
"Parse a math expression into a number"
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, calc)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Calculate math in the pipeline",
example: "echo '10 / 4' | calc",
}]
}
}
pub fn calc(
_: CalcArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
Ok(input
.map(move |input| {
if let Ok(string) = input.as_string() {
match parse(&string, &input.tag) {
Ok(value) => ReturnSuccess::value(value),
Err(err) => Err(ShellError::labeled_error(
"Calculation error",
err,
&input.tag.span,
)),
}
} else {
Err(ShellError::labeled_error(
"Expected a string from pipeline",
"requires string input",
name.clone(),
))
}
})
.to_output_stream())
}
pub fn parse(math_expression: &str, tag: impl Into<Tag>) -> Result<Value, String> {
use std::f64;
let num = meval::eval_str(math_expression);
match num {
Ok(num) => {
if num == f64::INFINITY || num == f64::NEG_INFINITY {
return Err(String::from("cannot represent result"));
}
Ok(UntaggedValue::from(Primitive::from(num)).into_value(tag))
}
Err(error) => Err(error.to_string()),
}
}
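// A minimal behavioral sketch for `parse` (not part of the original file; assumes meval's
// f64 semantics, where division by zero yields infinity).
#[cfg(test)]
mod calc_tests {
    use super::parse;
    use nu_source::Tag;

    #[test]
    fn evaluates_a_simple_expression() {
        assert!(parse("10 / 4", Tag::unknown()).is_ok());
    }

    #[test]
    fn rejects_results_that_cannot_be_represented() {
        assert!(parse("1 / 0", Tag::unknown()).is_err());
    }
}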

View File

@ -0,0 +1,66 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use std::path::PathBuf;
use nu_errors::ShellError;
use nu_protocol::{Signature, SyntaxShape};
use nu_source::Tagged;
#[derive(Deserialize)]
pub struct CdArgs {
pub(crate) path: Option<Tagged<PathBuf>>,
}
pub struct Cd;
impl WholeStreamCommand for Cd {
fn name(&self) -> &str {
"cd"
}
fn signature(&self) -> Signature {
Signature::build("cd").optional(
"directory",
SyntaxShape::Path,
"the directory to change to",
)
}
fn usage(&self) -> &str {
"Change to a new path."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, cd)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Change to a new directory called 'dirname'",
example: "cd dirname",
},
Example {
description: "Change to your home directory",
example: "cd",
},
Example {
description: "Change to your home directory (alternate version)",
example: "cd ~",
},
Example {
description: "Change to the previous directory",
example: "cd -",
},
]
}
}
fn cd(args: CdArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
context.shell_manager.cd(args, &context)
}

View File

@ -0,0 +1,95 @@
use crate::commands::classified::expr::run_expression_block;
use crate::commands::classified::internal::run_internal_command;
use crate::context::Context;
use crate::prelude::*;
use crate::stream::InputStream;
use futures::stream::TryStreamExt;
use nu_errors::ShellError;
use nu_protocol::hir::{Block, ClassifiedCommand, Commands};
use nu_protocol::{ReturnSuccess, Scope, UntaggedValue, Value};
use std::sync::atomic::Ordering;
pub(crate) async fn run_block(
block: &Block,
ctx: &mut Context,
mut input: InputStream,
scope: &Scope,
) -> Result<InputStream, ShellError> {
let mut output: Result<InputStream, ShellError> = Ok(InputStream::empty());
for pipeline in &block.block {
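// Drain the previous pipeline's output completely before starting the next one, surfacing any
// error values or context errors it produced along the way.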
match output {
Ok(inp) if inp.is_empty() => {}
Ok(inp) => {
let mut output_stream = inp.to_output_stream();
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Value {
value: UntaggedValue::Error(e),
..
}))) => return Err(e),
Ok(Some(_item)) => {
if let Some(err) = ctx.get_errors().get(0) {
ctx.clear_errors();
return Err(err.clone());
}
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
Ok(None) => {
if let Some(err) = ctx.get_errors().get(0) {
ctx.clear_errors();
return Err(err.clone());
}
break;
}
Err(e) => return Err(e),
}
}
}
Err(e) => {
return Err(e);
}
}
output = run_pipeline(pipeline, ctx, input, scope).await;
input = InputStream::empty();
}
output
}
async fn run_pipeline(
commands: &Commands,
ctx: &mut Context,
mut input: InputStream,
scope: &Scope,
) -> Result<InputStream, ShellError> {
let mut iter = commands.list.clone().into_iter().peekable();
loop {
let item: Option<ClassifiedCommand> = iter.next();
let next: Option<&ClassifiedCommand> = iter.peek();
input = match (item, next) {
(Some(ClassifiedCommand::Dynamic(_)), _) | (_, Some(ClassifiedCommand::Dynamic(_))) => {
return Err(ShellError::unimplemented("Dynamic commands"))
}
(Some(ClassifiedCommand::Expr(expr)), _) => {
run_expression_block(*expr, ctx, input, scope)?
}
(Some(ClassifiedCommand::Error(err)), _) => return Err(err.into()),
(_, Some(ClassifiedCommand::Error(err))) => return Err(err.clone().into()),
(Some(ClassifiedCommand::Internal(left)), _) => {
run_internal_command(left, ctx, input, scope)?
}
(None, _) => break,
};
}
Ok(input)
}

View File

@ -1,7 +1,7 @@
use derive_new::new;
use nu_parser::hir;
use nu_protocol::hir;
#[derive(new, Debug, Eq, PartialEq)]
#[derive(new, Debug)]
pub(crate) struct Command {
pub(crate) args: hir::Call,
}

View File

@ -0,0 +1,27 @@
use crate::evaluate::evaluate_baseline_expr;
use crate::prelude::*;
use log::{log_enabled, trace};
use futures::stream::once;
use nu_errors::ShellError;
use nu_protocol::hir::SpannedExpression;
use nu_protocol::Scope;
pub(crate) fn run_expression_block(
expr: SpannedExpression,
context: &mut Context,
_input: InputStream,
scope: &Scope,
) -> Result<InputStream, ShellError> {
if log_enabled!(log::Level::Trace) {
trace!(target: "nu::run::expr", "->");
trace!(target: "nu::run::expr", "{:?}", expr);
}
let scope = scope.clone();
let registry = context.registry().clone();
let output = evaluate_baseline_expr(&expr, &registry, &scope)?;
Ok(once(async { Ok(output) }).to_input_stream())
}

View File

@ -0,0 +1,625 @@
use crate::evaluate::evaluate_baseline_expr;
use crate::futures::ThreadedReceiver;
use crate::prelude::*;
use std::io::Write;
use std::ops::Deref;
use std::process::{Command, Stdio};
use std::sync::mpsc;
use bytes::{BufMut, Bytes, BytesMut};
use futures::executor::block_on_stream;
// use futures::stream::StreamExt;
use futures_codec::FramedRead;
use log::trace;
use nu_errors::ShellError;
use nu_protocol::hir::ExternalCommand;
use nu_protocol::{Primitive, Scope, ShellTypeName, UntaggedValue, Value};
use nu_source::Tag;
pub enum StringOrBinary {
String(String),
Binary(Vec<u8>),
}
pub struct MaybeTextCodec;
impl futures_codec::Encoder for MaybeTextCodec {
type Item = StringOrBinary;
type Error = std::io::Error;
fn encode(&mut self, item: Self::Item, dst: &mut BytesMut) -> Result<(), Self::Error> {
match item {
StringOrBinary::String(s) => {
dst.reserve(s.len());
dst.put(s.as_bytes());
Ok(())
}
StringOrBinary::Binary(b) => {
dst.reserve(b.len());
dst.put(Bytes::from(b));
Ok(())
}
}
}
}
impl futures_codec::Decoder for MaybeTextCodec {
type Item = StringOrBinary;
type Error = std::io::Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
let v: Vec<u8> = src.to_vec();
match String::from_utf8(v) {
Ok(s) => {
src.clear();
if s.is_empty() {
Ok(None)
} else {
Ok(Some(StringOrBinary::String(s)))
}
}
Err(err) => {
// Note: a single UTF-8 encoded character is at most 4 bytes (the original design allowed up to 6).
// If the decode failure is more than 6 bytes before the end of the buffer, we're not just seeing a
// partial character at the boundary; we're failing to understand the string encoding at all, so
// fall back to assuming it's a binary buffer.
if src.is_empty() {
Ok(None)
} else if src.len() > 6 && (src.len() - err.utf8_error().valid_up_to() > 6) {
// Fall back to assuming binary
let buf = src.to_vec();
src.clear();
Ok(Some(StringOrBinary::Binary(buf)))
} else {
// Looks like a utf-8 string, so let's assume that
let buf = src.split_to(err.utf8_error().valid_up_to() + 1);
String::from_utf8(buf.to_vec())
.map(|x| Some(StringOrBinary::String(x)))
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))
}
}
}
}
}
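// Illustrative behavior: a buffer holding valid UTF-8 such as b"hello" decodes to
// StringOrBinary::String, while data that is invalid UTF-8 well before its last 6 bytes (for
// example, the contents of a compressed or image file) falls back to StringOrBinary::Binary.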
pub(crate) async fn run_external_command(
command: ExternalCommand,
context: &mut Context,
input: InputStream,
scope: &Scope,
is_last: bool,
) -> Result<InputStream, ShellError> {
trace!(target: "nu::run::external", "-> {}", command.name);
if !did_find_command(&command.name).await {
return Err(ShellError::labeled_error(
"Command not found",
"command not found",
&command.name_tag,
));
}
run_with_stdin(command, context, input, scope, is_last)
}
fn run_with_stdin(
command: ExternalCommand,
context: &mut Context,
input: InputStream,
scope: &Scope,
is_last: bool,
) -> Result<InputStream, ShellError> {
let path = context.shell_manager.path();
let input = trace_stream!(target: "nu::trace_stream::external::stdin", "input" = input);
let mut command_args = vec![];
for arg in command.args.iter() {
let value = evaluate_baseline_expr(arg, &context.registry, scope)?;
// Skip any arguments that don't really exist, treating them as optional
// FIXME: we may want to preserve the gap in the future, though it's hard to say
// what value we would put in its place.
if value.value.is_none() {
continue;
}
// Do the cleanup that we need to do on any argument going out:
let trimmed_value_string = value.as_string()?.trim_end_matches('\n').to_string();
let value_string;
#[cfg(not(windows))]
{
value_string = trimmed_value_string
.replace('$', "\\$")
.replace('"', "\\\"")
.to_string()
}
#[cfg(windows)]
{
value_string = trimmed_value_string
}
command_args.push(value_string);
}
let process_args = command_args
.iter()
.map(|arg| {
let arg = expand_tilde(arg.deref(), dirs::home_dir);
#[cfg(not(windows))]
{
if argument_contains_whitespace(&arg) && !argument_is_quoted(&arg) {
add_quotes(&arg)
} else {
arg.as_ref().to_string()
}
}
#[cfg(windows)]
{
if let Some(unquoted) = remove_quotes(&arg) {
unquoted.to_string()
} else {
arg.as_ref().to_string()
}
}
})
.collect::<Vec<String>>();
spawn(&command, &path, &process_args[..], input, is_last, scope)
}
fn spawn(
command: &ExternalCommand,
path: &str,
args: &[String],
input: InputStream,
is_last: bool,
scope: &Scope,
) -> Result<InputStream, ShellError> {
let command = command.clone();
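// The external program is launched through the platform shell: `cmd /c` on Windows and
// `sh -c` elsewhere, so the argument string assembled above is interpreted the way users
// expect from their shell.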
let mut process = {
#[cfg(windows)]
{
let mut process = Command::new("cmd");
process.arg("/c");
process.arg(&command.name);
for arg in args {
// Clean the args before we use them:
let arg = arg.replace("|", "\\|");
process.arg(&arg);
}
process
}
#[cfg(not(windows))]
{
let cmd_with_args = vec![command.name.clone(), args.join(" ")].join(" ");
let mut process = Command::new("sh");
process.arg("-c").arg(cmd_with_args);
process
}
};
process.current_dir(path);
trace!(target: "nu::run::external", "cwd = {:?}", &path);
process.env_clear();
process.envs(scope.env.iter());
// We want stdout regardless of what
// we are doing ($it case or pipe stdin)
if !is_last {
process.stdout(Stdio::piped());
trace!(target: "nu::run::external", "set up stdout pipe");
}
// open since we have some contents for stdin
if !input.is_empty() {
process.stdin(Stdio::piped());
trace!(target: "nu::run::external", "set up stdin pipe");
}
trace!(target: "nu::run::external", "built command {:?}", process);
// TODO Switch to async_std::process once it's stabilized
if let Ok(mut child) = process.spawn() {
let (tx, rx) = mpsc::sync_channel(0);
let mut stdin = child.stdin.take();
let stdin_write_tx = tx.clone();
let stdout_read_tx = tx;
let stdin_name_tag = command.name_tag.clone();
let stdout_name_tag = command.name_tag;
std::thread::spawn(move || {
if !input.is_empty() {
let mut stdin_write = stdin
.take()
.expect("Internal error: could not get stdin pipe for external command");
for value in block_on_stream(input) {
match &value.value {
UntaggedValue::Primitive(Primitive::Nothing) => continue,
UntaggedValue::Primitive(Primitive::String(s))
| UntaggedValue::Primitive(Primitive::Line(s)) => {
if let Err(e) = stdin_write.write_all(s.as_bytes()) {
let message = format!("Unable to write to stdin (error = {})", e);
let _ = stdin_write_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
message,
"application may have closed before completing pipeline",
&stdin_name_tag,
)),
tag: stdin_name_tag,
}));
return Err(());
}
}
UntaggedValue::Primitive(Primitive::Binary(b)) => {
if let Err(e) = stdin_write.write_all(b) {
let message = format!("Unable to write to stdin (error = {})", e);
let _ = stdin_write_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
message,
"application may have closed before completing pipeline",
&stdin_name_tag,
)),
tag: stdin_name_tag,
}));
return Err(());
}
}
unsupported => {
let _ = stdin_write_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
format!(
"Received unexpected type from pipeline ({})",
unsupported.type_name()
),
"expected a string",
stdin_name_tag.clone(),
)),
tag: stdin_name_tag,
}));
return Err(());
}
};
}
}
Ok(())
});
std::thread::spawn(move || {
if !is_last {
let stdout = if let Some(stdout) = child.stdout.take() {
stdout
} else {
let _ = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
"Can't redirect the stdout for external command",
"can't redirect stdout",
&stdout_name_tag,
)),
tag: stdout_name_tag,
}));
return Err(());
};
let file = futures::io::AllowStdIo::new(stdout);
let stream = FramedRead::new(file, MaybeTextCodec);
for line in block_on_stream(stream) {
match line {
Ok(line) => match line {
StringOrBinary::String(s) => {
let result = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Primitive(Primitive::String(s.clone())),
tag: stdout_name_tag.clone(),
}));
if result.is_err() {
break;
}
}
StringOrBinary::Binary(b) => {
let result = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Primitive(Primitive::Binary(
b.into_iter().collect(),
)),
tag: stdout_name_tag.clone(),
}));
if result.is_err() {
break;
}
}
},
Err(e) => {
// If there's an exit status, it makes sense that we may error when
// trying to read from its stdout pipe (likely been closed). In that
// case, don't emit an error.
let should_error = match child.wait() {
Ok(exit_status) => !exit_status.success(),
Err(_) => true,
};
if should_error {
let _ = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
format!("Unable to read from stdout ({})", e),
"unable to read from stdout",
&stdout_name_tag,
)),
tag: stdout_name_tag.clone(),
}));
}
return Ok(());
}
}
}
}
// We can give an error when we see a non-zero exit code, but this is different
// than what other shells will do.
let external_failed = match child.wait() {
Err(_) => true,
Ok(exit_status) => !exit_status.success(),
};
if external_failed {
let cfg = crate::data::config::config(Tag::unknown());
if let Ok(cfg) = cfg {
if cfg.contains_key("nonzero_exit_errors") {
let _ = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
"External command failed",
"command failed",
&stdout_name_tag,
)),
tag: stdout_name_tag.clone(),
}));
}
}
let _ = stdout_read_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::external_non_zero()),
tag: stdout_name_tag,
}));
}
Ok(())
});
let stream = ThreadedReceiver::new(rx);
Ok(stream.to_input_stream())
} else {
Err(ShellError::labeled_error(
"Failed to spawn process",
"failed to spawn",
&command.name_tag,
))
}
}
async fn did_find_command(name: &str) -> bool {
#[cfg(not(windows))]
{
which::which(name).is_ok()
}
#[cfg(windows)]
{
if which::which(name).is_ok() {
true
} else {
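// `which` cannot see cmd.exe built-ins, so fall back to a fixed list of them here.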
let cmd_builtins = [
"call", "cls", "color", "date", "dir", "echo", "find", "hostname", "pause",
"start", "time", "title", "ver", "copy", "mkdir", "rename", "rd", "rmdir", "type",
"mklink",
];
cmd_builtins.contains(&name)
}
}
}
fn expand_tilde<SI: ?Sized, P, HD>(input: &SI, home_dir: HD) -> std::borrow::Cow<str>
where
SI: AsRef<str>,
P: AsRef<std::path::Path>,
HD: FnOnce() -> Option<P>,
{
shellexpand::tilde_with_context(input, home_dir)
}
#[allow(unused)]
pub fn argument_contains_whitespace(argument: &str) -> bool {
argument.chars().any(|c| c.is_whitespace())
}
fn argument_is_quoted(argument: &str) -> bool {
if argument.len() < 2 {
return false;
}
(argument.starts_with('"') && argument.ends_with('"'))
|| (argument.starts_with('\'') && argument.ends_with('\''))
}
#[allow(unused)]
fn add_quotes(argument: &str) -> String {
format!("\"{}\"", argument)
}
#[allow(unused)]
fn remove_quotes(argument: &str) -> Option<&str> {
if !argument_is_quoted(argument) {
return None;
}
let size = argument.len();
Some(&argument[1..size - 1])
}
#[allow(unused)]
fn shell_os_paths() -> Vec<std::path::PathBuf> {
let mut original_paths = vec![];
if let Some(paths) = std::env::var_os("PATH") {
original_paths = std::env::split_paths(&paths).collect::<Vec<_>>();
}
original_paths
}
#[cfg(test)]
mod tests {
use super::{
add_quotes, argument_contains_whitespace, argument_is_quoted, expand_tilde, remove_quotes,
run_external_command, Context, InputStream,
};
use futures::executor::block_on;
use nu_errors::ShellError;
use nu_protocol::Scope;
use nu_test_support::commands::ExternalBuilder;
// async fn read(mut stream: OutputStream) -> Option<Value> {
// match stream.try_next().await {
// Ok(val) => {
// if let Some(val) = val {
// val.raw_value()
// } else {
// None
// }
// }
// Err(_) => None,
// }
// }
async fn non_existent_run() -> Result<(), ShellError> {
let cmd = ExternalBuilder::for_name("i_dont_exist.exe").build();
let input = InputStream::empty();
let mut ctx = Context::basic().expect("There was a problem creating a basic context.");
assert!(
run_external_command(cmd, &mut ctx, input, &Scope::empty(), false)
.await
.is_err()
);
Ok(())
}
// async fn failure_run() -> Result<(), ShellError> {
// let cmd = ExternalBuilder::for_name("fail").build();
// let mut ctx = Context::basic().expect("There was a problem creating a basic context.");
// let stream = run_external_command(cmd, &mut ctx, None, false)
// .await?
// .expect("There was a problem running the external command.");
// match read(stream.into()).await {
// Some(Value {
// value: UntaggedValue::Error(_),
// ..
// }) => {}
// None | _ => panic!("Command didn't fail."),
// }
// Ok(())
// }
// #[test]
// fn identifies_command_failed() -> Result<(), ShellError> {
// block_on(failure_run())
// }
#[test]
fn identifies_command_not_found() -> Result<(), ShellError> {
block_on(non_existent_run())
}
#[test]
fn checks_contains_whitespace_from_argument_to_be_passed_in() {
assert_eq!(argument_contains_whitespace("andrés"), false);
assert_eq!(argument_contains_whitespace("and rés"), true);
assert_eq!(argument_contains_whitespace(r#"and\ rés"#), true);
}
#[test]
fn checks_quotes_from_argument_to_be_passed_in() {
assert_eq!(argument_is_quoted(""), false);
assert_eq!(argument_is_quoted("'"), false);
assert_eq!(argument_is_quoted("'a"), false);
assert_eq!(argument_is_quoted("a"), false);
assert_eq!(argument_is_quoted("a'"), false);
assert_eq!(argument_is_quoted("''"), true);
assert_eq!(argument_is_quoted(r#"""#), false);
assert_eq!(argument_is_quoted(r#""a"#), false);
assert_eq!(argument_is_quoted(r#"a"#), false);
assert_eq!(argument_is_quoted(r#"a""#), false);
assert_eq!(argument_is_quoted(r#""""#), true);
assert_eq!(argument_is_quoted("'andrés"), false);
assert_eq!(argument_is_quoted("andrés'"), false);
assert_eq!(argument_is_quoted(r#""andrés"#), false);
assert_eq!(argument_is_quoted(r#"andrés""#), false);
assert_eq!(argument_is_quoted("'andrés'"), true);
assert_eq!(argument_is_quoted(r#""andrés""#), true);
}
#[test]
fn adds_quotes_to_argument_to_be_passed_in() {
assert_eq!(add_quotes("andrés"), "\"andrés\"");
//assert_eq!(add_quotes("\"andrés\""), "\"andrés\"");
}
#[test]
fn strips_quotes_from_argument_to_be_passed_in() {
assert_eq!(remove_quotes(""), None);
assert_eq!(remove_quotes("'"), None);
assert_eq!(remove_quotes("'a"), None);
assert_eq!(remove_quotes("a"), None);
assert_eq!(remove_quotes("a'"), None);
assert_eq!(remove_quotes("''"), Some(""));
assert_eq!(remove_quotes(r#"""#), None);
assert_eq!(remove_quotes(r#""a"#), None);
assert_eq!(remove_quotes(r#"a"#), None);
assert_eq!(remove_quotes(r#"a""#), None);
assert_eq!(remove_quotes(r#""""#), Some(""));
assert_eq!(remove_quotes("'andrés"), None);
assert_eq!(remove_quotes("andrés'"), None);
assert_eq!(remove_quotes(r#""andrés"#), None);
assert_eq!(remove_quotes(r#"andrés""#), None);
assert_eq!(remove_quotes("'andrés'"), Some("andrés"));
assert_eq!(remove_quotes(r#""andrés""#), Some("andrés"));
}
#[test]
fn expands_tilde_if_starts_with_tilde_character() {
assert_eq!(
expand_tilde("~", || Some(std::path::Path::new("the_path_to_nu_light"))),
"the_path_to_nu_light"
);
}
#[test]
fn does_not_expand_tilde_if_tilde_is_not_first_character() {
assert_eq!(
expand_tilde("1~1", || Some(std::path::Path::new("the_path_to_nu_light"))),
"1~1"
);
}
}
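
For orientation, here is a minimal sketch of quote helpers that would satisfy the assertions above. The real implementations live in the externals module these tests import from, so treat the bodies below as assumptions rather than the shipped code:

// Hypothetical sketch only; names mirror the imports used by the tests above.
fn argument_is_quoted(arg: &str) -> bool {
    if arg.len() < 2 {
        return false;
    }
    (arg.starts_with('\'') && arg.ends_with('\''))
        || (arg.starts_with('"') && arg.ends_with('"'))
}

fn remove_quotes(arg: &str) -> Option<&str> {
    if !argument_is_quoted(arg) {
        return None;
    }
    // Safe to slice: the surrounding quotes are single-byte ASCII characters.
    Some(&arg[1..arg.len() - 1])
}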

View File

@ -1,42 +1,39 @@
use crate::commands::command::whole_stream_command;
use crate::commands::run_alias::AliasCommand;
use crate::commands::UnevaluatedCallInfo;
use crate::prelude::*;
use log::{log_enabled, trace};
use nu_errors::ShellError;
use nu_parser::InternalCommand;
use nu_protocol::{CommandAction, Primitive, ReturnSuccess, UntaggedValue, Value};
use nu_protocol::hir::InternalCommand;
use nu_protocol::{CommandAction, Primitive, ReturnSuccess, Scope, UntaggedValue, Value};
use super::ClassifiedInputStream;
pub(crate) async fn run_internal_command(
pub(crate) fn run_internal_command(
command: InternalCommand,
context: &mut Context,
input: ClassifiedInputStream,
source: Text,
input: InputStream,
scope: &Scope,
) -> Result<InputStream, ShellError> {
if log_enabled!(log::Level::Trace) {
trace!(target: "nu::run::internal", "->");
trace!(target: "nu::run::internal", "{}", command.name);
trace!(target: "nu::run::internal", "{}", command.args.debug(&source));
}
let objects: InputStream =
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
let objects: InputStream = trace_stream!(target: "nu::trace_stream::internal", "input" = input);
let internal_command = context.expect_command(&command.name);
let result = {
context.run_command(
internal_command?,
command.name_tag.clone(),
Tag::unknown_anchor(command.name_span),
command.args.clone(),
&source,
scope,
objects,
)
};
let result = trace_out_stream!(target: "nu::trace_stream::internal", "output" = result);
let mut result = result.values;
let mut result = trace_out_stream!(target: "nu::trace_stream::internal", "output" = result);
let mut context = context.clone();
let scope = scope.clone();
let stream = async_stream! {
let mut soft_errs: Vec<ShellError> = vec![];
@ -46,31 +43,32 @@ pub(crate) async fn run_internal_command(
match item {
Ok(ReturnSuccess::Action(action)) => match action {
CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path)?;
context.shell_manager.set_path(path);
}
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
CommandAction::Error(err) => {
context.error(err)?;
context.error(err);
break;
}
CommandAction::AutoConvert(tagged_contents, extension) => {
let contents_tag = tagged_contents.tag.clone();
let command_name = format!("from-{}", extension);
let command_name = format!("from {}", extension);
let command = command.clone();
if let Some(converter) = context.registry.get_command(&command_name)? {
if let Some(converter) = context.registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: context.host.clone(),
ctrl_c: context.ctrl_c.clone(),
shell_manager: context.shell_manager.clone(),
call_info: UnevaluatedCallInfo {
args: nu_parser::hir::Call {
args: nu_protocol::hir::Call {
head: command.args.head,
positional: None,
named: None,
span: Span::unknown()
span: Span::unknown(),
is_last: false,
},
source: source.clone(),
name_tag: command.name_tag,
name_tag: Tag::unknown_anchor(command.name_span),
scope: scope.clone(),
}
};
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &context.registry);
@ -99,48 +97,62 @@ pub(crate) async fn run_internal_command(
value: UntaggedValue::Primitive(Primitive::String(cmd)),
tag,
} => {
let result = context.shell_manager.insert_at_current(Box::new(
context.shell_manager.insert_at_current(Box::new(
HelpShell::for_command(
UntaggedValue::string(cmd).into_value(tag),
&context.registry(),
)?,
));
result?
}
_ => {
let result = context.shell_manager.insert_at_current(Box::new(
context.shell_manager.insert_at_current(Box::new(
HelpShell::index(&context.registry())?,
));
result?
}
}
}
CommandAction::EnterValueShell(value) => {
context
.shell_manager
.insert_at_current(Box::new(ValueShell::new(value)))?;
.insert_at_current(Box::new(ValueShell::new(value)));
}
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone()),
))?;
FilesystemShell::with_location(location, context.registry().clone())?,
));
}
CommandAction::AddAlias(name, args, block) => {
context.add_commands(vec![
whole_stream_command(AliasCommand::new(
name,
args,
block,
))
]);
}
CommandAction::PreviousShell => {
context.shell_manager.prev()?;
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next()?;
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current()?;
if context.shell_manager.is_empty()? {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
}
}
},
Ok(ReturnSuccess::Value(Value {
value: UntaggedValue::Error(err),
..
})) => {
context.error(err.clone());
yield Err(err);
break;
}
Ok(ReturnSuccess::Value(v)) => {
yielded = true;
yield Ok(v);
@ -153,7 +165,7 @@ pub(crate) async fn run_internal_command(
let mut buffer = termcolor::Buffer::ansi();
let _ = doc.render_raw(
context.with_host(|host| host.width() - 5)?,
context.with_host(|host| host.width() - 5),
&mut nu_source::TermColored::new(&mut buffer),
);
@ -163,7 +175,7 @@ pub(crate) async fn run_internal_command(
}
Err(err) => {
context.error(err)?;
context.error(err);
break;
}
}

View File

@ -0,0 +1,8 @@
pub(crate) mod block;
mod dynamic;
pub(crate) mod expr;
pub(crate) mod external;
pub(crate) mod internal;
#[allow(unused_imports)]
pub(crate) use dynamic::Command as DynamicCommand;

View File

@ -0,0 +1,46 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::Signature;
use std::process::Command;
pub struct Clear;
impl WholeStreamCommand for Clear {
fn name(&self) -> &str {
"clear"
}
fn signature(&self) -> Signature {
Signature::build("clear")
}
fn usage(&self) -> &str {
"clears the terminal"
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
clear(args, registry)
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Clear the screen",
example: "clear",
}]
}
}
fn clear(_args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
if cfg!(windows) {
Command::new("cmd")
.args(&["/C", "cls"])
.status()
.expect("failed to execute process");
} else if cfg!(unix) {
Command::new("/bin/sh")
.args(&["-c", "clear"])
.status()
.expect("failed to execute process");
}
Ok(OutputStream::empty())
}

View File

@ -34,6 +34,13 @@ pub mod clipboard {
) -> Result<OutputStream, ShellError> {
args.process(registry, clip)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Save text to the clipboard",
example: "echo 'secret value' | clip",
}]
}
}
pub fn clip(
@ -41,7 +48,7 @@ pub mod clipboard {
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let values: Vec<Value> = input.collect().await;
let mut clip_stream = inner_clip(values, name).await;
while let Some(value) = clip_stream.next().await {

View File

@ -1,3 +1,4 @@
use crate::commands::help::get_help;
use crate::context::CommandRegistry;
use crate::deserializer::ConfigDeserializer;
use crate::evaluate::evaluate_args::evaluate_args;
@ -5,7 +6,7 @@ use crate::prelude::*;
use derive_new::new;
use getset::Getters;
use nu_errors::ShellError;
use nu_parser::hir;
use nu_protocol::hir;
use nu_protocol::{CallInfo, EvaluatedArgs, ReturnValue, Scope, Signature, Value};
use serde::{Deserialize, Serialize};
use std::ops::Deref;
@ -14,50 +15,38 @@ use std::sync::atomic::AtomicBool;
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct UnevaluatedCallInfo {
pub args: hir::Call,
pub source: Text,
pub name_tag: Tag,
pub scope: Scope,
}
impl UnevaluatedCallInfo {
pub fn evaluate(
self,
registry: &CommandRegistry,
scope: &Scope,
) -> Result<CallInfo, ShellError> {
let args = evaluate_args(&self.args, registry, scope, &self.source)?;
pub fn evaluate(self, registry: &CommandRegistry) -> Result<CallInfo, ShellError> {
let args = evaluate_args(&self.args, registry, &self.scope)?;
Ok(CallInfo {
args,
name_tag: self.name_tag,
})
}
}
pub trait CallInfoExt {
fn process<'de, T: Deserialize<'de>>(
&self,
shell_manager: &ShellManager,
callback: fn(T, &RunnablePerItemContext) -> Result<OutputStream, ShellError>,
) -> Result<RunnablePerItemArgs<T>, ShellError>;
}
pub fn evaluate_with_new_it(
self,
registry: &CommandRegistry,
it: &Value,
) -> Result<CallInfo, ShellError> {
let mut scope = self.scope.clone();
scope = scope.set_it(it.clone());
let args = evaluate_args(&self.args, registry, &scope)?;
impl CallInfoExt for CallInfo {
fn process<'de, T: Deserialize<'de>>(
&self,
shell_manager: &ShellManager,
callback: fn(T, &RunnablePerItemContext) -> Result<OutputStream, ShellError>,
) -> Result<RunnablePerItemArgs<T>, ShellError> {
let mut deserializer = ConfigDeserializer::from_call_info(self.clone());
Ok(RunnablePerItemArgs {
args: T::deserialize(&mut deserializer)?,
context: RunnablePerItemContext {
shell_manager: shell_manager.clone(),
name: self.name_tag.clone(),
},
callback,
Ok(CallInfo {
args,
name_tag: self.name_tag,
})
}
pub fn switch_present(&self, switch: &str) -> bool {
self.args.switch_preset(switch)
}
}
#[derive(Getters)]
@ -80,7 +69,7 @@ pub struct RawCommandArgs {
}
impl RawCommandArgs {
pub fn with_input(self, input: Vec<Value>) -> CommandArgs {
pub fn with_input(self, input: impl Into<InputStream>) -> CommandArgs {
CommandArgs {
host: self.host,
ctrl_c: self.ctrl_c,
@ -106,7 +95,7 @@ impl CommandArgs {
let ctrl_c = self.ctrl_c.clone();
let shell_manager = self.shell_manager.clone();
let input = self.input;
let call_info = self.call_info.evaluate(registry, &Scope::empty())?;
let call_info = self.call_info.evaluate(registry)?;
Ok(EvaluatedWholeStreamCommandArgs::new(
host,
@ -126,7 +115,12 @@ impl CommandArgs {
let ctrl_c = self.ctrl_c.clone();
let shell_manager = self.shell_manager.clone();
let input = self.input;
let call_info = self.call_info.evaluate(registry, scope)?;
let call_info = UnevaluatedCallInfo {
name_tag: self.call_info.name_tag,
args: self.call_info.args,
scope: scope.clone(),
};
let call_info = call_info.evaluate(registry)?;
Ok(EvaluatedWholeStreamCommandArgs::new(
host,
@ -137,10 +131,6 @@ impl CommandArgs {
))
}
pub fn source(&self) -> Text {
self.call_info.source.clone()
}
pub fn process<'de, T: Deserialize<'de>, O: ToOutputStream>(
self,
registry: &CommandRegistry,
@ -148,7 +138,6 @@ impl CommandArgs {
) -> Result<RunnableArgs<T, O>, ShellError> {
let shell_manager = self.shell_manager.clone();
let host = self.host.clone();
let source = self.source();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
@ -160,8 +149,7 @@ impl CommandArgs {
args: T::deserialize(&mut deserializer)?,
context: RunnableContext {
input,
commands: registry.clone(),
source,
registry: registry.clone(),
shell_manager,
name: name_tag,
host,
@ -185,7 +173,6 @@ impl CommandArgs {
let shell_manager = self.shell_manager.clone();
let host = self.host.clone();
let source = self.source();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
@ -198,8 +185,7 @@ impl CommandArgs {
args: T::deserialize(&mut deserializer)?,
context: RunnableContext {
input,
commands: registry.clone(),
source,
registry: registry.clone(),
shell_manager,
name: name_tag,
host,
@ -211,36 +197,18 @@ impl CommandArgs {
}
}
pub struct RunnablePerItemContext {
pub shell_manager: ShellManager,
pub name: Tag,
}
pub struct RunnableContext {
pub input: InputStream,
pub shell_manager: ShellManager,
pub host: Arc<parking_lot::Mutex<Box<dyn Host>>>,
pub source: Text,
pub ctrl_c: Arc<AtomicBool>,
pub commands: CommandRegistry,
pub registry: CommandRegistry,
pub name: Tag,
}
impl RunnableContext {
pub fn get_command(&self, name: &str) -> Result<Option<Arc<Command>>, ShellError> {
self.commands.get_command(name)
}
}
pub struct RunnablePerItemArgs<T> {
args: T,
context: RunnablePerItemContext,
callback: fn(T, &RunnablePerItemContext) -> Result<OutputStream, ShellError>,
}
impl<T> RunnablePerItemArgs<T> {
pub fn run(self) -> Result<OutputStream, ShellError> {
(self.callback)(self.args, &self.context)
pub fn get_command(&self, name: &str) -> Option<Command> {
self.registry.get_command(name)
}
}
@ -365,6 +333,14 @@ impl EvaluatedCommandArgs {
self.call_info.args.nth(pos)
}
/// Get the nth positional argument, error if not possible
pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
match self.call_info.args.nth(pos) {
None => Err(ShellError::unimplemented("Better error: expect_nth")),
Some(item) => Ok(item),
}
}
pub fn get(&self, name: &str) -> Option<&Value> {
self.call_info.args.get(name)
}
@ -374,6 +350,11 @@ impl EvaluatedCommandArgs {
}
}
pub struct Example {
pub example: &'static str,
pub description: &'static str,
}
pub trait WholeStreamCommand: Send + Sync {
fn name(&self) -> &str;
@ -392,139 +373,64 @@ pub trait WholeStreamCommand: Send + Sync {
fn is_binary(&self) -> bool {
false
}
}
pub trait PerItemCommand: Send + Sync {
fn name(&self) -> &str;
fn signature(&self) -> Signature {
Signature::new(self.name()).desc(self.usage()).filter()
}
fn usage(&self) -> &str;
fn run(
&self,
call_info: &CallInfo,
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
input: Value,
) -> Result<OutputStream, ShellError>;
fn is_binary(&self) -> bool {
false
fn examples(&self) -> &[Example] {
&[]
}
}
pub enum Command {
WholeStream(Arc<dyn WholeStreamCommand>),
PerItem(Arc<dyn PerItemCommand>),
}
#[derive(Clone)]
pub struct Command(Arc<dyn WholeStreamCommand>);
impl PrettyDebugWithSource for Command {
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
match self {
Command::WholeStream(command) => b::typed(
"whole stream command",
b::description(command.name())
+ b::space()
+ b::equals()
+ b::space()
+ command.signature().pretty_debug(source),
),
Command::PerItem(command) => b::typed(
"per item command",
b::description(command.name())
+ b::space()
+ b::equals()
+ b::space()
+ command.signature().pretty_debug(source),
),
}
b::typed(
"whole stream command",
b::description(self.name())
+ b::space()
+ b::equals()
+ b::space()
+ self.signature().pretty_debug(source),
)
}
}
impl std::fmt::Debug for Command {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()),
Command::PerItem(command) => write!(f, "PerItem({})", command.name()),
}
write!(f, "Command({})", self.name())
}
}
impl Command {
pub fn name(&self) -> &str {
match self {
Command::WholeStream(command) => command.name(),
Command::PerItem(command) => command.name(),
}
self.0.name()
}
pub fn signature(&self) -> Signature {
match self {
Command::WholeStream(command) => command.signature(),
Command::PerItem(command) => command.signature(),
}
self.0.signature()
}
pub fn usage(&self) -> &str {
match self {
Command::WholeStream(command) => command.usage(),
Command::PerItem(command) => command.usage(),
}
self.0.usage()
}
pub fn run(&self, args: CommandArgs, registry: &CommandRegistry) -> OutputStream {
match self {
Command::WholeStream(command) => match command.run(args, registry) {
if args.call_info.switch_present("help") {
get_help(&*self.0, registry).into()
} else {
match self.0.run(args, registry) {
Ok(stream) => stream,
Err(err) => OutputStream::one(Err(err)),
},
Command::PerItem(command) => self.run_helper(command.clone(), args, registry.clone()),
}
}
}
fn run_helper(
&self,
command: Arc<dyn PerItemCommand>,
args: CommandArgs,
registry: CommandRegistry,
) -> OutputStream {
let raw_args = RawCommandArgs {
host: args.host,
ctrl_c: args.ctrl_c,
shell_manager: args.shell_manager,
call_info: args.call_info,
};
let out = args
.input
.values
.map(move |x| {
let call_info = raw_args
.clone()
.call_info
.evaluate(&registry, &Scope::it_value(x.clone()));
match call_info {
Ok(call_info) => match command.run(&call_info, &registry, &raw_args, x) {
Ok(o) => o,
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
},
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
}
})
.flatten();
out.to_output_stream()
}
pub fn is_binary(&self) -> bool {
match self {
Command::WholeStream(command) => command.is_binary(),
Command::PerItem(command) => command.is_binary(),
}
self.0.is_binary()
}
pub fn stream_command(&self) -> &dyn WholeStreamCommand {
&*self.0
}
}
@ -559,9 +465,9 @@ impl WholeStreamCommand for FnFilterCommand {
let registry: CommandRegistry = registry.clone();
let func = self.func;
let result = input.values.map(move |it| {
let result = input.map(move |it| {
let registry = registry.clone();
let call_info = match call_info.clone().evaluate(&registry, &Scope::it_value(it)) {
let call_info = match call_info.clone().evaluate_with_new_it(&registry, &it) {
Err(err) => return OutputStream::from(vec![Err(err)]).values,
Ok(args) => args,
};
@ -586,10 +492,6 @@ impl WholeStreamCommand for FnFilterCommand {
}
}
pub fn whole_stream_command(command: impl WholeStreamCommand + 'static) -> Arc<Command> {
Arc::new(Command::WholeStream(Arc::new(command)))
}
pub fn per_item_command(command: impl PerItemCommand + 'static) -> Arc<Command> {
Arc::new(Command::PerItem(Arc::new(command)))
pub fn whole_stream_command(command: impl WholeStreamCommand + 'static) -> Command {
Command(Arc::new(command))
}

View File

@ -33,13 +33,20 @@ impl WholeStreamCommand for Compact {
) -> Result<OutputStream, ShellError> {
args.process(registry, compact)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Remove all directory entries, except those with a 'target'",
example: "ls -af | compact target",
}]
}
}
pub fn compact(
CompactArgs { rest: columns }: CompactArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let objects = input.values.filter(move |item| {
let objects = input.filter(move |item| {
let keep = if columns.is_empty() {
item.is_some()
} else {

View File

@ -31,21 +31,34 @@ impl WholeStreamCommand for Config {
"load",
SyntaxShape::Path,
"load the config from the path give",
Some('l'),
)
.named(
"set",
SyntaxShape::Any,
"set a value in the config, eg) --set [key value]",
Some('s'),
)
.named(
"set_into",
SyntaxShape::Member,
SyntaxShape::String,
"sets a variable from values in the pipeline",
Some('i'),
)
.named("get", SyntaxShape::Any, "get a value from the config")
.named("remove", SyntaxShape::Any, "remove a value from the config")
.switch("clear", "clear the config")
.switch("path", "return the path to the config file")
.named(
"get",
SyntaxShape::Any,
"get a value from the config",
Some('g'),
)
.named(
"remove",
SyntaxShape::Any,
"remove a value from the config",
Some('r'),
)
.switch("clear", "clear the config", Some('c'))
.switch("path", "return the path to the config file", Some('p'))
}
fn usage(&self) -> &str {
@ -59,6 +72,39 @@ impl WholeStreamCommand for Config {
) -> Result<OutputStream, ShellError> {
args.process(registry, config)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "See all config values",
example: "config",
},
Example {
description: "Set completion_mode to circular",
example: "config --set [completion_mode circular]",
},
Example {
description: "Store the contents of the pipeline as a path",
example: "echo ['/usr/bin' '/bin'] | config --set_into path",
},
Example {
description: "Get the current startup commands",
example: "config --get startup",
},
Example {
description: "Remove the startup commands",
example: "config --remove startup",
},
Example {
description: "Clear the config (be careful!)",
example: "config --clear",
},
Example {
description: "Get the path to the current config file",
example: "config --path",
},
]
}
}
pub fn config(
@ -111,7 +157,7 @@ pub fn config(
yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(&value.tag));
}
else if let Some(v) = set_into {
let rows: Vec<Value> = input.values.collect().await;
let rows: Vec<Value> = input.collect().await;
let key = v.to_string();
if rows.len() == 0 {

View File

@ -20,7 +20,7 @@ impl WholeStreamCommand for Count {
}
fn usage(&self) -> &str {
"Show the total number of rows."
"Show the total number of rows or items."
}
fn run(
@ -30,6 +30,13 @@ impl WholeStreamCommand for Count {
) -> Result<OutputStream, ShellError> {
args.process(registry, count)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Count the number of files/directories in the current directory",
example: "ls | count",
}]
}
}
pub fn count(
@ -37,7 +44,7 @@ pub fn count(
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let rows: Vec<Value> = input.values.collect().await;
let rows: Vec<Value> = input.collect().await;
yield ReturnSuccess::value(UntaggedValue::int(rows.len()).into_value(name))
};

View File

@ -0,0 +1,63 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Signature, SyntaxShape};
use nu_source::Tagged;
use std::path::PathBuf;
pub struct Cpy;
#[derive(Deserialize)]
pub struct CopyArgs {
pub src: Tagged<PathBuf>,
pub dst: Tagged<PathBuf>,
pub recursive: Tagged<bool>,
}
impl WholeStreamCommand for Cpy {
fn name(&self) -> &str {
"cp"
}
fn signature(&self) -> Signature {
Signature::build("cp")
.required("src", SyntaxShape::Pattern, "the place to copy from")
.required("dst", SyntaxShape::Path, "the place to copy to")
.switch(
"recursive",
"copy recursively through subdirectories",
Some('r'),
)
}
fn usage(&self) -> &str {
"Copy files."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, cp)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Copy myfile to dir_b",
example: "cp myfile dir_b",
},
Example {
description: "Recursively copy dir_a to dir_b",
example: "cp -r dir_a dir_b",
},
]
}
}
pub fn cp(args: CopyArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
let shell_manager = context.shell_manager.clone();
shell_manager.cp(args, &context)
}

View File

@ -18,8 +18,8 @@ impl WholeStreamCommand for Date {
fn signature(&self) -> Signature {
Signature::build("date")
.switch("utc", "use universal time (UTC)")
.switch("local", "use the local time")
.switch("utc", "use universal time (UTC)", Some('u'))
.switch("local", "use the local time", Some('l'))
}
fn usage(&self) -> &str {
@ -33,6 +33,19 @@ impl WholeStreamCommand for Date {
) -> Result<OutputStream, ShellError> {
date(args, registry)
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Get the current local time and date",
example: "date",
},
Example {
description: "Get the current UTC time and date",
example: "date --utc",
},
]
}
}
pub fn date_to_value<T: TimeZone>(dt: DateTime<T>, tag: Tag) -> Value
@ -91,5 +104,5 @@ pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
date_out.push_back(value);
Ok(date_out.to_output_stream())
Ok(futures::stream::iter(date_out).to_output_stream())
}

View File

@ -6,7 +6,9 @@ use nu_protocol::{ReturnSuccess, Signature, UntaggedValue};
pub struct Debug;
#[derive(Deserialize)]
pub struct DebugArgs {}
pub struct DebugArgs {
raw: bool,
}
impl WholeStreamCommand for Debug {
fn name(&self) -> &str {
@ -14,7 +16,7 @@ impl WholeStreamCommand for Debug {
}
fn signature(&self) -> Signature {
Signature::build("debug")
Signature::build("debug").switch("raw", "Prints the raw value representation.", Some('r'))
}
fn usage(&self) -> &str {
@ -31,13 +33,18 @@ impl WholeStreamCommand for Debug {
}
fn debug_value(
_args: DebugArgs,
DebugArgs { raw }: DebugArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<impl ToOutputStream, ShellError> {
Ok(input
.values
.map(|v| {
ReturnSuccess::value(UntaggedValue::string(format!("{:?}", v)).into_untagged_value())
.map(move |v| {
if raw {
ReturnSuccess::value(
UntaggedValue::string(format!("{:#?}", v)).into_untagged_value(),
)
} else {
ReturnSuccess::debug_value(v)
}
})
.to_output_stream())
}
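In effect, "debug --raw" now emits the {:#?} pretty-printed form of each value, while plain "debug" hands each value to ReturnSuccess::debug_value.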

View File

@ -40,6 +40,13 @@ impl WholeStreamCommand for Default {
) -> Result<OutputStream, ShellError> {
args.process(registry, default)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Give a default 'target' to all file entries",
example: "ls -af | default target 'nothing'",
}]
}
}
fn default(
@ -47,7 +54,6 @@ fn default(
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = input
.values
.map(move |item| {
let mut result = VecDeque::new();
@ -67,7 +73,8 @@ fn default(
} else {
result.push_back(ReturnSuccess::value(item));
}
result
futures::stream::iter(result)
})
.flatten();

View File

@ -0,0 +1,73 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, Value};
use nu_source::Tagged;
pub struct Drop;
#[derive(Deserialize)]
pub struct DropArgs {
rows: Option<Tagged<u64>>,
}
impl WholeStreamCommand for Drop {
fn name(&self) -> &str {
"drop"
}
fn signature(&self) -> Signature {
Signature::build("drop").optional(
"rows",
SyntaxShape::Number,
"starting from the back, the number of rows to drop",
)
}
fn usage(&self) -> &str {
"Drop the last number of rows."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, drop)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Remove the last item of a list/table",
example: "echo [1 2 3] | drop",
},
Example {
description: "Remove the last 2 items of a list/table",
example: "echo [1 2 3] | drop 2",
},
]
}
}
fn drop(DropArgs { rows }: DropArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let v: Vec<_> = context.input.into_vec().await;
let rows_to_drop = if let Some(quantity) = rows {
*quantity as usize
} else {
1
};
if rows_to_drop < v.len() {
let k = v.len() - rows_to_drop;
for x in v[0..k].iter() {
let y: Value = x.clone();
yield ReturnSuccess::value(y)
}
}
};
Ok(stream.to_output_stream())
}

View File

@ -0,0 +1,399 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use filesize::file_real_size_fast;
use glob::*;
use indexmap::map::IndexMap;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
use nu_source::Tagged;
use std::path::PathBuf;
const NAME: &str = "du";
const GLOB_PARAMS: MatchOptions = MatchOptions {
case_sensitive: true,
require_literal_separator: true,
require_literal_leading_dot: false,
};
pub struct Du;
#[derive(Deserialize, Clone)]
pub struct DuArgs {
path: Option<Tagged<PathBuf>>,
all: bool,
deref: bool,
exclude: Option<Tagged<String>>,
#[serde(rename = "max-depth")]
max_depth: Option<Tagged<u64>>,
#[serde(rename = "min-size")]
min_size: Option<Tagged<u64>>,
}
impl WholeStreamCommand for Du {
fn name(&self) -> &str {
NAME
}
fn signature(&self) -> Signature {
Signature::build(NAME)
.optional("path", SyntaxShape::Pattern, "starting directory")
.switch(
"all",
"Output file sizes as well as directory sizes",
Some('a'),
)
.switch(
"deref",
"Dereference symlinks to their targets for size",
Some('r'),
)
.named(
"exclude",
SyntaxShape::Pattern,
"Exclude these file names",
Some('x'),
)
.named(
"max-depth",
SyntaxShape::Int,
"Directory recursion limit",
Some('d'),
)
.named(
"min-size",
SyntaxShape::Int,
"Exclude files below this size",
Some('m'),
)
}
fn usage(&self) -> &str {
"Find disk usage sizes of specified items"
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, du)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Disk usage of the current directory",
example: "du *",
}]
}
}
fn du(args: DuArgs, ctx: RunnableContext) -> Result<OutputStream, ShellError> {
let tag = ctx.name.clone();
let exclude = args.exclude.map_or(Ok(None), move |x| {
Pattern::new(&x.item)
.map(Option::Some)
.map_err(|e| ShellError::labeled_error(e.msg, "glob error", x.tag.clone()))
})?;
let include_files = args.all;
let paths = match args.path {
Some(p) => {
let p = p.item.to_str().expect("Why isn't this encoded properly?");
glob::glob_with(p, GLOB_PARAMS)
}
None => glob::glob_with("*", GLOB_PARAMS),
}
.map_err(|e| ShellError::labeled_error(e.msg, "glob error", tag.clone()))?
.filter(move |p| {
if include_files {
true
} else {
match p {
Ok(f) if f.is_dir() => true,
Err(e) if e.path().is_dir() => true,
_ => false,
}
}
})
.map(|v| v.map_err(glob_err_into));
let ctrl_c = ctx.ctrl_c;
let all = args.all;
let deref = args.deref;
let max_depth = args.max_depth.map(|f| f.item);
let min_size = args.min_size.map(|f| f.item);
let params = DirBuilder {
tag: tag.clone(),
min: min_size,
deref,
exclude,
all,
};
let stream = futures::stream::iter(paths)
.interruptible(ctrl_c)
.map(move |path| match path {
Ok(p) => {
if p.is_dir() {
Ok(ReturnSuccess::Value(
DirInfo::new(p, &params, max_depth).into(),
))
} else {
FileInfo::new(p, deref, tag.clone()).map(|v| ReturnSuccess::Value(v.into()))
}
}
Err(e) => Err(e),
});
Ok(stream.to_output_stream())
}
pub struct DirBuilder {
tag: Tag,
min: Option<u64>,
deref: bool,
exclude: Option<Pattern>,
all: bool,
}
impl DirBuilder {
pub fn new(
tag: Tag,
min: Option<u64>,
deref: bool,
exclude: Option<Pattern>,
all: bool,
) -> DirBuilder {
DirBuilder {
tag,
min,
deref,
exclude,
all,
}
}
}
pub struct DirInfo {
dirs: Vec<DirInfo>,
files: Vec<FileInfo>,
errors: Vec<ShellError>,
size: u64,
blocks: u64,
path: PathBuf,
tag: Tag,
}
struct FileInfo {
path: PathBuf,
size: u64,
blocks: Option<u64>,
tag: Tag,
}
impl FileInfo {
fn new(path: impl Into<PathBuf>, deref: bool, tag: Tag) -> Result<Self, ShellError> {
let path = path.into();
let m = if deref {
std::fs::metadata(&path)
} else {
std::fs::symlink_metadata(&path)
};
match m {
Ok(d) => {
let block_size = file_real_size_fast(&path, &d).ok();
Ok(FileInfo {
path,
blocks: block_size,
size: d.len(),
tag,
})
}
Err(e) => Err(e.into()),
}
}
}
impl DirInfo {
pub fn new(path: impl Into<PathBuf>, params: &DirBuilder, depth: Option<u64>) -> Self {
let path = path.into();
let mut s = Self {
dirs: Vec::new(),
errors: Vec::new(),
files: Vec::new(),
size: 0,
blocks: 0,
tag: params.tag.clone(),
path,
};
match std::fs::read_dir(&s.path) {
Ok(d) => {
for f in d {
match f {
Ok(i) => match i.file_type() {
Ok(t) if t.is_dir() => s = s.add_dir(i.path(), depth, &params),
Ok(_t) => s = s.add_file(i.path(), &params),
Err(e) => s = s.add_error(e.into()),
},
Err(e) => s = s.add_error(e.into()),
}
}
}
Err(e) => s = s.add_error(e.into()),
}
s
}
fn add_dir(
mut self,
path: impl Into<PathBuf>,
mut depth: Option<u64>,
params: &DirBuilder,
) -> Self {
if let Some(current) = depth {
if let Some(new) = current.checked_sub(1) {
depth = Some(new);
} else {
return self;
}
}
let d = DirInfo::new(path, &params, depth);
self.size += d.size;
self.blocks += d.blocks;
self.dirs.push(d);
self
}
fn add_file(mut self, f: impl Into<PathBuf>, params: &DirBuilder) -> Self {
let f = f.into();
let include = params
.exclude
.as_ref()
.map_or(true, |x| !x.matches_path(&f));
if include {
match FileInfo::new(f, params.deref, self.tag.clone()) {
Ok(file) => {
let inc = params.min.map_or(true, |s| file.size >= s);
if inc {
self.size += file.size;
self.blocks += file.blocks.unwrap_or(0);
if params.all {
self.files.push(file);
}
}
}
Err(e) => self = self.add_error(e),
}
}
self
}
fn add_error(mut self, e: ShellError) -> Self {
self.errors.push(e);
self
}
pub fn get_size(&self) -> u64 {
self.size
}
}
fn glob_err_into(e: GlobError) -> ShellError {
let e = e.into_error();
ShellError::from(e)
}
fn value_from_vec<V>(vec: Vec<V>, tag: &Tag) -> Value
where
V: Into<Value>,
{
if vec.is_empty() {
UntaggedValue::nothing()
} else {
let values = vec.into_iter().map(Into::into).collect::<Vec<Value>>();
UntaggedValue::Table(values)
}
.retag(tag)
}
impl From<DirInfo> for Value {
fn from(d: DirInfo) -> Self {
let mut r: IndexMap<String, Value> = IndexMap::new();
r.insert(
"path".to_string(),
UntaggedValue::path(d.path).retag(&d.tag),
);
r.insert(
"apparent".to_string(),
UntaggedValue::bytes(d.size).retag(&d.tag),
);
r.insert(
"physical".to_string(),
UntaggedValue::bytes(d.blocks).retag(&d.tag),
);
r.insert("directories".to_string(), value_from_vec(d.dirs, &d.tag));
r.insert("files".to_string(), value_from_vec(d.files, &d.tag));
if !d.errors.is_empty() {
let v = UntaggedValue::Table(
d.errors
.into_iter()
.map(move |e| UntaggedValue::Error(e).into_untagged_value())
.collect::<Vec<Value>>(),
)
.retag(&d.tag);
r.insert("errors".to_string(), v);
}
Value {
value: UntaggedValue::row(r),
tag: d.tag,
}
}
}
impl From<FileInfo> for Value {
fn from(f: FileInfo) -> Self {
let mut r: IndexMap<String, Value> = IndexMap::new();
r.insert(
"path".to_string(),
UntaggedValue::path(f.path).retag(&f.tag),
);
r.insert(
"apparent".to_string(),
UntaggedValue::bytes(f.size).retag(&f.tag),
);
let b = f
.blocks
.map(UntaggedValue::bytes)
.unwrap_or_else(UntaggedValue::nothing)
.retag(&f.tag);
r.insert("physical".to_string(), b);
r.insert(
"directories".to_string(),
UntaggedValue::nothing().retag(&f.tag),
);
r.insert("files".to_string(), UntaggedValue::nothing().retag(&f.tag));
UntaggedValue::row(r).retag(&f.tag)
}
}
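
As a usage illustration, a hedged sketch of driving the same types outside the du command; Tag::unknown() stands in for a real span, and this snippet is not part of the diff:

// Hypothetical sketch: measure the current directory two levels deep,
// with no size floor (min = None), no symlink dereferencing (deref = false),
// no exclude pattern, and without recording individual files (all = false).
let params = DirBuilder::new(Tag::unknown(), None, false, None, false);
let info = DirInfo::new(".", &params, Some(2));
println!("apparent size: {} bytes", info.get_size());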

View File

@ -0,0 +1,109 @@
use crate::commands::classified::block::run_block;
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use futures::stream::once;
use nu_errors::ShellError;
use nu_protocol::{
hir::Block, hir::Expression, hir::SpannedExpression, hir::Synthetic, ReturnSuccess, Signature,
SyntaxShape,
};
pub struct Each;
#[derive(Deserialize)]
pub struct EachArgs {
block: Block,
}
impl WholeStreamCommand for Each {
fn name(&self) -> &str {
"each"
}
fn signature(&self) -> Signature {
Signature::build("each").required(
"block",
SyntaxShape::Block,
"the block to run on each row",
)
}
fn usage(&self) -> &str {
"Run a block on each row of the table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
Ok(args.process_raw(registry, each)?.run())
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Print the name of each file",
example: "ls | each { echo $it.name }",
}]
}
}
fn is_expanded_it_usage(head: &SpannedExpression) -> bool {
match &*head {
SpannedExpression {
expr: Expression::Synthetic(Synthetic::String(s)),
..
} if s == "expanded-each" => true,
_ => false,
}
}
fn each(
each_args: EachArgs,
context: RunnableContext,
raw_args: RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let block = each_args.block;
let scope = raw_args.call_info.scope.clone();
let registry = context.registry.clone();
let mut input_stream = context.input;
let stream = async_stream! {
while let Some(input) = input_stream.next().await {
let mut context = Context::from_raw(&raw_args, &registry);
let input_clone = input.clone();
let input_stream = if is_expanded_it_usage(&raw_args.call_info.args.head) {
InputStream::empty()
} else {
once(async { Ok(input) }).to_input_stream()
};
let result = run_block(
&block,
&mut context,
input_stream,
&scope.clone().set_it(input_clone),
).await;
match result {
Ok(mut stream) => {
while let Some(result) = stream.next().await {
yield Ok(ReturnSuccess::Value(result));
}
let errors = context.get_errors();
if let Some(error) = errors.first() {
yield Err(error.clone());
}
}
Err(e) => {
yield Err(e);
}
}
}
};
Ok(stream.to_output_stream())
}
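Reading the block above: when the block head is the synthetic "expanded-each" expression, the per-row input stream is left empty and the current row is bound only to $it; otherwise the row is also piped into the block as its input.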

View File

@ -0,0 +1,78 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
pub struct Echo;
#[derive(Deserialize)]
pub struct EchoArgs {
pub rest: Vec<Value>,
}
impl WholeStreamCommand for Echo {
fn name(&self) -> &str {
"echo"
}
fn signature(&self) -> Signature {
Signature::build("echo").rest(SyntaxShape::Any, "the values to echo")
}
fn usage(&self) -> &str {
"Echo the arguments back to the user."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, echo)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Put a hello message in the pipeline",
example: "echo 'hello'",
},
Example {
description: "Print the value of the special '$nu' variable",
example: "echo $nu",
},
]
}
}
fn echo(args: EchoArgs, _: RunnableContext) -> Result<OutputStream, ShellError> {
let mut output = vec![];
for i in args.rest {
match i.as_string() {
Ok(s) => {
output.push(Ok(ReturnSuccess::Value(
UntaggedValue::string(s).into_value(i.tag.clone()),
)));
}
_ => match i {
Value {
value: UntaggedValue::Table(table),
..
} => {
for value in table {
output.push(Ok(ReturnSuccess::Value(value.clone())));
}
}
_ => {
output.push(Ok(ReturnSuccess::Value(i.clone())));
}
},
}
}
// TODO: This whole block can probably be replaced with `.map()`
let stream = futures::stream::iter(output);
Ok(stream.to_output_stream())
}

View File

@ -0,0 +1,168 @@
use crate::commands::UnevaluatedCallInfo;
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{
CommandAction, Primitive, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value,
};
use nu_source::Tagged;
use std::path::PathBuf;
pub struct Enter;
#[derive(Deserialize)]
pub struct EnterArgs {
location: Tagged<PathBuf>,
}
impl WholeStreamCommand for Enter {
fn name(&self) -> &str {
"enter"
}
fn signature(&self) -> Signature {
Signature::build("enter").required(
"location",
SyntaxShape::Path,
"the location to create a new shell from",
)
}
fn usage(&self) -> &str {
"Create a new shell and begin at this path."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
Ok(args.process_raw(registry, enter)?.run())
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Enter a path as a new shell",
example: "enter ../projectB",
},
Example {
description: "Enter a file as a new shell",
example: "enter package.json",
},
]
}
}
fn enter(
EnterArgs { location }: EnterArgs,
RunnableContext {
registry,
name: tag,
..
}: RunnableContext,
raw_args: RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let location_string = location.display().to_string();
let location_clone = location_string.clone();
if location_string.starts_with("help") {
let spec = location_string.split(':').collect::<Vec<&str>>();
if spec.len() == 2 {
let (_, command) = (spec[0], spec[1]);
if registry.has(command) {
return Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell(
UntaggedValue::string(command).into_value(Tag::unknown()),
)))]
.into());
}
}
Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell(
UntaggedValue::nothing().into_value(Tag::unknown()),
)))]
.into())
} else if location.is_dir() {
Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterShell(
location_clone,
)))]
.into())
} else {
let stream = async_stream! {
// If it's a file, attempt to open the file as a value and enter it
let cwd = raw_args.shell_manager.path();
let full_path = std::path::PathBuf::from(cwd);
let (file_extension, contents, contents_tag) =
crate::commands::open::fetch(
&full_path,
&PathBuf::from(location_clone),
tag.span,
).await?;
match contents {
UntaggedValue::Primitive(Primitive::String(_)) => {
let tagged_contents = contents.into_value(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from {}", extension);
if let Some(converter) =
registry.get_command(&command_name)
{
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: nu_protocol::hir::Call {
head: raw_args.call_info.args.head,
positional: None,
named: None,
span: Span::unknown(),
is_last: false,
},
name_tag: raw_args.call_info.name_tag,
scope: raw_args.call_info.scope.clone()
},
};
let mut result = converter.run(
new_args.with_input(vec![tagged_contents]),
&registry,
);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> =
result.drain_vec().await;
for res in result_vec {
match res {
Ok(ReturnSuccess::Value(Value {
value,
..
})) => {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(
Value {
value,
tag: contents_tag.clone(),
})));
}
x => yield x,
}
}
} else {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(tagged_contents)));
}
} else {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(tagged_contents)));
}
}
_ => {
let tagged_contents = contents.into_value(contents_tag);
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(tagged_contents)));
}
}
};
Ok(stream.to_output_stream())
}
}

View File

@ -23,6 +23,7 @@ impl WholeStreamCommand for EvaluateBy {
"evaluate_with",
SyntaxShape::String,
"the name of the column to evaluate by",
Some('w'),
)
}
@ -44,7 +45,7 @@ pub fn evaluate_by(
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let values: Vec<Value> = input.collect().await;
if values.is_empty() {
yield Err(ShellError::labeled_error(

View File

@ -12,7 +12,7 @@ impl WholeStreamCommand for Exit {
}
fn signature(&self) -> Signature {
Signature::build("exit").switch("now", "exit out of the shell immediately")
Signature::build("exit").switch("now", "exit out of the shell immediately", Some('n'))
}
fn usage(&self) -> &str {
@ -26,6 +26,19 @@ impl WholeStreamCommand for Exit {
) -> Result<OutputStream, ShellError> {
exit(args, registry)
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Exit the current shell",
example: "exit",
},
Example {
description: "Exit all shells (exiting Nu)",
example: "exit --now",
},
]
}
}
pub fn exit(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {

View File

@ -9,7 +9,7 @@ pub struct First;
#[derive(Deserialize)]
pub struct FirstArgs {
rows: Option<Tagged<u64>>,
rows: Option<Tagged<usize>>,
}
impl WholeStreamCommand for First {
@ -36,6 +36,19 @@ impl WholeStreamCommand for First {
) -> Result<OutputStream, ShellError> {
args.process(registry, first)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Return the first item of a list/table",
example: "echo [1 2 3] | first",
},
Example {
description: "Return the first 2 items of a list/table",
example: "echo [1 2 3] | first 2",
},
]
}
}
fn first(
@ -48,7 +61,5 @@ fn first(
1
};
Ok(OutputStream::from_input(
context.input.values.take(rows_desired),
))
Ok(OutputStream::from_input(context.input.take(rows_desired)))
}

View File

@ -0,0 +1,173 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ColumnPath, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
use nu_source::Tagged;
use nu_value_ext::{as_column_path, get_data_by_column_path};
use std::borrow::Borrow;
pub struct Format;
#[derive(Deserialize)]
pub struct FormatArgs {
pattern: Tagged<String>,
}
impl WholeStreamCommand for Format {
fn name(&self) -> &str {
"format"
}
fn signature(&self) -> Signature {
Signature::build("format").required(
"pattern",
SyntaxShape::String,
"the pattern to output. Eg) \"{foo}: {bar}\"",
)
}
fn usage(&self) -> &str {
"Format columns into a string using a simple pattern."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, format_command)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Print filenames with their sizes",
example: "ls | format '{name}: {size}'",
}]
}
}
fn format_command(
FormatArgs { pattern }: FormatArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let pattern_tag = pattern.tag.clone();
let format_pattern = format(&pattern);
let commands = format_pattern;
let mut input = input;
let stream = async_stream! {
while let Some(value) = input.next().await {
match value {
value
@
Value {
value: UntaggedValue::Row(_),
..
} => {
let mut output = String::new();
for command in &commands {
match command {
FormatCommand::Text(s) => {
output.push_str(&s);
}
FormatCommand::Column(c) => {
let key = to_column_path(&c, &pattern_tag)?;
let fetcher = get_data_by_column_path(
&value,
&key,
Box::new(move |(_, _, error)| error),
);
if let Ok(c) = fetcher {
output
.push_str(&value::format_leaf(c.borrow()).plain_string(100_000))
}
// That column doesn't match, so don't emit anything
}
}
}
yield ReturnSuccess::value(
UntaggedValue::string(output).into_untagged_value())
}
_ => yield ReturnSuccess::value(
UntaggedValue::string(String::new()).into_untagged_value()),
};
}
};
Ok(stream.to_output_stream())
}
#[derive(Debug)]
enum FormatCommand {
Text(String),
Column(String),
}
fn format(input: &str) -> Vec<FormatCommand> {
let mut output = vec![];
let mut loop_input = input.chars();
loop {
let mut before = String::new();
while let Some(c) = loop_input.next() {
if c == '{' {
break;
}
before.push(c);
}
if !before.is_empty() {
output.push(FormatCommand::Text(before.to_string()));
}
// Look for column as we're now at one
let mut column = String::new();
while let Some(c) = loop_input.next() {
if c == '}' {
break;
}
column.push(c);
}
if !column.is_empty() {
output.push(FormatCommand::Column(column.to_string()));
}
if before.is_empty() && column.is_empty() {
break;
}
}
output
}
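
To make the parse concrete, a small hedged walk-through of the pieces produced for a typical pattern (illustration only, not part of the diff):

// For "{name}: {size}" the loop above yields:
//   Column("name"), Text(": "), Column("size")
let pieces = format("{name}: {size}");
for piece in &pieces {
    match piece {
        FormatCommand::Column(c) => println!("column: {}", c),
        FormatCommand::Text(t) => println!("text: {:?}", t),
    }
}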
fn to_column_path(
path_members: &str,
tag: impl Into<Tag>,
) -> Result<Tagged<ColumnPath>, ShellError> {
let tag = tag.into();
as_column_path(
&UntaggedValue::Table(
path_members
.split('.')
.map(|x| {
let member = match x.parse::<u64>() {
Ok(v) => UntaggedValue::int(v),
Err(_) => UntaggedValue::string(x),
};
member.into_value(&tag)
})
.collect(),
)
.into_value(&tag),
)
}

View File

@ -0,0 +1,28 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::Signature;
pub struct From;
impl WholeStreamCommand for From {
fn name(&self) -> &str {
"from"
}
fn signature(&self) -> Signature {
Signature::build("from")
}
fn usage(&self) -> &str {
"Parse content (string or binary) as a table (input format based on subcommand, like csv, ini, json, toml)"
}
fn run(
&self,
_args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
Ok(crate::commands::help::get_help(&*self, registry).into())
}
}

View File

@ -10,15 +10,15 @@ pub struct FromBSON;
impl WholeStreamCommand for FromBSON {
fn name(&self) -> &str {
"from-bson"
"from bson"
}
fn signature(&self) -> Signature {
Signature::build("from-bson")
Signature::build("from bson")
}
fn usage(&self) -> &str {
"Parse text as .bson and create table."
"Parse binary as .bson and create table."
}
fn run(
@ -28,6 +28,13 @@ impl WholeStreamCommand for FromBSON {
) -> Result<OutputStream, ShellError> {
from_bson(args, registry)
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Convert bson data to a table",
example: "open file.bin | from bson",
}]
}
}
fn bson_array(input: &[Bson], tag: Tag) -> Result<Vec<Value>, ShellError> {
@ -205,32 +212,18 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let input = args.input;
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let bytes = input.collect_binary(tag.clone()).await?;
for value in values {
let value_tag = &value.tag;
match value.value {
UntaggedValue::Primitive(Primitive::Binary(vb)) =>
match from_bson_bytes_to_value(vb, tag.clone()) {
Ok(x) => yield ReturnSuccess::value(x),
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as BSON",
"input cannot be parsed as BSON",
tag.clone(),
"value originates from here",
value_tag,
))
}
}
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
match from_bson_bytes_to_value(bytes.item, tag.clone()) {
Ok(x) => yield ReturnSuccess::value(x),
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as BSON",
"input cannot be parsed as BSON",
tag.clone(),
"value originates from here",
value_tag,
)),
bytes.tag,
))
}
}
};

View File

@ -14,17 +14,22 @@ pub struct FromCSVArgs {
impl WholeStreamCommand for FromCSV {
fn name(&self) -> &str {
"from-csv"
"from csv"
}
fn signature(&self) -> Signature {
Signature::build("from-csv")
Signature::build("from csv")
.named(
"separator",
SyntaxShape::String,
"a character to separate columns, defaults to ','",
Some('s'),
)
.switch(
"headerless",
"don't treat the first row as column names",
None,
)
.switch("headerless", "don't treat the first row as column names")
}
fn usage(&self) -> &str {
@ -38,6 +43,23 @@ impl WholeStreamCommand for FromCSV {
) -> Result<OutputStream, ShellError> {
args.process(registry, from_csv)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Convert comma-separated data to a table",
example: "open data.txt | from csv",
},
Example {
description: "Convert comma-separated data to a table, ignoring headers",
example: "open data.txt | from csv --headerless",
},
Example {
description: "Convert semicolon-separated data to a table",
example: "open data.txt | from csv --separator ';'",
},
]
}
}
fn from_csv(

View File

@ -1,7 +1,7 @@
use crate::prelude::*;
use csv::ReaderBuilder;
use csv::{ErrorKind, ReaderBuilder};
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, TaggedDictBuilder, UntaggedValue, Value};
use nu_protocol::{ReturnSuccess, TaggedDictBuilder, UntaggedValue, Value};
fn from_delimited_string_to_value(
s: String,
@ -27,10 +27,13 @@ fn from_delimited_string_to_value(
for row in reader.records() {
let mut tagged_row = TaggedDictBuilder::new(&tag);
for (value, header) in row?.iter().zip(headers.iter()) {
tagged_row.insert_value(
header,
UntaggedValue::Primitive(Primitive::String(String::from(value))).into_value(&tag),
)
if let Ok(i) = value.parse::<i64>() {
tagged_row.insert_value(header, UntaggedValue::int(i).into_value(&tag))
} else if let Ok(f) = value.parse::<f64>() {
tagged_row.insert_value(header, UntaggedValue::decimal(f).into_value(&tag))
} else {
tagged_row.insert_value(header, UntaggedValue::string(value).into_value(&tag))
}
}
rows.push(tagged_row.into_value());
}
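With this change each CSV cell is first tried as an integer, then as a decimal, and only kept as a string when both parses fail, so a line such as 1,2.5,hello becomes an int, a decimal, and a string cell.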
@ -47,28 +50,9 @@ pub fn from_delimited_data(
let name_tag = name;
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let concat_string = input.collect_string(name_tag.clone()).await?;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
let value_tag = &value.tag;
latest_tag = Some(value_tag.clone());
if let Ok(s) = value.as_string() {
concat_string.push_str(&s);
} else {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag.clone(),
"value originates from here",
value_tag.clone(),
))
}
}
match from_delimited_string_to_value(concat_string, headerless, sep, name_tag.clone()) {
match from_delimited_string_to_value(concat_string.item, headerless, sep, name_tag.clone()) {
Ok(x) => match x {
Value { value: UntaggedValue::Table(list), .. } => {
for l in list {
@ -77,15 +61,18 @@ pub fn from_delimited_data(
}
x => yield ReturnSuccess::value(x),
},
Err(_) => if let Some(last_tag) = latest_tag {
let line_one = format!("Could not parse as {}", format_name);
Err(err) => {
let line_one = match pretty_csv_error(err) {
Some(pretty) => format!("Could not parse as {} ({})", format_name,pretty),
None => format!("Could not parse as {}", format_name),
};
let line_two = format!("input cannot be parsed as {}", format_name);
yield Err(ShellError::labeled_error_with_secondary(
line_one,
line_two,
name_tag.clone(),
"value originates from here",
last_tag.clone(),
concat_string.tag,
))
} ,
}
@ -93,3 +80,26 @@ pub fn from_delimited_data(
Ok(stream.to_output_stream())
}
fn pretty_csv_error(err: csv::Error) -> Option<String> {
match err.kind() {
ErrorKind::UnequalLengths {
pos,
expected_len,
len,
} => {
if let Some(pos) = pos {
Some(format!(
"Line {}: expected {} fields, found {}",
pos.line(),
expected_len,
len
))
} else {
Some(format!("Expected {} fields, found {}", expected_len, len))
}
}
ErrorKind::Seek => Some("Internal error while parsing csv".to_string()),
_ => None,
}
}

View File

@ -0,0 +1,122 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use ::eml_parser::eml::*;
use ::eml_parser::EmlParser;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue};
use nu_source::Tagged;
pub struct FromEML;
const DEFAULT_BODY_PREVIEW: usize = 50;
#[derive(Deserialize, Clone)]
pub struct FromEMLArgs {
#[serde(rename(deserialize = "preview-body"))]
preview_body: Option<Tagged<usize>>,
}
impl WholeStreamCommand for FromEML {
fn name(&self) -> &str {
"from eml"
}
fn signature(&self) -> Signature {
Signature::build("from eml").named(
"preview-body",
SyntaxShape::Int,
"How many bytes of the body to preview",
Some('b'),
)
}
fn usage(&self) -> &str {
"Parse text as .eml and create table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, from_eml)?.run()
}
}
fn emailaddress_to_value(tag: &Tag, email_address: &EmailAddress) -> TaggedDictBuilder {
let mut dict = TaggedDictBuilder::with_capacity(tag, 2);
let (n, a) = match email_address {
EmailAddress::AddressOnly { address } => {
(UntaggedValue::nothing(), UntaggedValue::string(address))
}
EmailAddress::NameAndEmailAddress { name, address } => {
(UntaggedValue::string(name), UntaggedValue::string(address))
}
};
dict.insert_untagged("Name", n);
dict.insert_untagged("Address", a);
dict
}
fn headerfieldvalue_to_value(tag: &Tag, value: &HeaderFieldValue) -> UntaggedValue {
use HeaderFieldValue::*;
match value {
SingleEmailAddress(address) => emailaddress_to_value(tag, address).into_untagged_value(),
MultipleEmailAddresses(addresses) => UntaggedValue::Table(
addresses
.iter()
.map(|a| emailaddress_to_value(tag, a).into_value())
.collect(),
),
Unstructured(s) => UntaggedValue::string(s),
Empty => UntaggedValue::nothing(),
}
}
fn from_eml(
eml_args: FromEMLArgs,
runnable_context: RunnableContext,
) -> Result<OutputStream, ShellError> {
let input = runnable_context.input;
let tag = runnable_context.name;
let stream = async_stream! {
let value = input.collect_string(tag.clone()).await?;
let body_preview = eml_args.preview_body.map(|b| b.item).unwrap_or(DEFAULT_BODY_PREVIEW);
let eml = EmlParser::from_string(value.item)
.with_body_preview(body_preview)
.parse()
.map_err(|_| ShellError::labeled_error("Could not parse .eml file", "could not parse .eml file", &tag))?;
let mut dict = TaggedDictBuilder::new(&tag);
if let Some(subj) = eml.subject {
dict.insert_untagged("Subject", UntaggedValue::string(subj));
}
if let Some(from) = eml.from {
dict.insert_untagged("From", headerfieldvalue_to_value(&tag, &from));
}
if let Some(to) = eml.to {
dict.insert_untagged("To", headerfieldvalue_to_value(&tag, &to));
}
for HeaderField{ name, value } in eml.headers.iter() {
dict.insert_untagged(name, headerfieldvalue_to_value(&tag, &value));
}
if let Some(body) = eml.body {
dict.insert_untagged("Body", UntaggedValue::string(body));
}
yield ReturnSuccess::value(dict.into_value());
};
Ok(stream.to_output_stream())
}

View File

@ -0,0 +1,240 @@
extern crate ical;
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use ical::parser::ical::component::*;
use ical::property::Property;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
use std::io::BufReader;
pub struct FromIcs;
impl WholeStreamCommand for FromIcs {
fn name(&self) -> &str {
"from ics"
}
fn signature(&self) -> Signature {
Signature::build("from ics")
}
fn usage(&self) -> &str {
"Parse text as .ics and create table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
from_ics(args, registry)
}
}
fn from_ics(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let input = args.input;
let stream = async_stream! {
let input_string = input.collect_string(tag.clone()).await?.item;
let input_bytes = input_string.as_bytes();
let buf_reader = BufReader::new(input_bytes);
let parser = ical::IcalParser::new(buf_reader);
for calendar in parser {
match calendar {
Ok(c) => yield ReturnSuccess::value(calendar_to_value(c, tag.clone())),
Err(_) => yield Err(ShellError::labeled_error(
"Could not parse as .ics",
"input cannot be parsed as .ics",
tag.clone()
)),
}
}
};
Ok(stream.to_output_stream())
}
fn calendar_to_value(calendar: IcalCalendar, tag: Tag) -> Value {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged(
"properties",
properties_to_value(calendar.properties, tag.clone()),
);
row.insert_untagged("events", events_to_value(calendar.events, tag.clone()));
row.insert_untagged("alarms", alarms_to_value(calendar.alarms, tag.clone()));
row.insert_untagged("to-Dos", todos_to_value(calendar.todos, tag.clone()));
row.insert_untagged(
"journals",
journals_to_value(calendar.journals, tag.clone()),
);
row.insert_untagged(
"free-busys",
free_busys_to_value(calendar.free_busys, tag.clone()),
);
row.insert_untagged("timezones", timezones_to_value(calendar.timezones, tag));
row.into_value()
}
fn events_to_value(events: Vec<IcalEvent>, tag: Tag) -> UntaggedValue {
UntaggedValue::table(
&events
.into_iter()
.map(|event| {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged(
"properties",
properties_to_value(event.properties, tag.clone()),
);
row.insert_untagged("alarms", alarms_to_value(event.alarms, tag.clone()));
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn alarms_to_value(alarms: Vec<IcalAlarm>, tag: Tag) -> UntaggedValue {
UntaggedValue::table(
&alarms
.into_iter()
.map(|alarm| {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged(
"properties",
properties_to_value(alarm.properties, tag.clone()),
);
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn todos_to_value(todos: Vec<IcalTodo>, tag: Tag) -> UntaggedValue {
UntaggedValue::table(
&todos
.into_iter()
.map(|todo| {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged(
"properties",
properties_to_value(todo.properties, tag.clone()),
);
row.insert_untagged("alarms", alarms_to_value(todo.alarms, tag.clone()));
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn journals_to_value(journals: Vec<IcalJournal>, tag: Tag) -> UntaggedValue {
UntaggedValue::table(
&journals
.into_iter()
.map(|journal| {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged(
"properties",
properties_to_value(journal.properties, tag.clone()),
);
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn free_busys_to_value(free_busys: Vec<IcalFreeBusy>, tag: Tag) -> UntaggedValue {
UntaggedValue::table(
&free_busys
.into_iter()
.map(|free_busy| {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged(
"properties",
properties_to_value(free_busy.properties, tag.clone()),
);
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn timezones_to_value(timezones: Vec<IcalTimeZone>, tag: Tag) -> UntaggedValue {
UntaggedValue::table(
&timezones
.into_iter()
.map(|timezone| {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged(
"properties",
properties_to_value(timezone.properties, tag.clone()),
);
row.insert_untagged(
"transitions",
timezone_transitions_to_value(timezone.transitions, tag.clone()),
);
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn timezone_transitions_to_value(
transitions: Vec<IcalTimeZoneTransition>,
tag: Tag,
) -> UntaggedValue {
UntaggedValue::table(
&transitions
.into_iter()
.map(|transition| {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged(
"properties",
properties_to_value(transition.properties, tag.clone()),
);
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn properties_to_value(properties: Vec<Property>, tag: Tag) -> UntaggedValue {
UntaggedValue::table(
&properties
.into_iter()
.map(|prop| {
let mut row = TaggedDictBuilder::new(tag.clone());
let name = UntaggedValue::string(prop.name);
let value = match prop.value {
Some(val) => UntaggedValue::string(val),
None => UntaggedValue::Primitive(Primitive::Nothing),
};
let params = match prop.params {
Some(param_list) => params_to_value(param_list, tag.clone()).into(),
None => UntaggedValue::Primitive(Primitive::Nothing),
};
row.insert_untagged("name", name);
row.insert_untagged("value", value);
row.insert_untagged("params", params);
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn params_to_value(params: Vec<(String, Vec<String>)>, tag: Tag) -> Value {
let mut row = TaggedDictBuilder::new(tag);
for (param_name, param_values) in params {
let values: Vec<Value> = param_values.into_iter().map(|val| val.into()).collect();
let values = UntaggedValue::table(&values);
row.insert_untagged(param_name, values);
}
row.into_value()
}

View File

@ -8,11 +8,11 @@ pub struct FromINI;
impl WholeStreamCommand for FromINI {
fn name(&self) -> &str {
"from-ini"
"from ini"
}
fn signature(&self) -> Signature {
Signature::build("from-ini")
Signature::build("from ini")
}
fn usage(&self) -> &str {
@ -66,32 +66,12 @@ pub fn from_ini_string_to_value(
fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let span = tag.span;
let input = args.input;
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let concat_string = input.collect_string(tag.clone()).await?;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
latest_tag = Some(value.tag.clone());
let value_span = value.tag.span;
if let Ok(s) = value.as_string() {
concat_string.push_str(&s);
} else {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
span,
"value originates from here",
value_span,
))
}
}
match from_ini_string_to_value(concat_string, tag.clone()) {
match from_ini_string_to_value(concat_string.item, tag.clone()) {
Ok(x) => match x {
Value { value: UntaggedValue::Table(list), .. } => {
for l in list {
@ -100,15 +80,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
}
x => yield ReturnSuccess::value(x),
},
Err(_) => if let Some(last_tag) = latest_tag {
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as INI",
"input cannot be parsed as INI",
&tag,
"value originates from here",
last_tag,
concat_string.tag,
))
} ,
}
}
};
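This hunk is representative of most of the from-* changes in this range: the hand-rolled loop that concatenated string values and tracked the latest tag is replaced by one collect_string call, whose result carries both the joined text (.item) and a tag (.tag) to point error messages at. A rough self-contained model of why that simplifies the error path (plain stand-in types; the rule for which tag is kept is an assumption of the model, not a claim about the real collect_string):

// Stand-ins for the tagged types; not the nu_protocol API.
#[derive(Clone, Debug, PartialEq)]
struct Tag(String);

struct Tagged<T> { item: T, tag: Tag }

// Join every string value from the pipeline and keep one tag to point errors at.
fn collect_string(values: &[Tagged<String>], fallback: Tag) -> Tagged<String> {
    let item: String = values.iter().map(|v| v.item.as_str()).collect();
    let tag = values.first().map(|v| v.tag.clone()).unwrap_or(fallback);
    Tagged { item, tag }
}

fn main() {
    let input = vec![
        Tagged { item: "[package]\n".to_string(), tag: Tag("cell 0".into()) },
        Tagged { item: "name = \"nu\"\n".to_string(), tag: Tag("cell 1".into()) },
    ];
    let joined = collect_string(&input, Tag("command".into()));
    // One value to parse, one tag to blame if parsing fails.
    assert_eq!(joined.item, "[package]\nname = \"nu\"\n");
    assert_eq!(joined.tag, Tag("cell 0".into()));
}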

View File

@ -12,11 +12,15 @@ pub struct FromJSONArgs {
impl WholeStreamCommand for FromJSON {
fn name(&self) -> &str {
"from-json"
"from json"
}
fn signature(&self) -> Signature {
Signature::build("from-json").switch("objects", "treat each line as a separate value")
Signature::build("from json").switch(
"objects",
"treat each line as a separate value",
Some('o'),
)
}
fn usage(&self) -> &str {
@ -70,35 +74,13 @@ fn from_json(
FromJSONArgs { objects }: FromJSONArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let name_span = name.span;
let name_tag = name;
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
latest_tag = Some(value.tag.clone());
let value_span = value.tag.span;
if let Ok(s) = value.as_string() {
concat_string.push_str(&s);
} else {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_span,
"value originates from here",
value_span,
))
}
}
let concat_string = input.collect_string(name_tag.clone()).await?;
if objects {
for json_str in concat_string.lines() {
for json_str in concat_string.item.lines() {
if json_str.is_empty() {
continue;
}
@ -106,20 +88,22 @@ fn from_json(
match from_json_string_to_value(json_str.to_string(), &name_tag) {
Ok(x) =>
yield ReturnSuccess::value(x),
Err(_) => {
if let Some(ref last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could nnot parse as JSON",
"input cannot be parsed as JSON",
&name_tag,
"value originates from here",
last_tag))
}
Err(e) => {
let mut message = "Could not parse as JSON (".to_string();
message.push_str(&e.to_string());
message.push_str(")");
yield Err(ShellError::labeled_error_with_secondary(
message,
"input cannot be parsed as JSON",
&name_tag,
"value originates from here",
concat_string.tag.clone()))
}
}
}
} else {
match from_json_string_to_value(concat_string, name_tag.clone()) {
match from_json_string_to_value(concat_string.item, name_tag.clone()) {
Ok(x) =>
match x {
Value { value: UntaggedValue::Table(list), .. } => {
@ -129,15 +113,17 @@ fn from_json(
}
x => yield ReturnSuccess::value(x),
}
Err(_) => {
if let Some(last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as JSON",
"input cannot be parsed as JSON",
name_tag,
"value originates from here",
last_tag))
}
Err(e) => {
let mut message = "Could not parse as JSON (".to_string();
message.push_str(&e.to_string());
message.push_str(")");
yield Err(ShellError::labeled_error_with_secondary(
message,
"input cannot be parsed as JSON",
name_tag,
"value originates from here",
concat_string.tag))
}
}
}
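The new error path builds its message with successive push_str calls. An equivalent, slightly tighter formulation (purely illustrative; not what the patch does) is a single format!:

use std::fmt::Display;

// Same message as the push_str sequence above, for any error type that implements Display.
fn json_parse_message(e: &dyn Display) -> String {
    format!("Could not parse as JSON ({})", e)
}

fn main() {
    let err = "expected value at line 1 column 1"; // stand-in error text
    assert_eq!(
        json_parse_message(&err),
        "Could not parse as JSON (expected value at line 1 column 1)"
    );
}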

View File

@ -0,0 +1,98 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use crate::TaggedListBuilder;
use calamine::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue};
use std::io::Cursor;
pub struct FromODS;
#[derive(Deserialize)]
pub struct FromODSArgs {
headerless: bool,
}
impl WholeStreamCommand for FromODS {
fn name(&self) -> &str {
"from ods"
}
fn signature(&self) -> Signature {
Signature::build("from ods").switch(
"headerless",
"don't treat the first row as column names",
None,
)
}
fn usage(&self) -> &str {
"Parse OpenDocument Spreadsheet(.ods) data and create table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, from_ods)?.run()
}
}
fn from_ods(
FromODSArgs {
headerless: _headerless,
}: FromODSArgs,
runnable_context: RunnableContext,
) -> Result<OutputStream, ShellError> {
let input = runnable_context.input;
let tag = runnable_context.name;
let stream = async_stream! {
let bytes = input.collect_binary(tag.clone()).await?;
let mut buf: Cursor<Vec<u8>> = Cursor::new(bytes.item);
let mut ods = Ods::<_>::new(buf).map_err(|_| ShellError::labeled_error(
"Could not load ods file",
"could not load ods file",
&tag))?;
let mut dict = TaggedDictBuilder::new(&tag);
let sheet_names = ods.sheet_names().to_owned();
for sheet_name in &sheet_names {
let mut sheet_output = TaggedListBuilder::new(&tag);
if let Some(Ok(current_sheet)) = ods.worksheet_range(sheet_name) {
for row in current_sheet.rows() {
let mut row_output = TaggedDictBuilder::new(&tag);
for (i, cell) in row.iter().enumerate() {
let value = match cell {
DataType::Empty => UntaggedValue::nothing(),
DataType::String(s) => UntaggedValue::string(s),
DataType::Float(f) => UntaggedValue::decimal(*f),
DataType::Int(i) => UntaggedValue::int(*i),
DataType::Bool(b) => UntaggedValue::boolean(*b),
_ => UntaggedValue::nothing(),
};
row_output.insert_untagged(&format!("Column{}", i), value);
}
sheet_output.push_untagged(row_output.into_untagged_value());
}
dict.insert_untagged(sheet_name, sheet_output.into_untagged_value());
} else {
yield Err(ShellError::labeled_error(
"Could not load sheet",
"could not load sheet",
&tag));
}
}
yield ReturnSuccess::value(dict.into_value());
};
Ok(stream.to_output_stream())
}

View File

@ -10,11 +10,11 @@ pub struct FromSQLite;
impl WholeStreamCommand for FromSQLite {
fn name(&self) -> &str {
"from-sqlite"
"from sqlite"
}
fn signature(&self) -> Signature {
Signature::build("from-sqlite")
Signature::build("from sqlite")
}
fn usage(&self) -> &str {
@ -34,11 +34,11 @@ pub struct FromDB;
impl WholeStreamCommand for FromDB {
fn name(&self) -> &str {
"from-db"
"from db"
}
fn signature(&self) -> Signature {
Signature::build("from-db")
Signature::build("from db")
}
fn usage(&self) -> &str {
@ -138,39 +138,25 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
let input = args.input;
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
for value in values {
let value_tag = &value.tag;
match value.value {
UntaggedValue::Primitive(Primitive::Binary(vb)) =>
match from_sqlite_bytes_to_value(vb, tag.clone()) {
Ok(x) => match x {
Value { value: UntaggedValue::Table(list), .. } => {
for l in list {
yield ReturnSuccess::value(l);
}
}
_ => yield ReturnSuccess::value(x),
}
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SQLite",
"input cannot be parsed as SQLite",
&tag,
"value originates from here",
value_tag,
))
}
let bytes = input.collect_binary(tag.clone()).await?;
match from_sqlite_bytes_to_value(bytes.item, tag.clone()) {
Ok(x) => match x {
Value { value: UntaggedValue::Table(list), .. } => {
for l in list {
yield ReturnSuccess::value(l);
}
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected binary data from pipeline",
"requires binary data input",
}
_ => yield ReturnSuccess::value(x),
}
Err(err) => {
println!("{:?}", err);
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SQLite",
"input cannot be parsed as SQLite",
&tag,
"value originates from here",
value_tag,
)),
bytes.tag,
))
}
}
};

View File

@ -17,7 +17,7 @@ pub struct FromSSVArgs {
minimum_spaces: Option<Tagged<usize>>,
}
const STRING_REPRESENTATION: &str = "from-ssv";
const STRING_REPRESENTATION: &str = "from ssv";
const DEFAULT_MINIMUM_SPACES: usize = 2;
impl WholeStreamCommand for FromSSV {
@ -27,12 +27,17 @@ impl WholeStreamCommand for FromSSV {
fn signature(&self) -> Signature {
Signature::build(STRING_REPRESENTATION)
.switch("headerless", "don't treat the first row as column names")
.switch("aligned-columns", "assume columns are aligned")
.switch(
"headerless",
"don't treat the first row as column names",
None,
)
.switch("aligned-columns", "assume columns are aligned", Some('a'))
.named(
"minimum-spaces",
SyntaxShape::Int,
"the mininum spaces to separate columns",
"the minimum spaces to separate columns",
Some('m'),
)
}
@ -197,15 +202,17 @@ fn string_to_table(
headerless: bool,
aligned_columns: bool,
split_at: usize,
) -> Option<Vec<Vec<(String, String)>>> {
) -> Vec<Vec<(String, String)>> {
let mut lines = s.lines().filter(|l| !l.trim().is_empty());
let separator = " ".repeat(std::cmp::max(split_at, 1));
let (ls, header_options) = if headerless {
(lines, HeaderOptions::WithoutHeaders)
} else {
let headers = lines.next()?;
(lines, HeaderOptions::WithHeaders(headers))
match lines.next() {
Some(header) => (lines, HeaderOptions::WithHeaders(header)),
None => return vec![],
}
};
let f = if aligned_columns {
@ -214,11 +221,7 @@ fn string_to_table(
parse_separated_columns
};
let parsed = f(ls, header_options, &separator);
match parsed.len() {
0 => None,
_ => Some(parsed),
}
f(ls, header_options, &separator)
}
fn from_ssv_string_to_value(
@ -229,7 +232,7 @@ fn from_ssv_string_to_value(
tag: impl Into<Tag>,
) -> Option<Value> {
let tag = tag.into();
let rows = string_to_table(s, headerless, aligned_columns, split_at)?
let rows = string_to_table(s, headerless, aligned_columns, split_at)
.iter()
.map(|row| {
let mut tagged_dict = TaggedDictBuilder::new(&tag);
@ -256,45 +259,26 @@ fn from_ssv(
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
let concat_string = input.collect_string(name.clone()).await?;
let split_at = match minimum_spaces {
Some(number) => number.item,
None => DEFAULT_MINIMUM_SPACES
};
for value in values {
let value_tag = value.tag.clone();
latest_tag = Some(value_tag.clone());
if let Ok(s) = value.as_string() {
concat_string.push_str(&s);
}
else {
yield Err(ShellError::labeled_error_with_secondary (
"Expected a string from pipeline",
"requires string input",
&name,
"value originates from here",
&value_tag
))
}
}
match from_ssv_string_to_value(&concat_string, headerless, aligned_columns, split_at, name.clone()) {
match from_ssv_string_to_value(&concat_string.item, headerless, aligned_columns, split_at, name.clone()) {
Some(x) => match x {
Value { value: UntaggedValue::Table(list), ..} => {
for l in list { yield ReturnSuccess::value(l) }
}
x => yield ReturnSuccess::value(x)
},
None => if let Some(tag) = latest_tag {
None => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SSV",
"input cannot be parsed ssv",
&name,
"value originates from here",
&tag,
&concat_string.tag,
))
},
}
@ -323,10 +307,10 @@ mod tests {
let result = string_to_table(input, false, true, 1);
assert_eq!(
result,
Some(vec![
vec![
vec![owned("a", "1"), owned("b", "2")],
vec![owned("a", "3"), owned("b", "4")]
])
]
);
}
@ -338,10 +322,7 @@ mod tests {
2
"#;
let result = string_to_table(input, false, true, 1);
assert_eq!(
result,
Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]])
);
assert_eq!(result, vec![vec![owned("a", "1")], vec![owned("a", "2")]]);
}
#[test]
@ -354,21 +335,14 @@ mod tests {
let result = string_to_table(input, true, true, 1);
assert_eq!(
result,
Some(vec![
vec![
vec![owned("Column1", "a"), owned("Column2", "b")],
vec![owned("Column1", "1"), owned("Column2", "2")],
vec![owned("Column1", "3"), owned("Column2", "4")]
])
]
);
}
#[test]
fn it_returns_none_given_an_empty_string() {
let input = "";
let result = string_to_table(input, true, true, 1);
assert!(result.is_none());
}
#[test]
fn it_allows_a_predefined_number_of_spaces() {
let input = r#"
@ -380,18 +354,18 @@ mod tests {
let result = string_to_table(input, false, true, 3);
assert_eq!(
result,
Some(vec![
vec![
vec![
owned("column a", "entry 1"),
owned("column b", "entry number 2")
],
vec![owned("column a", "3"), owned("column b", "four")]
])
]
);
}
#[test]
fn it_trims_remaining_separator_space() -> Result<(), ShellError> {
fn it_trims_remaining_separator_space() {
let input = r#"
colA colB colC
val1 val2 val3
@ -399,17 +373,14 @@ mod tests {
let trimmed = |s: &str| s.trim() == s;
let result = string_to_table(input, false, true, 2)
.ok_or_else(|| ShellError::unexpected("table couldn't be parsed"))?;
let result = string_to_table(input, false, true, 2);
assert!(result
.iter()
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b))));
Ok(())
}
#[test]
fn it_keeps_empty_columns() -> Result<(), ShellError> {
fn it_keeps_empty_columns() {
let input = r#"
colA col B col C
val2 val3
@ -417,8 +388,7 @@ mod tests {
val7 val8
"#;
let result = string_to_table(input, false, true, 2)
.ok_or_else(|| ShellError::unexpected("table couldn't be parsed"))?;
let result = string_to_table(input, false, true, 2);
assert_eq!(
result,
vec![
@ -439,38 +409,43 @@ mod tests {
],
]
);
Ok(())
}
#[test]
fn it_uses_the_full_final_column() -> Result<(), ShellError> {
fn it_can_produce_an_empty_stream_for_header_only_input() {
let input = "colA col B";
let result = string_to_table(input, false, true, 2);
let expected: Vec<Vec<(String, String)>> = vec![];
assert_eq!(expected, result);
}
#[test]
fn it_uses_the_full_final_column() {
let input = r#"
colA col B
val1 val2 trailing value that should be included
"#;
let result = string_to_table(input, false, true, 2)
.ok_or_else(|| ShellError::unexpected("table couldn't be parsed"))?;
let result = string_to_table(input, false, true, 2);
assert_eq!(
result,
vec![vec![
owned("colA", "val1"),
owned("col B", "val2 trailing value that should be included"),
],]
]]
);
Ok(())
}
#[test]
fn it_handles_empty_values_when_headerless_and_aligned_columns() -> Result<(), ShellError> {
fn it_handles_empty_values_when_headerless_and_aligned_columns() {
let input = r#"
a multi-word value b d
1 3-3 4
last
"#;
let result = string_to_table(input, true, true, 2)
.ok_or_else(|| ShellError::unexpected("table couldn't be parsed"))?;
let result = string_to_table(input, true, true, 2);
assert_eq!(
result,
vec![
@ -497,27 +472,21 @@ mod tests {
],
]
);
Ok(())
}
#[test]
fn input_is_parsed_correctly_if_either_option_works() -> Result<(), ShellError> {
fn input_is_parsed_correctly_if_either_option_works() {
let input = r#"
docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP
kubernetes component=apiserver,provider=kubernetes <none> 172.30.0.2 443/TCP
kubernetes-ro component=apiserver,provider=kubernetes <none> 172.30.0.1 80/TCP
"#;
let aligned_columns_headerless = string_to_table(input, true, true, 2)
.ok_or_else(|| ShellError::unexpected("table couldn't be parsed"))?;
let separator_headerless = string_to_table(input, true, false, 2)
.ok_or_else(|| ShellError::unexpected("table couldn't be parsed"))?;
let aligned_columns_with_headers = string_to_table(input, false, true, 2)
.ok_or_else(|| ShellError::unexpected("table couldn't be parsed"))?;
let separator_with_headers = string_to_table(input, false, false, 2)
.ok_or_else(|| ShellError::unexpected("table couldn't be parsed"))?;
let aligned_columns_headerless = string_to_table(input, true, true, 2);
let separator_headerless = string_to_table(input, true, false, 2);
let aligned_columns_with_headers = string_to_table(input, false, true, 2);
let separator_with_headers = string_to_table(input, false, false, 2);
assert_eq!(aligned_columns_headerless, separator_headerless);
assert_eq!(aligned_columns_with_headers, separator_with_headers);
Ok(())
}
}
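string_to_table now returns an empty Vec instead of None for input with no data rows, which is why the tests above drop the Some(...) wrappers and the ok_or_else conversions. The trade-off in miniature (self-contained illustration, not the nushell types):

// Returning an empty Vec lets callers iterate unconditionally;
// returning Option forces every caller (and test) to branch or unwrap first.
fn rows_opt(lines: &[&str]) -> Option<Vec<String>> {
    if lines.is_empty() {
        None
    } else {
        Some(lines.iter().map(|l| l.to_string()).collect())
    }
}

fn rows_vec(lines: &[&str]) -> Vec<String> {
    lines.iter().map(|l| l.to_string()).collect()
}

fn main() {
    let empty: [&str; 0] = [];
    assert!(rows_opt(&empty).is_none()); // caller has to handle None
    assert!(rows_vec(&empty).is_empty()); // caller can simply loop over nothing
}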

View File

@ -7,11 +7,11 @@ pub struct FromTOML;
impl WholeStreamCommand for FromTOML {
fn name(&self) -> &str {
"from-toml"
"from toml"
}
fn signature(&self) -> Signature {
Signature::build("from-toml")
Signature::build("from toml")
}
fn usage(&self) -> &str {
@ -69,33 +69,11 @@ pub fn from_toml(
) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let name_span = tag.span;
let input = args.input;
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
latest_tag = Some(value.tag.clone());
let value_span = value.tag.span;
if let Ok(s) = value.as_string() {
concat_string.push_str(&s);
}
else {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_span,
"value originates from here",
value_span,
))
}
}
match from_toml_string_to_value(concat_string, tag.clone()) {
let concat_string = input.collect_string(tag.clone()).await?;
match from_toml_string_to_value(concat_string.item, tag.clone()) {
Ok(x) => match x {
Value { value: UntaggedValue::Table(list), .. } => {
for l in list {
@ -104,15 +82,15 @@ pub fn from_toml(
}
x => yield ReturnSuccess::value(x),
},
Err(_) => if let Some(last_tag) = latest_tag {
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TOML",
"input cannot be parsed as TOML",
&tag,
"value originates from here",
last_tag,
concat_string.tag,
))
} ,
}
}
};

View File

@ -13,12 +13,15 @@ pub struct FromTSVArgs {
impl WholeStreamCommand for FromTSV {
fn name(&self) -> &str {
"from-tsv"
"from tsv"
}
fn signature(&self) -> Signature {
Signature::build("from-tsv")
.switch("headerless", "don't treat the first row as column names")
Signature::build("from tsv").switch(
"headerless",
"don't treat the first row as column names",
None,
)
}
fn usage(&self) -> &str {

View File

@ -1,17 +1,17 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue};
pub struct FromURL;
impl WholeStreamCommand for FromURL {
fn name(&self) -> &str {
"from-url"
"from url"
}
fn signature(&self) -> Signature {
Signature::build("from-url")
Signature::build("from url")
}
fn usage(&self) -> &str {
@ -30,32 +30,12 @@ impl WholeStreamCommand for FromURL {
fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let name_span = tag.span;
let input = args.input;
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let concat_string = input.collect_string(tag.clone()).await?;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
latest_tag = Some(value.tag.clone());
let value_span = value.tag.span;
if let Ok(s) = value.as_string() {
concat_string.push_str(&s);
} else {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_span,
"value originates from here",
value_span,
))
}
}
let result = serde_urlencoded::from_str::<Vec<(String, String)>>(&concat_string);
let result = serde_urlencoded::from_str::<Vec<(String, String)>>(&concat_string.item);
match result {
Ok(result) => {
@ -68,15 +48,13 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield ReturnSuccess::value(row.into_value());
}
_ => {
if let Some(last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"String not compatible with url-encoding",
"input not url-encoded",
tag,
"value originates from here",
last_tag,
));
}
yield Err(ShellError::labeled_error_with_secondary(
"String not compatible with url-encoding",
"input not url-encoded",
tag,
"value originates from here",
concat_string.tag,
));
}
}
};

View File

@ -0,0 +1,102 @@
extern crate ical;
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use ical::parser::vcard::component::*;
use ical::property::Property;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
use std::io::BufReader;
pub struct FromVcf;
impl WholeStreamCommand for FromVcf {
fn name(&self) -> &str {
"from vcf"
}
fn signature(&self) -> Signature {
Signature::build("from vcf")
}
fn usage(&self) -> &str {
"Parse text as .vcf and create table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
from_vcf(args, registry)
}
}
fn from_vcf(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let input = args.input;
let stream = async_stream! {
let input_string = input.collect_string(tag.clone()).await?.item;
let input_bytes = input_string.as_bytes();
let buf_reader = BufReader::new(input_bytes);
let parser = ical::VcardParser::new(buf_reader);
for contact in parser {
match contact {
Ok(c) => yield ReturnSuccess::value(contact_to_value(c, tag.clone())),
Err(_) => yield Err(ShellError::labeled_error(
"Could not parse as .vcf",
"input cannot be parsed as .vcf",
tag.clone()
)),
}
}
};
Ok(stream.to_output_stream())
}
fn contact_to_value(contact: VcardContact, tag: Tag) -> Value {
let mut row = TaggedDictBuilder::new(tag.clone());
row.insert_untagged("properties", properties_to_value(contact.properties, tag));
row.into_value()
}
fn properties_to_value(properties: Vec<Property>, tag: Tag) -> UntaggedValue {
UntaggedValue::table(
&properties
.into_iter()
.map(|prop| {
let mut row = TaggedDictBuilder::new(tag.clone());
let name = UntaggedValue::string(prop.name);
let value = match prop.value {
Some(val) => UntaggedValue::string(val),
None => UntaggedValue::Primitive(Primitive::Nothing),
};
let params = match prop.params {
Some(param_list) => params_to_value(param_list, tag.clone()).into(),
None => UntaggedValue::Primitive(Primitive::Nothing),
};
row.insert_untagged("name", name);
row.insert_untagged("value", value);
row.insert_untagged("params", params);
row.into_value()
})
.collect::<Vec<Value>>(),
)
}
fn params_to_value(params: Vec<(String, Vec<String>)>, tag: Tag) -> Value {
let mut row = TaggedDictBuilder::new(tag);
for (param_name, param_values) in params {
let values: Vec<Value> = param_values.into_iter().map(|val| val.into()).collect();
let values = UntaggedValue::table(&values);
row.insert_untagged(param_name, values);
}
row.into_value()
}

View File

@ -0,0 +1,99 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use crate::TaggedListBuilder;
use calamine::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue};
use std::io::Cursor;
pub struct FromXLSX;
#[derive(Deserialize)]
pub struct FromXLSXArgs {
headerless: bool,
}
impl WholeStreamCommand for FromXLSX {
fn name(&self) -> &str {
"from xlsx"
}
fn signature(&self) -> Signature {
Signature::build("from xlsx").switch(
"headerless",
"don't treat the first row as column names",
None,
)
}
fn usage(&self) -> &str {
"Parse binary Excel(.xlsx) data and create table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, from_xlsx)?.run()
}
}
fn from_xlsx(
FromXLSXArgs {
headerless: _headerless,
}: FromXLSXArgs,
runnable_context: RunnableContext,
) -> Result<OutputStream, ShellError> {
let input = runnable_context.input;
let tag = runnable_context.name;
let stream = async_stream! {
let value = input.collect_binary(tag.clone()).await?;
let mut buf: Cursor<Vec<u8>> = Cursor::new(value.item);
let mut xls = Xlsx::<_>::new(buf).map_err(|_| {
ShellError::labeled_error("Could not load xlsx file", "could not load xlsx file", &tag)
})?;
let mut dict = TaggedDictBuilder::new(&tag);
let sheet_names = xls.sheet_names().to_owned();
for sheet_name in &sheet_names {
let mut sheet_output = TaggedListBuilder::new(&tag);
if let Some(Ok(current_sheet)) = xls.worksheet_range(sheet_name) {
for row in current_sheet.rows() {
let mut row_output = TaggedDictBuilder::new(&tag);
for (i, cell) in row.iter().enumerate() {
let value = match cell {
DataType::Empty => UntaggedValue::nothing(),
DataType::String(s) => UntaggedValue::string(s),
DataType::Float(f) => UntaggedValue::decimal(*f),
DataType::Int(i) => UntaggedValue::int(*i),
DataType::Bool(b) => UntaggedValue::boolean(*b),
_ => UntaggedValue::nothing(),
};
row_output.insert_untagged(&format!("Column{}", i), value);
}
sheet_output.push_untagged(row_output.into_untagged_value());
}
dict.insert_untagged(sheet_name, sheet_output.into_untagged_value());
} else {
yield Err(ShellError::labeled_error(
"Could not load sheet",
"could not load sheet",
&tag,
));
}
}
yield ReturnSuccess::value(dict.into_value());
};
Ok(stream.to_output_stream())
}

View File

@ -0,0 +1,303 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
pub struct FromXML;
impl WholeStreamCommand for FromXML {
fn name(&self) -> &str {
"from xml"
}
fn signature(&self) -> Signature {
Signature::build("from xml")
}
fn usage(&self) -> &str {
"Parse text as .xml and create table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
from_xml(args, registry)
}
}
fn from_attributes_to_value(attributes: &[roxmltree::Attribute], tag: impl Into<Tag>) -> Value {
let tag = tag.into();
let mut collected = TaggedDictBuilder::new(tag);
for a in attributes {
collected.insert_untagged(String::from(a.name()), UntaggedValue::string(a.value()));
}
collected.into_value()
}
fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>) -> Value {
let tag = tag.into();
if n.is_element() {
let name = n.tag_name().name().trim().to_string();
let mut children_values = vec![];
for c in n.children() {
children_values.push(from_node_to_value(&c, &tag));
}
let children_values: Vec<Value> = children_values
.into_iter()
.filter(|x| match x {
Value {
value: UntaggedValue::Primitive(Primitive::String(f)),
..
} => {
!f.trim().is_empty() // non-whitespace characters?
}
_ => true,
})
.collect();
let mut collected = TaggedDictBuilder::new(&tag);
let attribute_value: Value = from_attributes_to_value(&n.attributes(), &tag);
let mut row = TaggedDictBuilder::new(&tag);
row.insert_untagged(
String::from("children"),
UntaggedValue::Table(children_values),
);
row.insert_untagged(String::from("attributes"), attribute_value);
collected.insert_untagged(name, row.into_value());
collected.into_value()
} else if n.is_comment() {
UntaggedValue::string("<comment>").into_value(tag)
} else if n.is_pi() {
UntaggedValue::string("<processing_instruction>").into_value(tag)
} else if n.is_text() {
match n.text() {
Some(text) => UntaggedValue::string(text).into_value(tag),
None => UntaggedValue::string("<error>").into_value(tag),
}
} else {
UntaggedValue::string("<unknown>").into_value(tag)
}
}
fn from_document_to_value(d: &roxmltree::Document, tag: impl Into<Tag>) -> Value {
from_node_to_value(&d.root_element(), tag)
}
pub fn from_xml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, roxmltree::Error> {
let parsed = roxmltree::Document::parse(&s)?;
Ok(from_document_to_value(&parsed, tag))
}
fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let input = args.input;
let stream = async_stream! {
let concat_string = input.collect_string(tag.clone()).await?;
match from_xml_string_to_value(concat_string.item, tag.clone()) {
Ok(x) => match x {
Value { value: UntaggedValue::Table(list), .. } => {
for l in list {
yield ReturnSuccess::value(l);
}
}
x => yield ReturnSuccess::value(x),
},
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as XML",
"input cannot be parsed as XML",
&tag,
"value originates from here",
&concat_string.tag,
))
} ,
}
};
Ok(stream.to_output_stream())
}
#[cfg(test)]
mod tests {
use crate::commands::from_xml;
use indexmap::IndexMap;
use nu_protocol::{UntaggedValue, Value};
use nu_source::*;
fn string(input: impl Into<String>) -> Value {
UntaggedValue::string(input.into()).into_untagged_value()
}
fn row(entries: IndexMap<String, Value>) -> Value {
UntaggedValue::row(entries).into_untagged_value()
}
fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value()
}
fn parse(xml: &str) -> Result<Value, roxmltree::Error> {
from_xml::from_xml_string_to_value(xml.to_string(), Tag::unknown())
}
#[test]
fn parses_empty_element() -> Result<(), roxmltree::Error> {
let source = "<nu></nu>";
assert_eq!(
parse(source)?,
row(indexmap! {
"nu".into() => row(indexmap! {
"children".into() => table(&[]),
"attributes".into() => row(indexmap! {})
})
})
);
Ok(())
}
#[test]
fn parses_element_with_text() -> Result<(), roxmltree::Error> {
let source = "<nu>La era de los tres caballeros</nu>";
assert_eq!(
parse(source)?,
row(indexmap! {
"nu".into() => row(indexmap! {
"children".into() => table(&[string("La era de los tres caballeros")]),
"attributes".into() => row(indexmap! {})
})
})
);
Ok(())
}
#[test]
fn parses_element_with_elements() -> Result<(), roxmltree::Error> {
let source = "\
<nu>
<dev>Andrés</dev>
<dev>Jonathan</dev>
<dev>Yehuda</dev>
</nu>";
assert_eq!(
parse(source)?,
row(indexmap! {
"nu".into() => row(indexmap! {
"children".into() => table(&[
row(indexmap! {
"dev".into() => row(indexmap! {
"children".into() => table(&[string("Andrés")]),
"attributes".into() => row(indexmap! {})
})
}),
row(indexmap! {
"dev".into() => row(indexmap! {
"children".into() => table(&[string("Jonathan")]),
"attributes".into() => row(indexmap! {})
})
}),
row(indexmap! {
"dev".into() => row(indexmap! {
"children".into() => table(&[string("Yehuda")]),
"attributes".into() => row(indexmap! {})
})
})
]),
"attributes".into() => row(indexmap! {})
})
})
);
Ok(())
}
#[test]
fn parses_element_with_attribute() -> Result<(), roxmltree::Error> {
let source = "\
<nu version=\"2.0\">
</nu>";
assert_eq!(
parse(source)?,
row(indexmap! {
"nu".into() => row(indexmap! {
"children".into() => table(&[]),
"attributes".into() => row(indexmap! {
"version".into() => string("2.0")
})
})
})
);
Ok(())
}
#[test]
fn parses_element_with_attribute_and_element() -> Result<(), roxmltree::Error> {
let source = "\
<nu version=\"2.0\">
<version>2.0</version>
</nu>";
assert_eq!(
parse(source)?,
row(indexmap! {
"nu".into() => row(indexmap! {
"children".into() => table(&[
row(indexmap! {
"version".into() => row(indexmap! {
"children".into() => table(&[string("2.0")]),
"attributes".into() => row(indexmap! {})
})
})
]),
"attributes".into() => row(indexmap! {
"version".into() => string("2.0")
})
})
})
);
Ok(())
}
#[test]
fn parses_element_with_multiple_attributes() -> Result<(), roxmltree::Error> {
let source = "\
<nu version=\"2.0\" age=\"25\">
</nu>";
assert_eq!(
parse(source)?,
row(indexmap! {
"nu".into() => row(indexmap! {
"children".into() => table(&[]),
"attributes".into() => row(indexmap! {
"version".into() => string("2.0"),
"age".into() => string("25")
})
})
})
);
Ok(())
}
}

View File

@ -7,11 +7,11 @@ pub struct FromYAML;
impl WholeStreamCommand for FromYAML {
fn name(&self) -> &str {
"from-yaml"
"from yaml"
}
fn signature(&self) -> Signature {
Signature::build("from-yaml")
Signature::build("from yaml")
}
fn usage(&self) -> &str {
@ -31,11 +31,11 @@ pub struct FromYML;
impl WholeStreamCommand for FromYML {
fn name(&self) -> &str {
"from-yml"
"from yml"
}
fn signature(&self) -> Signature {
Signature::build("from-yml")
Signature::build("from yml")
}
fn usage(&self) -> &str {
@ -121,34 +121,12 @@ pub fn from_yaml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value
fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let name_span = tag.span;
let input = args.input;
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let concat_string = input.collect_string(tag.clone()).await?;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
latest_tag = Some(value.tag.clone());
let value_span = value.tag.span;
if let Ok(s) = value.as_string() {
concat_string.push_str(&s);
}
else {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_span,
"value originates from here",
value_span,
))
}
}
match from_yaml_string_to_value(concat_string, tag.clone()) {
match from_yaml_string_to_value(concat_string.item, tag.clone()) {
Ok(x) => match x {
Value { value: UntaggedValue::Table(list), .. } => {
for l in list {
@ -157,15 +135,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
}
x => yield ReturnSuccess::value(x),
},
Err(_) => if let Some(last_tag) = latest_tag {
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as YAML",
"input cannot be parsed as YAML",
&tag,
"value originates from here",
&last_tag,
&concat_string.tag,
))
} ,
}
}
};

View File

@ -0,0 +1,254 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use indexmap::set::IndexSet;
use log::trace;
use nu_errors::ShellError;
use nu_protocol::{
did_you_mean, ColumnPath, PathMember, Primitive, ReturnSuccess, ReturnValue, Signature,
SyntaxShape, UnspannedPathMember, UntaggedValue, Value,
};
use nu_source::span_for_spanned_list;
use nu_value_ext::get_data_by_column_path;
pub struct Get;
#[derive(Deserialize)]
pub struct GetArgs {
rest: Vec<ColumnPath>,
}
impl WholeStreamCommand for Get {
fn name(&self) -> &str {
"get"
}
fn signature(&self) -> Signature {
Signature::build("get").rest(
SyntaxShape::ColumnPath,
"optionally return additional data by path",
)
}
fn usage(&self) -> &str {
"Open given cells as text."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, get)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Extract the name of files as a list",
example: "ls | get name",
},
Example {
description: "Extract the cpu list from the sys information",
example: "sys | get cpu",
},
]
}
}
pub fn get_column_path(path: &ColumnPath, obj: &Value) -> Result<Value, ShellError> {
let fields = path.clone();
get_data_by_column_path(
obj,
path,
Box::new(move |(obj_source, column_path_tried, error)| {
let path_members_span = span_for_spanned_list(fields.members().iter().map(|p| p.span));
match &obj_source.value {
UntaggedValue::Table(rows) => match column_path_tried {
PathMember {
unspanned: UnspannedPathMember::String(column),
..
} => {
let primary_label = format!("There isn't a column named '{}'", &column);
let suggestions: IndexSet<_> = rows
.iter()
.filter_map(|r| did_you_mean(&r, &column_path_tried))
.map(|s| s[0].1.to_owned())
.collect();
let mut existing_columns: IndexSet<_> = IndexSet::default();
let mut names: Vec<String> = vec![];
for row in rows {
for field in row.data_descriptors() {
if !existing_columns.contains(&field[..]) {
existing_columns.insert(field.clone());
names.push(field);
}
}
}
if names.is_empty() {
return ShellError::labeled_error_with_secondary(
"Unknown column",
primary_label,
column_path_tried.span,
"Appears to contain rows. Try indexing instead.",
column_path_tried.span.since(path_members_span),
);
} else {
return ShellError::labeled_error_with_secondary(
"Unknown column",
primary_label,
column_path_tried.span,
format!(
"Perhaps you meant '{}'? Columns available: {}",
suggestions
.iter()
.map(|x| x.to_owned())
.collect::<Vec<String>>()
.join(","),
names.join(",")
),
column_path_tried.span.since(path_members_span),
);
};
}
PathMember {
unspanned: UnspannedPathMember::Int(idx),
..
} => {
let total = rows.len();
let secondary_label = if total == 1 {
"The table only has 1 row".to_owned()
} else {
format!("The table only has {} rows (0 to {})", total, total - 1)
};
return ShellError::labeled_error_with_secondary(
"Row not found",
format!("There isn't a row indexed at {}", idx),
column_path_tried.span,
secondary_label,
column_path_tried.span.since(path_members_span),
);
}
},
UntaggedValue::Row(columns) => match column_path_tried {
PathMember {
unspanned: UnspannedPathMember::String(column),
..
} => {
let primary_label = format!("There isn't a column named '{}'", &column);
if let Some(suggestions) = did_you_mean(&obj_source, column_path_tried) {
return ShellError::labeled_error_with_secondary(
"Unknown column",
primary_label,
column_path_tried.span,
format!(
"Perhaps you meant '{}'? Columns available: {}",
suggestions[0].1,
&obj_source.data_descriptors().join(",")
),
column_path_tried.span.since(path_members_span),
);
}
}
PathMember {
unspanned: UnspannedPathMember::Int(idx),
..
} => {
return ShellError::labeled_error_with_secondary(
"No rows available",
format!("A row at '{}' can't be indexed.", &idx),
column_path_tried.span,
format!(
"Appears to contain columns. Columns available: {}",
columns.keys().join(",")
),
column_path_tried.span.since(path_members_span),
)
}
},
_ => {}
}
if let Some(suggestions) = did_you_mean(&obj_source, column_path_tried) {
return ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", suggestions[0].1),
column_path_tried.span.since(path_members_span),
);
}
error
}),
)
}
pub fn get(
GetArgs { rest: mut fields }: GetArgs,
RunnableContext { mut input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
if fields.is_empty() {
let stream = async_stream! {
let mut vec = input.drain_vec().await;
let descs = nu_protocol::merge_descriptors(&vec);
for desc in descs {
yield ReturnSuccess::value(desc);
}
};
let stream: BoxStream<'static, ReturnValue> = stream.boxed();
Ok(stream.to_output_stream())
} else {
let member = fields.remove(0);
trace!("get {:?} {:?}", member, fields);
let stream = input
.map(move |item| {
let mut result = VecDeque::new();
let member = vec![member.clone()];
let column_paths = vec![&member, &fields]
.into_iter()
.flatten()
.collect::<Vec<&ColumnPath>>();
for path in column_paths {
let res = get_column_path(&path, &item);
match res {
Ok(got) => match got {
Value {
value: UntaggedValue::Table(rows),
..
} => {
for item in rows {
result.push_back(ReturnSuccess::value(item.clone()));
}
}
Value {
value: UntaggedValue::Primitive(Primitive::Nothing),
..
} => {}
other => result.push_back(ReturnSuccess::value(other.clone())),
},
Err(reason) => result.push_back(ReturnSuccess::value(
UntaggedValue::Error(reason).into_untagged_value(),
)),
}
}
futures::stream::iter(result)
})
.flatten();
Ok(stream.to_output_stream())
}
}
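The error paths above suggest near-miss column names through did_you_mean. The core idea is ranking known column names by edit distance to what the user typed; a self-contained sketch of that idea (the distance function here is a plain implementation written for illustration, not the did_you_mean helper or the natural crate used elsewhere in the codebase):

// Plain Levenshtein edit distance, written out for the sketch.
fn levenshtein(a: &str, b: &str) -> usize {
    let a: Vec<char> = a.chars().collect();
    let b: Vec<char> = b.chars().collect();
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut cur = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
        }
        prev = cur;
    }
    prev[b.len()]
}

// "Did you mean?": pick the known column name closest to what the user typed.
fn suggest<'a>(tried: &str, columns: &'a [String]) -> Option<&'a str> {
    columns
        .iter()
        .min_by_key(|c| levenshtein(tried, c.as_str()))
        .map(|c| c.as_str())
}

fn main() {
    let columns = vec!["name".to_string(), "size".to_string(), "modified".to_string()];
    assert_eq!(suggest("nmae", &columns), Some("name"));
}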

View File

@ -1,15 +1,16 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value};
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, Value};
use nu_source::Tagged;
use nu_value_ext::get_data_by_key;
pub struct GroupBy;
#[derive(Deserialize)]
pub struct GroupByArgs {
column_name: Tagged<String>,
date: Tagged<bool>,
format: Option<Tagged<String>>,
}
impl WholeStreamCommand for GroupBy {
@ -18,11 +19,19 @@ impl WholeStreamCommand for GroupBy {
}
fn signature(&self) -> Signature {
Signature::build("group-by").required(
"column_name",
SyntaxShape::String,
"the name of the column to group by",
)
Signature::build("group-by")
.required(
"column_name",
SyntaxShape::String,
"the name of the column to group by",
)
.named(
"format",
SyntaxShape::String,
"Specify date and time formatting",
Some('f'),
)
.switch("date", "by date", Some('d'))
}
fn usage(&self) -> &str {
@ -36,14 +45,30 @@ impl WholeStreamCommand for GroupBy {
) -> Result<OutputStream, ShellError> {
args.process(registry, group_by)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Group files by type",
example: "ls | group-by type",
}]
}
}
enum Grouper {
Default,
ByDate(Option<String>),
}
pub fn group_by(
GroupByArgs { column_name }: GroupByArgs,
GroupByArgs {
column_name,
date,
format,
}: GroupByArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let values: Vec<Value> = input.collect().await;
if values.is_empty() {
yield Err(ShellError::labeled_error(
@ -52,9 +77,38 @@ pub fn group_by(
column_name.span()
))
} else {
match group(&column_name, values, name) {
Ok(grouped) => yield ReturnSuccess::value(grouped),
Err(err) => yield Err(err)
let grouper = if let Tagged { item: true, tag } = date {
if let Some(Tagged { item: fmt, tag }) = format {
Grouper::ByDate(Some(fmt))
} else {
Grouper::ByDate(None)
}
} else {
Grouper::Default
};
match grouper {
Grouper::Default => {
match crate::utils::data::group(column_name, &values, None, &name) {
Ok(grouped) => yield ReturnSuccess::value(grouped),
Err(err) => yield Err(err),
}
}
Grouper::ByDate(None) => {
match crate::utils::data::group(column_name, &values, Some(Box::new(|row: &Value| row.format("%Y-%b-%d"))), &name) {
Ok(grouped) => yield ReturnSuccess::value(grouped),
Err(err) => yield Err(err),
}
}
Grouper::ByDate(Some(fmt)) => {
match crate::utils::data::group(column_name, &values, Some(Box::new(move |row: &Value| {
row.format(&fmt)
})), &name) {
Ok(grouped) => yield ReturnSuccess::value(grouped),
Err(err) => yield Err(err),
}
}
}
}
};
@ -67,50 +121,7 @@ pub fn group(
values: Vec<Value>,
tag: impl Into<Tag>,
) -> Result<Value, ShellError> {
let tag = tag.into();
let mut groups: indexmap::IndexMap<String, Vec<Value>> = indexmap::IndexMap::new();
for value in values {
let group_key = get_data_by_key(&value, column_name.borrow_spanned());
if let Some(group_key) = group_key {
let group_key = group_key.as_string()?.to_string();
let group = groups.entry(group_key).or_insert(vec![]);
group.push(value);
} else {
let possibilities = value.data_descriptors();
let mut possible_matches: Vec<_> = possibilities
.iter()
.map(|x| (natural::distance::levenshtein_distance(x, column_name), x))
.collect();
possible_matches.sort();
if !possible_matches.is_empty() {
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
column_name.tag(),
));
} else {
return Err(ShellError::labeled_error(
"Unknown column",
"row does not contain this column",
column_name.tag(),
));
}
}
}
let mut out = TaggedDictBuilder::new(&tag);
for (k, v) in groups.iter() {
out.insert_untagged(k, UntaggedValue::table(v));
}
Ok(out.into_value())
crate::utils::data::group(column_name.clone(), &values, None, tag)
}
#[cfg(test)]
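group_by now funnels everything through crate::utils::data::group, optionally passing a closure that turns each row into a date-formatted key (the Grouper::ByDate branches above). The underlying operation, grouping rows under a key produced by a caller-supplied function, looks roughly like this with plain std types standing in for nushell's Value and TaggedDictBuilder:

use std::collections::BTreeMap;

// Group rows under a key derived from each row; the optional date formatter
// in the command corresponds to choosing a different key closure.
fn group_by_key<R, F>(rows: Vec<R>, key_of: F) -> BTreeMap<String, Vec<R>>
where
    F: Fn(&R) -> String,
{
    let mut groups: BTreeMap<String, Vec<R>> = BTreeMap::new();
    for row in rows {
        groups.entry(key_of(&row)).or_insert_with(Vec::new).push(row);
    }
    groups
}

fn main() {
    let files = vec![("a.txt", "File"), ("src", "Dir"), ("b.txt", "File")];
    let grouped = group_by_key(files, |(_, kind)| kind.to_string());
    assert_eq!(grouped["File"].len(), 2);
    assert_eq!(grouped["Dir"].len(), 1);
}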

View File

@ -0,0 +1,90 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use futures::stream::StreamExt;
use indexmap::IndexMap;
use nu_errors::ShellError;
use nu_protocol::Dictionary;
use nu_protocol::{ReturnSuccess, Signature, UntaggedValue, Value};
pub struct Headers;
#[derive(Deserialize)]
pub struct HeadersArgs {}
impl WholeStreamCommand for Headers {
fn name(&self) -> &str {
"headers"
}
fn signature(&self) -> Signature {
Signature::build("headers")
}
fn usage(&self) -> &str {
"Use the first row of the table as column names"
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, headers)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Create headers for a raw string",
example: "echo \"a b c|1 2 3\" | split-row \"|\" | split-column \" \" | headers",
}]
}
}
pub fn headers(
HeadersArgs {}: HeadersArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let rows: Vec<Value> = input.collect().await;
if rows.len() < 1 {
yield Err(ShellError::untagged_runtime_error("Couldn't find headers, was the input a properly formatted, non-empty table?"));
}
//the headers are the first row in the table
let headers: Vec<String> = match &rows[0].value {
UntaggedValue::Row(d) => {
Ok(d.entries.iter().map(|(k, v)| {
match v.as_string() {
Ok(s) => s,
Err(_) => { //If a cell that should contain a header name is empty, we name the column Column[index]
match d.entries.get_full(k) {
Some((index, _, _)) => format!("Column{}", index),
None => "unknownColumn".to_string()
}
}
}
}).collect())
}
_ => Err(ShellError::unexpected_eof("Could not get headers, is the table empty?", rows[0].tag.span))
}?;
//Each row is a dictionary with the headers as keys
for r in rows.iter().skip(1) {
match &r.value {
UntaggedValue::Row(d) => {
let mut i = 0;
let mut entries = IndexMap::new();
for (_, v) in d.entries.iter() {
entries.insert(headers[i].clone(), v.clone());
i += 1;
}
yield Ok(ReturnSuccess::Value(UntaggedValue::Row(Dictionary{entries}).into_value(r.tag.clone())))
}
_ => yield Err(ShellError::unexpected_eof("Couldn't iterate through rows, was the input a properly formatted table?", r.tag.span))
}
}
};
Ok(stream.to_output_stream())
}
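The headers command above promotes the first row to column names and re-keys every later row, falling back to ColumnN when a header cell is empty. The same transformation over plain data, as a model only (the real command works on UntaggedValue::Row dictionaries, not nested Vecs):

use std::collections::BTreeMap;

// First row becomes the header names; the remaining rows are re-keyed by them.
// Blank header cells fall back to ColumnN, mirroring the command above.
fn apply_headers(rows: &[Vec<String>]) -> Vec<BTreeMap<String, String>> {
    if rows.is_empty() {
        return vec![];
    }
    let headers: Vec<String> = rows[0]
        .iter()
        .enumerate()
        .map(|(i, h)| if h.is_empty() { format!("Column{}", i) } else { h.clone() })
        .collect();
    rows[1..]
        .iter()
        .map(|row| headers.iter().cloned().zip(row.iter().cloned()).collect())
        .collect()
}

fn main() {
    let rows: Vec<Vec<String>> = vec![
        vec!["a".to_string(), "b".to_string(), "c".to_string()],
        vec!["1".to_string(), "2".to_string(), "3".to_string()],
    ];
    let table = apply_headers(&rows);
    assert_eq!(table[0]["a"], "1");
    assert_eq!(table[0]["c"], "3");
}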

View File

@ -0,0 +1,292 @@
use crate::commands::WholeStreamCommand;
use crate::data::command_dict;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{
NamedType, PositionalType, ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder,
UntaggedValue,
};
use nu_source::{SpannedItem, Tagged};
use nu_value_ext::get_data_by_key;
pub struct Help;
#[derive(Deserialize)]
pub struct HelpArgs {
rest: Vec<Tagged<String>>,
}
impl WholeStreamCommand for Help {
fn name(&self) -> &str {
"help"
}
fn signature(&self) -> Signature {
Signature::build("help").rest(SyntaxShape::String, "the name of command to get help on")
}
fn usage(&self) -> &str {
"Display help information about commands."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, help)?.run()
}
}
fn help(
HelpArgs { rest }: HelpArgs,
RunnableContext { registry, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
if let Some(document) = rest.get(0) {
let mut help = VecDeque::new();
if document.item == "commands" {
let mut sorted_names = registry.names();
sorted_names.sort();
for cmd in sorted_names {
// If it's a subcommand, don't list it during the commands list
if cmd.contains(' ') {
continue;
}
let mut short_desc = TaggedDictBuilder::new(name.clone());
let document_tag = document.tag.clone();
let value = command_dict(
registry.get_command(&cmd).ok_or_else(|| {
ShellError::labeled_error(
format!("Could not load {}", cmd),
"could not load command",
document_tag,
)
})?,
name.clone(),
);
short_desc.insert_untagged("name", cmd);
short_desc.insert_untagged(
"description",
get_data_by_key(&value, "usage".spanned_unknown())
.ok_or_else(|| {
ShellError::labeled_error(
"Expected a usage key",
"expected a 'usage' key",
&value.tag,
)
})?
.as_string()?,
);
help.push_back(ReturnSuccess::value(short_desc.into_value()));
}
} else if rest.len() == 2 {
// Check for a subcommand
let command_name = format!("{} {}", rest[0].item, rest[1].item);
if let Some(command) = registry.get_command(&command_name) {
return Ok(get_help(command.stream_command(), &registry).into());
}
} else if let Some(command) = registry.get_command(&document.item) {
return Ok(get_help(command.stream_command(), &registry).into());
} else {
return Err(ShellError::labeled_error(
"Can't find command (use 'help commands' for full list)",
"can't find command",
document.tag.span,
));
}
let help = futures::stream::iter(help);
Ok(help.to_output_stream())
} else {
let msg = r#"Welcome to Nushell.
Here are some tips to help you get started.
* help commands - list all available commands
* help <command name> - display help about a particular command
Nushell works on the idea of a "pipeline". Pipelines are commands connected with the '|' character.
Each stage in the pipeline works together to load, parse, and display information to you.
[Examples]
List the files in the current directory, sorted by size:
ls | sort-by size
Get information about the current system:
sys | get host
Get the processes on your system actively using CPU:
ps | where cpu > 0
You can also learn more at https://www.nushell.sh/book/"#;
let output_stream = futures::stream::iter(vec![ReturnSuccess::value(
UntaggedValue::string(msg).into_value(name),
)]);
Ok(output_stream.to_output_stream())
}
}
#[allow(clippy::cognitive_complexity)]
pub(crate) fn get_help(
cmd: &dyn WholeStreamCommand,
registry: &CommandRegistry,
) -> impl Into<OutputStream> {
let cmd_name = cmd.name();
let signature = cmd.signature();
let mut help = VecDeque::new();
let mut long_desc = String::new();
long_desc.push_str(&cmd.usage());
long_desc.push_str("\n");
let mut subcommands = String::new();
for name in registry.names() {
if name.starts_with(&format!("{} ", cmd_name)) {
let subcommand = registry.get_command(&name).expect("This shouldn't happen");
subcommands.push_str(&format!(" {} - {}\n", name, subcommand.usage()));
}
}
let mut one_liner = String::new();
one_liner.push_str(&signature.name);
one_liner.push_str(" ");
for positional in &signature.positional {
match &positional.0 {
PositionalType::Mandatory(name, _m) => {
one_liner.push_str(&format!("<{}> ", name));
}
PositionalType::Optional(name, _o) => {
one_liner.push_str(&format!("({}) ", name));
}
}
}
if signature.rest_positional.is_some() {
one_liner.push_str(" ...args");
}
if !subcommands.is_empty() {
one_liner.push_str("<subcommand> ");
}
if !signature.named.is_empty() {
one_liner.push_str("{flags} ");
}
long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner));
if !subcommands.is_empty() {
long_desc.push_str("\nSubcommands:\n");
long_desc.push_str(&subcommands);
}
if !signature.positional.is_empty() || signature.rest_positional.is_some() {
long_desc.push_str("\nParameters:\n");
for positional in signature.positional {
match positional.0 {
PositionalType::Mandatory(name, _m) => {
long_desc.push_str(&format!(" <{}> {}\n", name, positional.1));
}
PositionalType::Optional(name, _o) => {
long_desc.push_str(&format!(" ({}) {}\n", name, positional.1));
}
}
}
if let Some(rest_positional) = signature.rest_positional {
long_desc.push_str(&format!(" ...args: {}\n", rest_positional.1));
}
}
if !signature.named.is_empty() {
long_desc.push_str("\nFlags:\n");
for (flag, ty) in signature.named {
let msg = match ty.0 {
NamedType::Switch(s) => {
if let Some(c) = s {
format!(
" -{}, --{}{} {}\n",
c,
flag,
if !ty.1.is_empty() { ":" } else { "" },
ty.1
)
} else {
format!(
" --{}{} {}\n",
flag,
if !ty.1.is_empty() { ":" } else { "" },
ty.1
)
}
}
NamedType::Mandatory(s, m) => {
if let Some(c) = s {
format!(
" -{}, --{} <{}> (required parameter){} {}\n",
c,
flag,
m.display(),
if !ty.1.is_empty() { ":" } else { "" },
ty.1
)
} else {
format!(
" --{} <{}> (required parameter){} {}\n",
flag,
m.display(),
if !ty.1.is_empty() { ":" } else { "" },
ty.1
)
}
}
NamedType::Optional(s, o) => {
if let Some(c) = s {
format!(
" -{}, --{} <{}>{} {}\n",
c,
flag,
o.display(),
if !ty.1.is_empty() { ":" } else { "" },
ty.1
)
} else {
format!(
" --{} <{}>{} {}\n",
flag,
o.display(),
if !ty.1.is_empty() { ":" } else { "" },
ty.1
)
}
}
};
long_desc.push_str(&msg);
}
}
let examples = cmd.examples();
if !examples.is_empty() {
long_desc.push_str("\nExamples:");
}
for example in examples {
long_desc.push_str("\n");
long_desc.push_str(" ");
long_desc.push_str(example.description);
let colored_example =
crate::shell::helper::Painter::paint_string(example.example, registry);
long_desc.push_str(&format!("\n > {}\n", colored_example));
}
long_desc.push_str("\n");
help.push_back(ReturnSuccess::value(
UntaggedValue::string(long_desc).into_value(Tag::from((0, cmd_name.len(), None))),
));
help
}
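
get_help above assembles the usage line, subcommands, parameters, flags, and examples into one string. A minimal standalone sketch of just the examples step, assuming an Example type with description and example string fields as used above (an illustrative stand-in, not the crate's definition), and without the syntax colouring done by Painter::paint_string:

// Illustrative stand-in for the per-command example metadata.
struct Example {
    description: &'static str,
    example: &'static str,
}

// Render an "Examples:" section in roughly the shape get_help produces.
fn render_examples(examples: &[Example]) -> String {
    let mut out = String::new();
    if !examples.is_empty() {
        out.push_str("\nExamples:");
    }
    for example in examples {
        out.push_str(&format!("\n  {}\n  > {}\n", example.description, example.example));
    }
    out
}

fn main() {
    let examples = [Example {
        description: "List the files in the current directory, sorted by size",
        example: "ls | sort-by size",
    }];
    print!("{}", render_examples(&examples));
}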


@ -30,7 +30,7 @@ impl WholeStreamCommand for Histogram {
"the name of the column to graph by",
)
.rest(
SyntaxShape::Member,
SyntaxShape::String,
"column name to give the histogram's frequency column",
)
}
@ -46,6 +46,24 @@ impl WholeStreamCommand for Histogram {
) -> Result<OutputStream, ShellError> {
args.process(registry, histogram)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Get a histogram for the types of files",
example: "ls | histogram type",
},
Example {
description:
"Get a histogram for the types of files, with frequency column named count",
example: "ls | histogram type count",
},
Example {
description: "Get a histogram for a list of numbers",
example: "echo [1 2 3 1 2 3 1 1 1 1 3 2 1 1 3] | wrap | histogram Column",
},
]
}
}
pub fn histogram(
@ -53,7 +71,7 @@ pub fn histogram(
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let values: Vec<Value> = input.collect().await;
let Tagged { item: group_by, .. } = column_name.clone();


@ -0,0 +1,55 @@
use crate::cli::History as HistoryFile;
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, UntaggedValue};
use std::fs::File;
use std::io::{BufRead, BufReader};
pub struct History;
#[derive(Deserialize)]
pub struct HistoryArgs {}
impl WholeStreamCommand for History {
fn name(&self) -> &str {
"history"
}
fn signature(&self) -> Signature {
Signature::build("history")
}
fn usage(&self) -> &str {
"Display command history."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, history)?.run()
}
}
fn history(
_: HistoryArgs,
RunnableContext { name: tag, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let history_path = HistoryFile::path();
let file = File::open(history_path);
if let Ok(file) = file {
let reader = BufReader::new(file);
for line in reader.lines() {
if let Ok(line) = line {
yield ReturnSuccess::value(UntaggedValue::string(line).into_value(tag.clone()));
}
}
} else {
yield Err(ShellError::labeled_error("Could not open history", "history file could not be opened", tag.clone()));
}
};
Ok(stream.to_output_stream())
}


@ -0,0 +1,77 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ColumnPath, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
use nu_value_ext::ValueExt;
pub struct Insert;
#[derive(Deserialize)]
pub struct InsertArgs {
column: ColumnPath,
value: Value,
}
impl WholeStreamCommand for Insert {
fn name(&self) -> &str {
"insert"
}
fn signature(&self) -> Signature {
Signature::build("insert")
.required(
"column",
SyntaxShape::ColumnPath,
"the column name to insert",
)
.required(
"value",
SyntaxShape::String,
"the value to give the cell(s)",
)
}
fn usage(&self) -> &str {
"Insert a new column with a given value."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, insert)?.run()
}
}
fn insert(
InsertArgs { column, value }: InsertArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let mut input = input;
let stream = async_stream! {
match input.next().await {
Some(obj @ Value {
value: UntaggedValue::Row(_),
..
}) => match obj.insert_data_at_column_path(&column, value.clone()) {
Ok(v) => yield Ok(ReturnSuccess::Value(v)),
Err(err) => yield Err(err),
},
Some(Value { tag, ..}) => {
yield Err(ShellError::labeled_error(
"Unrecognized type in stream",
"original value",
tag,
));
}
None => {}
};
};
Ok(stream.to_output_stream())
}


@ -0,0 +1,198 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ColumnPath, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
use nu_source::Tagged;
use nu_value_ext::ValueExt;
enum IsEmptyFor {
Value,
RowWithFieldsAndFallback(Vec<Tagged<ColumnPath>>, Value),
RowWithField(Tagged<ColumnPath>),
RowWithFieldAndFallback(Box<Tagged<ColumnPath>>, Value),
}
pub struct IsEmpty;
#[derive(Deserialize)]
pub struct IsEmptyArgs {
rest: Vec<Value>,
}
impl WholeStreamCommand for IsEmpty {
fn name(&self) -> &str {
"empty?"
}
fn signature(&self) -> Signature {
Signature::build("empty?").rest(
SyntaxShape::Any,
"the names of the columns to check emptiness followed by the replacement value.",
)
}
fn usage(&self) -> &str {
"Checks emptiness. The last value is the replacement value for any empty column(s) given to check against the table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, is_empty)?.run()
}
}
fn is_empty(
IsEmptyArgs { rest }: IsEmptyArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
Ok(input
.map(move |value| {
let value_tag = value.tag();
let action = if rest.len() <= 2 {
let field = rest.get(0);
let replacement_if_true = rest.get(1);
match (field, replacement_if_true) {
(Some(field), Some(replacement_if_true)) => {
IsEmptyFor::RowWithFieldAndFallback(
Box::new(field.as_column_path()?),
replacement_if_true.clone(),
)
}
(Some(field), None) => IsEmptyFor::RowWithField(field.as_column_path()?),
(_, _) => IsEmptyFor::Value,
}
} else {
// let no_args = vec![];
let mut arguments = rest.iter().rev();
let replacement_if_true = match arguments.next() {
Some(arg) => arg.clone(),
None => UntaggedValue::boolean(value.is_empty()).into_value(&value_tag),
};
IsEmptyFor::RowWithFieldsAndFallback(
arguments
.map(|a| a.as_column_path())
.filter_map(Result::ok)
.collect(),
replacement_if_true,
)
};
match action {
IsEmptyFor::Value => Ok(ReturnSuccess::Value(
UntaggedValue::boolean(value.is_empty()).into_value(value_tag),
)),
IsEmptyFor::RowWithFieldsAndFallback(fields, default) => {
let mut out = value;
for field in fields.iter() {
let val =
out.get_data_by_column_path(&field, Box::new(move |(_, _, err)| err))?;
let emptiness_value = match out {
obj
@
Value {
value: UntaggedValue::Row(_),
..
} => {
if val.is_empty() {
match obj.replace_data_at_column_path(&field, default.clone()) {
Some(v) => Ok(v),
None => Err(ShellError::labeled_error(
"empty? could not find place to check emptiness",
"column name",
&field.tag,
)),
}
} else {
Ok(obj)
}
}
_ => Err(ShellError::labeled_error(
"Unrecognized type in stream",
"original value",
&value_tag,
)),
};
out = emptiness_value?;
}
Ok(ReturnSuccess::Value(out))
}
IsEmptyFor::RowWithField(field) => {
let val =
value.get_data_by_column_path(&field, Box::new(move |(_, _, err)| err))?;
match &value {
obj
@
Value {
value: UntaggedValue::Row(_),
..
} => {
if val.is_empty() {
match obj.replace_data_at_column_path(
&field,
UntaggedValue::boolean(true).into_value(&value_tag),
) {
Some(v) => Ok(ReturnSuccess::Value(v)),
None => Err(ShellError::labeled_error(
"empty? could not find place to check emptiness",
"column name",
&field.tag,
)),
}
} else {
Ok(ReturnSuccess::Value(value))
}
}
_ => Err(ShellError::labeled_error(
"Unrecognized type in stream",
"original value",
&value_tag,
)),
}
}
IsEmptyFor::RowWithFieldAndFallback(field, default) => {
let val =
value.get_data_by_column_path(&field, Box::new(move |(_, _, err)| err))?;
match &value {
obj
@
Value {
value: UntaggedValue::Row(_),
..
} => {
if val.is_empty() {
match obj.replace_data_at_column_path(&field, default) {
Some(v) => Ok(ReturnSuccess::Value(v)),
None => Err(ShellError::labeled_error(
"empty? could not find place to check emptiness",
"column name",
&field.tag,
)),
}
} else {
Ok(ReturnSuccess::Value(value))
}
}
_ => Err(ShellError::labeled_error(
"Unrecognized type in stream",
"original value",
&value_tag,
)),
}
}
}
})
.to_output_stream())
}
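
The rest-argument handling in is_empty treats the trailing argument as the replacement value and everything before it as column paths to check. A standalone sketch of that splitting rule, using plain Strings in place of nushell values (illustrative only):

// Split empty?-style rest arguments: the last argument is the replacement,
// the ones before it are the columns to check for emptiness.
fn split_rest(rest: &[String]) -> (Vec<String>, Option<String>) {
    match rest {
        [] => (Vec::new(), None),                  // check the value itself
        [column] => (vec![column.clone()], None),  // one column, boolean result
        [columns @ .., replacement] => (columns.to_vec(), Some(replacement.clone())),
    }
}

fn main() {
    let rest: Vec<String> = vec!["foo".into(), "bar".into(), "N/A".into()];
    println!("{:?}", split_rest(&rest)); // (["foo", "bar"], Some("N/A"))
}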


@ -0,0 +1,62 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Signature, SyntaxShape};
use nu_source::Tagged;
pub struct Keep;
#[derive(Deserialize)]
pub struct KeepArgs {
rows: Option<Tagged<usize>>,
}
impl WholeStreamCommand for Keep {
fn name(&self) -> &str {
"keep"
}
fn signature(&self) -> Signature {
Signature::build("keep").optional(
"rows",
SyntaxShape::Int,
"starting from the front, the number of rows to keep",
)
}
fn usage(&self) -> &str {
"Keep the number of rows only"
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, keep)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Keep the first row",
example: "ls | keep",
},
Example {
description: "Keep the first four rows",
example: "ls | keep 4",
},
]
}
}
fn keep(KeepArgs { rows }: KeepArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
let rows_desired = if let Some(quantity) = rows {
*quantity
} else {
1
};
Ok(OutputStream::from_input(context.input.take(rows_desired)))
}


@ -0,0 +1,98 @@
use crate::commands::WholeStreamCommand;
use crate::evaluate::evaluate_baseline_expr;
use crate::prelude::*;
use log::trace;
use nu_errors::ShellError;
use nu_protocol::{hir::ClassifiedCommand, Signature, SyntaxShape, UntaggedValue, Value};
pub struct KeepUntil;
impl WholeStreamCommand for KeepUntil {
fn name(&self) -> &str {
"keep-until"
}
fn signature(&self) -> Signature {
Signature::build("keep-until")
.required(
"condition",
SyntaxShape::Math,
"the condition that must be met to stop keeping rows",
)
.filter()
}
fn usage(&self) -> &str {
"Keeps rows until the condition matches."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
let registry = registry.clone();
let scope = args.call_info.scope.clone();
let call_info = args.evaluate_once(&registry)?;
let block = call_info.args.expect_nth(0)?.clone();
let condition = match block {
Value {
value: UntaggedValue::Block(block),
tag,
} => {
if block.block.len() != 1 {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
match block.block[0].list.get(0) {
Some(item) => match item {
ClassifiedCommand::Expr(expr) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
))
}
},
None => {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
}
}
Value { tag, .. } => {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
};
let objects = call_info.input.take_while(move |item| {
let condition = condition.clone();
trace!("ITEM = {:?}", item);
let result =
evaluate_baseline_expr(&*condition, &registry, &scope.clone().set_it(item.clone()));
trace!("RESULT = {:?}", result);
let return_value = match result {
Ok(ref v) if v.is_true() => false,
_ => true,
};
futures::future::ready(return_value)
});
Ok(objects.from_input_stream())
}
}


@ -0,0 +1,98 @@
use crate::commands::WholeStreamCommand;
use crate::evaluate::evaluate_baseline_expr;
use crate::prelude::*;
use log::trace;
use nu_errors::ShellError;
use nu_protocol::{hir::ClassifiedCommand, Signature, SyntaxShape, UntaggedValue, Value};
pub struct KeepWhile;
impl WholeStreamCommand for KeepWhile {
fn name(&self) -> &str {
"keep-while"
}
fn signature(&self) -> Signature {
Signature::build("keep-while")
.required(
"condition",
SyntaxShape::Math,
"the condition that must be met to keep rows",
)
.filter()
}
fn usage(&self) -> &str {
"Keeps rows while the condition matches."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
let registry = registry.clone();
let scope = args.call_info.scope.clone();
let call_info = args.evaluate_once(&registry)?;
let block = call_info.args.expect_nth(0)?.clone();
let condition = match block {
Value {
value: UntaggedValue::Block(block),
tag,
} => {
if block.block.len() != 1 {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
match block.block[0].list.get(0) {
Some(item) => match item {
ClassifiedCommand::Expr(expr) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
))
}
},
None => {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
}
}
Value { tag, .. } => {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
};
let objects = call_info.input.take_while(move |item| {
let condition = condition.clone();
trace!("ITEM = {:?}", item);
let result =
evaluate_baseline_expr(&*condition, &registry, &scope.clone().set_it(item.clone()));
trace!("RESULT = {:?}", result);
let return_value = match result {
Ok(ref v) if v.is_true() => true,
_ => false,
};
futures::future::ready(return_value)
});
Ok(objects.from_input_stream())
}
}
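
Both keep-until and keep-while boil down to StreamExt::take_while with a future-returning predicate wrapped around the evaluated condition. A standalone sketch of that combinator with a plain numeric predicate, assuming the futures 0.3 crate used elsewhere in this diff:

use futures::executor::block_on;
use futures::stream::{self, StreamExt};

fn main() {
    // take_while keeps yielding items while the predicate future resolves to
    // true; keep-until simply negates the result of the condition.
    let kept: Vec<i32> = block_on(
        stream::iter(vec![1, 2, 3, 10, 4])
            .take_while(|item| futures::future::ready(*item < 5))
            .collect(),
    );
    println!("{:?}", kept); // [1, 2, 3]
}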


@ -0,0 +1,113 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Signature, SyntaxShape};
use nu_source::Tagged;
use std::process::{Command, Stdio};
pub struct Kill;
#[derive(Deserialize)]
pub struct KillArgs {
pub pid: Tagged<u64>,
pub rest: Vec<Tagged<u64>>,
pub force: Tagged<bool>,
pub quiet: Tagged<bool>,
}
impl WholeStreamCommand for Kill {
fn name(&self) -> &str {
"kill"
}
fn signature(&self) -> Signature {
Signature::build("kill")
.required(
"pid",
SyntaxShape::Int,
"process id of process that is to be killed",
)
.rest(SyntaxShape::Int, "rest of processes to kill")
.switch("force", "forcefully kill the process", Some('f'))
.switch("quiet", "won't print anything to the console", Some('q'))
}
fn usage(&self) -> &str {
"Kill a process using the process id."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, kill)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Kill the pid using the most memory",
example: "ps | sort-by mem | last | kill $it.pid",
},
Example {
description: "Force kill a given pid",
example: "kill --force 12345",
},
]
}
}
fn kill(
KillArgs {
pid,
rest,
force,
quiet,
}: KillArgs,
_context: RunnableContext,
) -> Result<OutputStream, ShellError> {
let mut cmd = if cfg!(windows) {
let mut cmd = Command::new("taskkill");
if *force {
cmd.arg("/F");
}
cmd.arg("/PID");
cmd.arg(pid.item().to_string());
// each pid must be written with its own `/PID` flag, otherwise
// taskkill will act like the unix `killall` command
for id in &rest {
cmd.arg("/PID");
cmd.arg(id.item().to_string());
}
cmd
} else {
let mut cmd = Command::new("kill");
if *force {
cmd.arg("-9");
}
cmd.arg(pid.item().to_string());
cmd.args(rest.iter().map(move |id| id.item().to_string()));
cmd
};
// pipe everything to null
if *quiet {
cmd.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null());
}
cmd.status().expect("failed to execute shell command");
Ok(OutputStream::empty())
}
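
kill shells out to taskkill on Windows and kill elsewhere, and on Windows every process id needs its own /PID flag. A standalone sketch that only builds the argv, with nothing executed (names here are illustrative):

// Build the program name and argument list the kill command above would run.
fn kill_argv(pids: &[u32], force: bool, windows: bool) -> (&'static str, Vec<String>) {
    let mut args = Vec::new();
    if windows {
        if force {
            args.push("/F".to_string());
        }
        // each pid gets its own /PID flag, as in the taskkill branch above
        for pid in pids {
            args.push("/PID".to_string());
            args.push(pid.to_string());
        }
        ("taskkill", args)
    } else {
        if force {
            args.push("-9".to_string());
        }
        args.extend(pids.iter().map(|pid| pid.to_string()));
        ("kill", args)
    }
}

fn main() {
    println!("{:?}", kill_argv(&[123, 456], true, true));  // ("taskkill", ["/F", "/PID", "123", "/PID", "456"])
    println!("{:?}", kill_argv(&[123, 456], true, false)); // ("kill", ["-9", "123", "456"])
}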


@ -36,6 +36,19 @@ impl WholeStreamCommand for Last {
) -> Result<OutputStream, ShellError> {
args.process(registry, last)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Get the last row",
example: "ls | last",
},
Example {
description: "Get the last three rows",
example: "ls | last 3",
},
]
}
}
fn last(LastArgs { rows }: LastArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {


@ -0,0 +1,123 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, Signature, UntaggedValue, Value};
pub struct Lines;
impl WholeStreamCommand for Lines {
fn name(&self) -> &str {
"lines"
}
fn signature(&self) -> Signature {
Signature::build("lines")
}
fn usage(&self) -> &str {
"Split single string into rows, one per line."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
lines(args, registry)
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Split output from an external command into lines",
example: "^ls -l | lines",
}]
}
}
fn ends_with_line_ending(st: &str) -> bool {
let mut temp = st.to_string();
let last = temp.pop();
if let Some(c) = last {
c == '\n'
} else {
false
}
}
fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let name_span = tag.span;
let mut input = args.input;
let mut leftover = vec![];
let mut leftover_string = String::new();
let stream = async_stream! {
loop {
match input.next().await {
Some(Value { value: UntaggedValue::Primitive(Primitive::String(st)), ..}) => {
let mut st = leftover_string.clone() + &st;
leftover.clear();
let mut lines: Vec<String> = st.lines().map(|x| x.to_string()).collect();
if !ends_with_line_ending(&st) {
if let Some(last) = lines.pop() {
leftover_string = last;
} else {
leftover_string.clear();
}
} else {
leftover_string.clear();
}
let success_lines: Vec<_> = lines.iter().map(|x| ReturnSuccess::value(UntaggedValue::line(x).into_untagged_value())).collect();
yield futures::stream::iter(success_lines)
}
Some(Value { value: UntaggedValue::Primitive(Primitive::Line(st)), ..}) => {
let mut st = leftover_string.clone() + &st;
leftover.clear();
let mut lines: Vec<String> = st.lines().map(|x| x.to_string()).collect();
if !ends_with_line_ending(&st) {
if let Some(last) = lines.pop() {
leftover_string = last;
} else {
leftover_string.clear();
}
} else {
leftover_string.clear();
}
let success_lines: Vec<_> = lines.iter().map(|x| ReturnSuccess::value(UntaggedValue::line(x).into_untagged_value())).collect();
yield futures::stream::iter(success_lines)
}
Some( Value { tag: value_span, ..}) => {
yield futures::stream::iter(vec![Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_span,
"value originates from here",
value_span,
))]);
}
None => {
if !leftover.is_empty() {
let mut st = leftover_string.clone();
if let Ok(extra) = String::from_utf8(leftover) {
st.push_str(&extra);
}
yield futures::stream::iter(vec![ReturnSuccess::value(UntaggedValue::string(st).into_untagged_value())])
}
break;
}
}
}
if !leftover_string.is_empty() {
yield futures::stream::iter(vec![ReturnSuccess::value(UntaggedValue::string(leftover_string).into_untagged_value())]);
}
}
.flatten();
Ok(stream.to_output_stream())
}
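
The leftover_string bookkeeping in lines is what keeps a line intact when it arrives split across two stream chunks. A standalone sketch of that buffering idea over plain &str chunks (illustrative, synchronous):

// Split chunks into complete lines, carrying a trailing partial line over to
// the next chunk, the same role leftover_string plays above.
fn split_chunks(chunks: &[&str]) -> Vec<String> {
    let mut out = Vec::new();
    let mut leftover = String::new();
    for chunk in chunks {
        let text = format!("{}{}", leftover, chunk);
        let mut lines: Vec<String> = text.lines().map(str::to_string).collect();
        if text.ends_with('\n') {
            leftover.clear();
        } else {
            leftover = lines.pop().unwrap_or_default();
        }
        out.extend(lines);
    }
    if !leftover.is_empty() {
        out.push(leftover);
    }
    out
}

fn main() {
    println!("{:?}", split_chunks(&["first\nsec", "ond\nthird"])); // ["first", "second", "third"]
}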


@ -0,0 +1,90 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Signature, SyntaxShape};
use nu_source::Tagged;
use std::path::PathBuf;
pub struct Ls;
#[derive(Deserialize)]
pub struct LsArgs {
pub path: Option<Tagged<PathBuf>>,
pub all: bool,
pub full: bool,
#[serde(rename = "short-names")]
pub short_names: bool,
#[serde(rename = "with-symlink-targets")]
pub with_symlink_targets: bool,
#[serde(rename = "du")]
pub du: bool,
}
impl WholeStreamCommand for Ls {
fn name(&self) -> &str {
"ls"
}
fn signature(&self) -> Signature {
Signature::build("ls")
.optional(
"path",
SyntaxShape::Pattern,
"a path to get the directory contents from",
)
.switch("all", "also show hidden files", Some('a'))
.switch(
"full",
"list all available columns for each entry",
Some('f'),
)
.switch(
"short-names",
"only print the file names and not the path",
Some('s'),
)
.switch(
"with-symlink-targets",
"display the paths to the target files that symlinks point to",
Some('w'),
)
.switch(
"du",
"display the apparent directory size in place of the directory metadata size",
Some('d'),
)
}
fn usage(&self) -> &str {
"View the contents of the current or given path."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, ls)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "List all files in the current directory",
example: "ls",
},
Example {
description: "List all files in a subdirectory",
example: "ls subdir",
},
Example {
description: "List all rust files",
example: "ls *.rs",
},
]
}
}
fn ls(args: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
context.shell_manager.ls(args, &context)
}


@ -24,6 +24,7 @@ impl WholeStreamCommand for MapMaxBy {
"column_name",
SyntaxShape::String,
"the name of the column to map-max the table's rows",
Some('c'),
)
}
@ -45,7 +46,7 @@ pub fn map_max_by(
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;
let values: Vec<Value> = input.collect().await;
if values.is_empty() {


@ -0,0 +1,103 @@
use crate::commands::classified::block::run_block;
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::data::value::merge_values;
use crate::prelude::*;
use indexmap::IndexMap;
use nu_errors::ShellError;
use nu_protocol::{hir::Block, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
pub struct Merge;
#[derive(Deserialize)]
pub struct MergeArgs {
block: Block,
}
impl WholeStreamCommand for Merge {
fn name(&self) -> &str {
"merge"
}
fn signature(&self) -> Signature {
Signature::build("merge").required(
"block",
SyntaxShape::Block,
"the block to run and merge into the table",
)
}
fn usage(&self) -> &str {
"Merge a table."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
Ok(args.process_raw(registry, merge)?.run())
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Merge a 1-based index column with some ls output",
example: "ls | select name | keep 3 | merge { echo [1 2 3] | wrap index }",
}]
}
}
fn merge(
merge_args: MergeArgs,
context: RunnableContext,
raw_args: RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let block = merge_args.block;
let registry = context.registry.clone();
let mut input = context.input;
let scope = raw_args.call_info.scope.clone();
let mut context = Context::from_raw(&raw_args, &registry);
let stream = async_stream! {
let table: Option<Vec<Value>> = match run_block(&block,
&mut context,
InputStream::empty(),
&scope).await {
Ok(mut stream) => Some(stream.drain_vec().await),
Err(err) => {
yield Err(err);
return;
}
};
let table = table.unwrap_or_else(|| vec![Value {
value: UntaggedValue::row(IndexMap::default()),
tag: raw_args.call_info.name_tag,
}]);
let mut idx = 0;
while let Some(value) = input.next().await {
let other = table.get(idx);
match other {
Some(replacement) => {
match merge_values(&value.value, &replacement.value) {
Ok(merged_value) => yield ReturnSuccess::value(merged_value.into_value(&value.tag)),
Err(err) => {
let message = format!("The row at {:?} has a type mismatch", idx);
yield Err(ShellError::labeled_error("Could not merge", &message, &value.tag));
}
}
}
None => yield ReturnSuccess::value(value),
}
idx += 1;
}
};
Ok(stream.to_output_stream())
}
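
merge pairs each input row with the row at the same index from the block's output, falling back to the original row once that table runs out. A standalone sketch of the pairing with plain strings (illustrative):

fn main() {
    let input = vec!["a", "b", "c", "d"];
    let table = vec!["1", "2"];
    // table.get(idx) is Some for the first two rows and None afterwards,
    // mirroring the Some(replacement)/None match in the merge loop above.
    let merged: Vec<String> = input
        .iter()
        .enumerate()
        .map(|(idx, row)| match table.get(idx) {
            Some(other) => format!("{}+{}", row, other),
            None => row.to_string(),
        })
        .collect();
    println!("{:?}", merged); // ["a+1", "b+2", "c", "d"]
}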


@ -1,8 +1,8 @@
use crate::commands::command::RunnablePerItemContext;
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{CallInfo, Signature, SyntaxShape, Value};
use nu_protocol::{Signature, SyntaxShape};
use nu_source::Tagged;
use std::path::PathBuf;
@ -13,7 +13,7 @@ pub struct MkdirArgs {
pub rest: Vec<Tagged<PathBuf>>,
}
impl PerItemCommand for Mkdir {
impl WholeStreamCommand for Mkdir {
fn name(&self) -> &str {
"mkdir"
}
@ -28,16 +28,21 @@ impl PerItemCommand for Mkdir {
fn run(
&self,
call_info: &CallInfo,
_registry: &CommandRegistry,
raw_args: &RawCommandArgs,
_input: Value,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
call_info.process(&raw_args.shell_manager, mkdir)?.run()
args.process(registry, mkdir)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Make a directory named foo",
example: "mkdir foo",
}]
}
}
fn mkdir(args: MkdirArgs, context: &RunnablePerItemContext) -> Result<OutputStream, ShellError> {
fn mkdir(args: MkdirArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
let shell_manager = context.shell_manager.clone();
shell_manager.mkdir(args, context)
shell_manager.mkdir(args, &context)
}


@ -1,8 +1,8 @@
use crate::commands::command::RunnablePerItemContext;
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{CallInfo, Signature, SyntaxShape, Value};
use nu_protocol::{Signature, SyntaxShape};
use nu_source::Tagged;
use std::path::PathBuf;
@ -14,7 +14,7 @@ pub struct MoveArgs {
pub dst: Tagged<PathBuf>,
}
impl PerItemCommand for Move {
impl WholeStreamCommand for Move {
fn name(&self) -> &str {
"mv"
}
@ -39,16 +39,31 @@ impl PerItemCommand for Move {
fn run(
&self,
call_info: &CallInfo,
_registry: &CommandRegistry,
raw_args: &RawCommandArgs,
_input: Value,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
call_info.process(&raw_args.shell_manager, mv)?.run()
args.process(registry, mv)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Rename a file",
example: "mv before.txt after.txt",
},
Example {
description: "Move a file into a directory",
example: "mv test.txt my/subdirectory",
},
Example {
description: "Move many files into a directory",
example: "mv *.txt my/subdirectory",
},
]
}
}
fn mv(args: MoveArgs, context: &RunnablePerItemContext) -> Result<OutputStream, ShellError> {
fn mv(args: MoveArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
let shell_manager = context.shell_manager.clone();
shell_manager.mv(args, context)
shell_manager.mv(args, &context)
}


@ -22,7 +22,7 @@ impl WholeStreamCommand for Nth {
Signature::build("nth")
.required(
"row number",
SyntaxShape::Any,
SyntaxShape::Int,
"the number of the row to return",
)
.rest(SyntaxShape::Any, "Optionally return more rows")
@ -39,6 +39,19 @@ impl WholeStreamCommand for Nth {
) -> Result<OutputStream, ShellError> {
args.process(registry, nth)?.run()
}
fn examples(&self) -> &[Example] {
&[
Example {
description: "Get the second row",
example: "echo [first second third] | get 1",
},
Example {
description: "Get the first and third rows",
example: "echo [first second third] | get 0 2",
},
]
}
}
fn nth(
@ -49,7 +62,6 @@ fn nth(
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = input
.values
.enumerate()
.map(move |(idx, item)| {
let row_number = vec![row_number.clone()];
@ -68,7 +80,7 @@ fn nth(
result.push_back(ReturnSuccess::value(item));
}
result
futures::stream::iter(result)
})
.flatten();
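
nth enumerates the stream and keeps only the rows whose index matches one of the requested positions. A standalone sketch with a plain iterator and a fixed set of wanted indices (illustrative):

fn main() {
    let wanted = [0usize, 2];
    // enumerate() pairs each row with its index; filter keeps the requested ones.
    let rows: Vec<&str> = ["first", "second", "third"]
        .iter()
        .copied()
        .enumerate()
        .filter(|(idx, _)| wanted.contains(idx))
        .map(|(_, row)| row)
        .collect();
    println!("{:?}", rows); // ["first", "third"]
}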


@ -1,14 +1,19 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{
CallInfo, CommandAction, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value,
};
use nu_source::{AnchorLocation, Span};
use nu_protocol::{CommandAction, ReturnSuccess, Signature, SyntaxShape, UntaggedValue};
use nu_source::{AnchorLocation, Span, Tagged};
use std::path::{Path, PathBuf};
pub struct Open;
impl PerItemCommand for Open {
#[derive(Deserialize)]
pub struct OpenArgs {
path: Tagged<PathBuf>,
raw: Tagged<bool>,
}
impl WholeStreamCommand for Open {
fn name(&self) -> &str {
"open"
}
@ -20,7 +25,11 @@ impl PerItemCommand for Open {
SyntaxShape::Path,
"the file path to load values from",
)
.switch("raw", "load content as a string insead of a table")
.switch(
"raw",
"load content as a string instead of a table",
Some('r'),
)
}
fn usage(&self) -> &str {
@ -29,36 +38,23 @@ impl PerItemCommand for Open {
fn run(
&self,
call_info: &CallInfo,
_registry: &CommandRegistry,
raw_args: &RawCommandArgs,
_input: Value,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
run(call_info, raw_args)
args.process(registry, open)?.run()
}
}
fn run(call_info: &CallInfo, raw_args: &RawCommandArgs) -> Result<OutputStream, ShellError> {
let shell_manager = &raw_args.shell_manager;
let cwd = PathBuf::from(shell_manager.path()?);
fn open(
OpenArgs { path, raw }: OpenArgs,
RunnableContext { shell_manager, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let cwd = PathBuf::from(shell_manager.path());
let full_path = cwd;
let path = call_info.args.nth(0).ok_or_else(|| {
ShellError::labeled_error(
"No file or directory specified",
"for command",
&call_info.name_tag,
)
})?;
let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string();
let path_span = path.tag.span;
let has_raw = call_info.args.has("raw");
let stream = async_stream! {
let result = fetch(&full_path, &path_str, path_span).await;
let result = fetch(&full_path, &path.item, path.tag.span).await;
if let Err(e) = result {
yield Err(e);
@ -66,12 +62,12 @@ fn run(call_info: &CallInfo, raw_args: &RawCommandArgs) -> Result<OutputStream,
}
let (file_extension, contents, contents_tag) = result?;
let file_extension = if has_raw {
let file_extension = if raw.item {
None
} else {
// If the extension could not be determined via mimetype, try to use the path
// extension. Some file types do not declare their mimetypes (such as bson files).
file_extension.or(path_str.split('.').last().map(String::from))
file_extension.or(path.extension().map(|x| x.to_string_lossy().to_string()))
};
let tagged_contents = contents.into_value(&contents_tag);
@ -88,7 +84,7 @@ fn run(call_info: &CallInfo, raw_args: &RawCommandArgs) -> Result<OutputStream,
pub async fn fetch(
cwd: &PathBuf,
location: &str,
location: &PathBuf,
span: Span,
) -> Result<(Option<String>, UntaggedValue, Tag), ShellError> {
let mut cwd = cwd.clone();
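
When --raw is not set, open prefers the extension detected from the content and otherwise falls back to the path's own extension. A standalone sketch of that fallback with std::path::Path (the file name is made up):

use std::path::Path;

fn main() {
    // No extension was detected from the content, so fall back to the path.
    let detected: Option<String> = None;
    let path = Path::new("notes.toml");
    let extension = detected.or_else(|| path.extension().map(|x| x.to_string_lossy().to_string()));
    println!("{:?}", extension); // Some("toml")
}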


@ -0,0 +1,167 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue};
use nu_source::Tagged;
use regex::Regex;
#[derive(Debug)]
enum ParseCommand {
Text(String),
Column(String),
}
fn parse(input: &str) -> Vec<ParseCommand> {
let mut output = vec![];
//let mut loop_input = input;
let mut loop_input = input.chars();
loop {
let mut before = String::new();
while let Some(c) = loop_input.next() {
if c == '{' {
break;
}
before.push(c);
}
if !before.is_empty() {
output.push(ParseCommand::Text(before.to_string()));
}
// Look for column as we're now at one
let mut column = String::new();
while let Some(c) = loop_input.next() {
if c == '}' {
break;
}
column.push(c);
}
if !column.is_empty() {
output.push(ParseCommand::Column(column.to_string()));
}
if before.is_empty() && column.is_empty() {
break;
}
}
output
}
fn column_names(commands: &[ParseCommand]) -> Vec<String> {
let mut output = vec![];
for command in commands {
if let ParseCommand::Column(c) = command {
output.push(c.clone());
}
}
output
}
fn build_regex(commands: &[ParseCommand]) -> String {
let mut output = String::new();
for command in commands {
match command {
ParseCommand::Text(s) => {
output.push_str(&s.replace("(", "\\("));
}
ParseCommand::Column(_) => {
output.push_str("(.*)");
}
}
}
output
}
pub struct Parse;
#[derive(Deserialize)]
pub struct ParseArgs {
pattern: Tagged<String>,
}
impl WholeStreamCommand for Parse {
fn name(&self) -> &str {
"parse"
}
fn signature(&self) -> Signature {
Signature::build("parse").required(
"pattern",
SyntaxShape::String,
"the pattern to match. Eg) \"{foo}: {bar}\"",
)
}
fn usage(&self) -> &str {
"Parse columns from string data using a simple pattern."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, parse_command)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Parse values from a string into a table",
example: r#"echo "data: 123" | parse "{key}: {value}""#,
}]
}
}
fn parse_command(
ParseArgs { pattern }: ParseArgs,
RunnableContext { name, input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let parse_pattern = parse(&pattern.item);
let parse_regex = build_regex(&parse_pattern);
let column_names = column_names(&parse_pattern);
let name = name.span;
let regex = Regex::new(&parse_regex).map_err(|_| {
ShellError::labeled_error(
"Could not parse regex",
"could not parse regex",
&pattern.tag,
)
})?;
Ok(input
.map(move |value| {
if let Ok(s) = value.as_string() {
let mut output = vec![];
for cap in regex.captures_iter(&s) {
let mut dict = TaggedDictBuilder::new(value.tag());
for (idx, column_name) in column_names.iter().enumerate() {
dict.insert_untagged(
column_name,
UntaggedValue::string(cap[idx + 1].to_string()),
);
}
output.push(Ok(ReturnSuccess::Value(dict.into_value())));
}
output
} else {
vec![Err(ShellError::labeled_error_with_secondary(
"Expected string input",
"expected string input",
name,
"value originated here",
value.tag,
))]
}
})
.map(futures::stream::iter)
.flatten()
.to_output_stream())
}
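
parse converts a pattern such as "{key}: {value}" into column names plus a regex with one capture group per placeholder. A standalone sketch of that conversion (std only; as in the original build_regex, only '(' is escaped in the literal parts):

// Turn a "{name}"-style pattern into (column names, regex text).
fn pattern_to_regex(pattern: &str) -> (Vec<String>, String) {
    let mut columns = Vec::new();
    let mut regex = String::new();
    let mut chars = pattern.chars();
    loop {
        // Literal text up to the next '{' is matched verbatim.
        let literal: String = chars.by_ref().take_while(|c| *c != '{').collect();
        regex.push_str(&literal.replace('(', "\\("));
        // A "{name}" placeholder becomes a (.*) capture group.
        let column: String = chars.by_ref().take_while(|c| *c != '}').collect();
        if literal.is_empty() && column.is_empty() {
            break;
        }
        if !column.is_empty() {
            regex.push_str("(.*)");
            columns.push(column);
        }
    }
    (columns, regex)
}

fn main() {
    println!("{:?}", pattern_to_regex("{key}: {value}")); // (["key", "value"], "(.*): (.*)")
}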


@ -1,7 +1,9 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value};
use nu_protocol::{
merge_descriptors, ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue,
};
use nu_source::{SpannedItem, Tagged};
use nu_value_ext::get_data_by_key;
@ -23,8 +25,16 @@ impl WholeStreamCommand for Pivot {
fn signature(&self) -> Signature {
Signature::build("pivot")
.switch("header-row", "treat the first row as column names")
.switch("ignore-titles", "don't pivot the column names into values")
.switch(
"header-row",
"treat the first row as column names",
Some('r'),
)
.switch(
"ignore-titles",
"don't pivot the column names into values",
Some('i'),
)
.rest(
SyntaxShape::String,
"the names to give columns once pivoted",
@ -44,18 +54,6 @@ impl WholeStreamCommand for Pivot {
}
}
fn merge_descriptors(values: &[Value]) -> Vec<String> {
let mut ret = vec![];
for value in values {
for desc in value.data_descriptors() {
if !ret.contains(&desc) {
ret.push(desc);
}
}
}
ret
}
pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let input = context.input.into_vec().await;


@ -3,7 +3,7 @@ use crate::prelude::*;
use derive_new::new;
use log::trace;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, ReturnValue, Scope, Signature, UntaggedValue, Value};
use nu_protocol::{Primitive, ReturnSuccess, ReturnValue, Signature, UntaggedValue, Value};
use serde::{self, Deserialize, Serialize};
use std::io::prelude::*;
use std::io::BufReader;
@ -71,10 +71,13 @@ pub fn filter_plugin(
) -> Result<OutputStream, ShellError> {
trace!("filter_plugin :: {}", path);
let args = args.evaluate_once_with_scope(
registry,
&Scope::it_value(UntaggedValue::string("$it").into_untagged_value()),
)?;
let scope = &args
.call_info
.scope
.clone()
.set_it(UntaggedValue::string("$it").into_untagged_value());
let args = args.evaluate_once_with_scope(registry, &scope)?;
let mut child = std::process::Command::new(path)
.stdin(std::process::Stdio::piped())
@ -84,16 +87,18 @@ pub fn filter_plugin(
let mut bos: VecDeque<Value> = VecDeque::new();
bos.push_back(UntaggedValue::Primitive(Primitive::BeginningOfStream).into_untagged_value());
let bos = futures::stream::iter(bos);
let mut eos: VecDeque<Value> = VecDeque::new();
eos.push_back(UntaggedValue::Primitive(Primitive::EndOfStream).into_untagged_value());
let eos = futures::stream::iter(eos);
let call_info = args.call_info.clone();
trace!("filtering :: {:?}", call_info);
let stream = bos
.chain(args.input.values)
.chain(args.input)
.chain(eos)
.map(move |v| match v {
Value {
@ -294,6 +299,7 @@ pub fn filter_plugin(
}
}
})
.map(futures::stream::iter) // convert to a stream
.flatten();
Ok(stream.to_output_stream())
@ -337,7 +343,7 @@ pub fn sink_plugin(
let call_info = args.call_info.clone();
let stream = async_stream! {
let input: Vec<Value> = args.input.values.collect().await;
let input: Vec<Value> = args.input.collect().await;
let request = JsonRpc::new("sink", (call_info.clone(), input));
let request_raw = serde_json::to_string(&request);


@ -35,14 +35,20 @@ impl WholeStreamCommand for Prepend {
) -> Result<OutputStream, ShellError> {
args.process(registry, prepend)?.run()
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Add something to the beginning of a list or table",
example: "echo [2 3 4] | prepend 1",
}]
}
}
fn prepend(
PrependArgs { row }: PrependArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let mut prepend: VecDeque<Value> = VecDeque::new();
prepend.push_back(row);
let prepend = futures::stream::iter(vec![row]);
Ok(OutputStream::from_input(prepend.chain(input.values)))
Ok(prepend.chain(input).to_output_stream())
}


@ -25,6 +25,13 @@ impl WholeStreamCommand for Pwd {
) -> Result<OutputStream, ShellError> {
pwd(args, registry)
}
fn examples(&self) -> &[Example] {
&[Example {
description: "Print the current working directory",
example: "pwd",
}]
}
}
pub fn pwd(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {


@ -47,7 +47,8 @@ fn range(
let (from, _) = range.from;
let (to, _) = range.to;
Ok(OutputStream::from_input(
input.values.skip(*from).take(*to - *from + 1),
))
let from = *from as usize;
let to = *to as usize;
Ok(input.skip(from).take(to - from + 1).to_output_stream())
}
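
The rewritten range keeps rows from..to inclusive via skip/take. A quick standalone check of that arithmetic with a plain iterator:

fn main() {
    let (from, to) = (1usize, 3usize);
    // skip(from).take(to - from + 1) keeps indices from..=to, as range does above.
    let rows: Vec<i32> = vec![10, 20, 30, 40, 50]
        .into_iter()
        .skip(from)
        .take(to - from + 1)
        .collect();
    println!("{:?}", rows); // [20, 30, 40]
}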

Some files were not shown because too many files have changed in this diff Show More