Mirror of https://github.com/nushell/nushell.git (synced 2025-06-30 22:50:14 +02:00)

Move `std-rfc` into Nushell (#15042)

Move `std-rfc` into Nushell. `use std-rfc/<submodule>` now works "out-of-the-box".
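For orientation, a minimal usage sketch of what this change enables (the commands are the ones defined in the diff below; output elided):

    # std-rfc now ships inside Nushell itself, no extra installation step
    use std-rfc/clip
    "Hello" | clip copy

    use std-rfc/conversions *
    1..5 | into list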
@ -294,7 +294,7 @@ system-clipboard = [
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]

# SQLite commands for nushell
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite", "nu-std/sqlite"]

[profile.release]
opt-level = "s" # Optimize for size

@ -17,3 +17,7 @@ log = "0.4"

[lints]
workspace = true

[features]
sqlite = []
@ -1,11 +1,9 @@
#![doc = include_str!("../README.md")]
use log::trace;
use nu_engine::eval_block;
use nu_parser::parse;
use nu_protocol::{
    debugger::WithoutDebug,
    engine::{FileStack, Stack, StateWorkingSet, VirtualPath},
    report_parse_error, PipelineData, VirtualPathId,
    engine::{FileStack, StateWorkingSet, VirtualPath},
    report_parse_error, VirtualPathId,
};
use std::path::PathBuf;

@ -22,6 +20,7 @@ pub fn load_standard_library(
    trace!("load_standard_library");

    let mut working_set = StateWorkingSet::new(engine_state);

    // Contents of the std virtual directory
    let mut std_virt_paths = vec![];

@ -81,8 +80,70 @@ pub fn load_standard_library(
    let std_dir = PathBuf::from("std").to_string_lossy().to_string();
    let _ = working_set.add_virtual_path(std_dir, VirtualPath::Dir(std_virt_paths));

    // Add std-rfc files
    let mut std_rfc_virt_paths = vec![];

    // std-rfc/mod.nu
    let std_rfc_mod_virt_file_id = create_virt_file(
        &mut working_set,
        "std-rfc/mod.nu",
        include_str!("../std-rfc/mod.nu"),
    );
    std_rfc_virt_paths.push(std_rfc_mod_virt_file_id);

    // Submodules/subdirectories ... std-rfc/<module>/mod.nu
    let mut std_rfc_submodules = vec![
        (
            "mod.nu",
            "std-rfc/clip",
            include_str!("../std-rfc/clip/mod.nu"),
        ),
        (
            "mod.nu",
            "std-rfc/conversions",
            include_str!("../std-rfc/conversions/mod.nu"),
        ),
        #[cfg(feature = "sqlite")]
        ("mod.nu", "std-rfc/kv", include_str!("../std-rfc/kv/mod.nu")),
        (
            "mod.nu",
            "std-rfc/path",
            include_str!("../std-rfc/path/mod.nu"),
        ),
        (
            "mod.nu",
            "std-rfc/str",
            include_str!("../std-rfc/str/mod.nu"),
        ),
        (
            "mod.nu",
            "std-rfc/tables",
            include_str!("../std-rfc/tables/mod.nu"),
        ),
    ];

    for (filename, std_rfc_subdir_name, content) in std_rfc_submodules.drain(..) {
        let mod_dir = PathBuf::from(std_rfc_subdir_name);
        let name = mod_dir.join(filename);
        let virt_file_id = create_virt_file(&mut working_set, &name.to_string_lossy(), content);

        // Place file in virtual subdir
        let mod_dir_filelist = vec![virt_file_id];

        let virt_dir_id = working_set.add_virtual_path(
            mod_dir.to_string_lossy().to_string(),
            VirtualPath::Dir(mod_dir_filelist),
        );
        // Add the subdir to the list of paths in std-rfc
        std_rfc_virt_paths.push(virt_dir_id);
    }

    // Create the std-rfc virtual dir with all subdirs and files
    let std_rfc_dir = PathBuf::from("std-rfc").to_string_lossy().to_string();
    let _ = working_set.add_virtual_path(std_rfc_dir, VirtualPath::Dir(std_rfc_virt_paths));

    // Load prelude
    let (block, delta) = {
    let (_, delta) = {
        let source = r#"
# Prelude
use std/prelude *

@ -95,7 +156,7 @@ use std/prelude *

        let block = parse(
            &mut working_set,
            Some("loading stdlib"),
            Some("loading stdlib prelude"),
            source.as_bytes(),
            false,
        );

@ -112,13 +173,5 @@ use std/prelude *

    engine_state.merge_delta(delta)?;

    // We need to evaluate the module in order to run the `export-env` blocks.
    let mut stack = Stack::new();
    let pipeline_data = PipelineData::Empty;

    eval_block::<WithoutDebug>(engine_state, &mut stack, &block, pipeline_data)?;

    engine_state.merge_env(&mut stack)?;

    Ok(())
}
71  crates/nu-std/std-rfc/clip/mod.nu  Normal file
@ -0,0 +1,71 @@
# Commands for interacting with the system clipboard
#
# > These commands require your terminal to support OSC 52
# > Terminal multiplexers such as screen, tmux, zellij etc. may interfere with this command

# Copy input to system clipboard
#
# # Example
# ```nushell
# >_ "Hello" | clip copy
# ```
export def copy [
    --ansi (-a) # Copy ansi formatting
]: any -> nothing {
    let input = $in | collect
    let text = match ($input | describe -d | get type) {
        $type if $type in [ table, record, list ] => {
            $input | table -e
        }
        _ => {$input}
    }

    let do_strip_ansi = match $ansi {
        true => {{||}}
        false => {{|| ansi strip }}
    }

    let output = (
        $text
        | do $do_strip_ansi
        | encode base64
    )

    print -n $'(ansi osc)52;c;($output)(ansi st)'
}

# Paste contents of system clipboard
#
# # Example
# ```nushell
# >_ clip paste
# "Hello"
# ```
export def paste []: [nothing -> string] {
    try {
        term query $'(ansi osc)52;c;?(ansi st)' -p $'(ansi osc)52;c;' -t (ansi st)
    } catch {
        error make -u {
            msg: "Terminal did not respond to the OSC 52 paste request."
            help: $"Check if your terminal supports OSC 52."
        }
    }
    | decode
    | decode base64
    | decode
}

# Add a prefix to each line of the content to be copied
#
# # Example: Format output for Nushell doc
# ls | clip prefix '# => ' | clip copy
export def prefix [prefix: string]: any -> string {
    let input = $in | collect
    match ($input | describe -d | get type) {
        $type if $type in [ table, record, list ] => {
            $input | table -e
        }
        _ => {$input}
    }
    | str replace -r --all '(?m)(.*)' $'($prefix)$1'
}
121  crates/nu-std/std-rfc/conversions/mod.nu  Normal file
@ -0,0 +1,121 @@
# Convert a Nushell value to a list
#
# Primarily useful for range-to-list, but other types are accepted as well.
#
# Example:
#
# 1..10 | into list
export def "into list" []: any -> list {
    let input = $in
    let type = ($input | describe --detailed | get type)
    match $type {
        range => {$input | each {||}}
        list => $input
        table => $input
        _ => [ $input ]
    }
}

# Convert a list of columns into a table
#
# Examples:
#
# [
#   ([ 1 2 3 ] | wrap a)
#   ([ 4 5 6 ] | wrap b)
#   ([ 7 8 9 ] | wrap c)
# ] | columns-into-table
# => ╭───┬───┬───┬───╮
# => │ # │ a │ b │ c │
# => ├───┼───┼───┼───┤
# => │ 0 │ 1 │ 4 │ 7 │
# => │ 1 │ 2 │ 5 │ 8 │
# => │ 2 │ 3 │ 6 │ 9 │
# => ╰───┴───┴───┴───╯
#
# Can roundtrip with `table-into-columns`
#
# ls | table-into-columns | columns-into-table
# => ╭───┬────────────────────────┬──────┬────────┬────────────────╮
# => │ # │ name                   │ type │ size   │ modified       │
# => ├───┼────────────────────────┼──────┼────────┼────────────────┤
# => │ 0 │ into-list.nu           │ file │ 378 B  │ 40 minutes ago │
# => │ 1 │ mod.nu                 │ file │ 28 B   │ 41 minutes ago │
# => │ 2 │ name-values.nu         │ file │ 394 B  │ 34 minutes ago │
# => │ 3 │ record-into-columns.nu │ file │ 1.3 kB │ 27 minutes ago │
# => ╰───┴────────────────────────┴──────┴────────┴────────────────╯
export def columns-into-table []: [list<table> -> table] {
    reduce {|it| merge $it}
}

# Convert a record, where each value is a list, into a list of columns.
# { a: [ 1 2 3 ], b: [ 4 5 6 ] } | record-into-columns
# => ╭───┬───────────╮
# => │ 0 │ ╭───┬───╮ │
# => │   │ │ # │ a │ │
# => │   │ ├───┼───┤ │
# => │   │ │ 0 │ 1 │ │
# => │   │ │ 1 │ 2 │ │
# => │   │ │ 2 │ 3 │ │
# => │   │ ╰───┴───╯ │
# => │ 1 │ ╭───┬───╮ │
# => │   │ │ # │ b │ │
# => │   │ ├───┼───┤ │
# => │   │ │ 0 │ 4 │ │
# => │   │ │ 1 │ 5 │ │
# => │   │ │ 2 │ 6 │ │
# => │   │ ╰───┴───╯ │
# => ╰───┴───────────╯
# =>
# This can be especially useful when combined with `columns-into-table`, as in:
#
# { a: [ 1 2 3 ], b: [ 4 5 6 ] } | record-into-columns
# | columns-into-table
# => ╭───┬───┬───╮
# => │ # │ a │ b │
# => ├───┼───┼───┤
# => │ 0 │ 1 │ 4 │
# => │ 1 │ 2 │ 5 │
# => │ 2 │ 3 │ 6 │
# => ╰───┴───┴───╯
# =>
export def record-into-columns []: [record -> list] {
    items {|key, val| $val | wrap $key}
}

# Convert/split a table into a list of columns
#
# Examples:
# ls | table-into-columns
# => Returns a list of 4 tables, one for each of the `ls` columns
#
# Can be roundtripped with `columns-into-table`
#
# ls | table-into-columns | columns-into-table
# => ╭───┬────────────────────────┬──────┬────────┬────────────────╮
# => │ # │ name                   │ type │ size   │ modified       │
# => ├───┼────────────────────────┼──────┼────────┼────────────────┤
# => │ 0 │ into-list.nu           │ file │ 378 B  │ 40 minutes ago │
# => │ 1 │ mod.nu                 │ file │ 28 B   │ 41 minutes ago │
# => │ 2 │ name-values.nu         │ file │ 394 B  │ 34 minutes ago │
# => │ 3 │ record-into-columns.nu │ file │ 1.3 kB │ 27 minutes ago │
# => ╰───┴────────────────────────┴──────┴────────┴────────────────╯
export def table-into-columns []: [table -> list<table>] {
    let IN = $in
    $IN | columns | each {|col| $IN | select $col}
}

# Assign keynames to a list of values, effectively converting the list to a record.
#
# Example:
#
# [ 1 2 3 ] | name-values a b c
# => ╭───┬───╮
# => │ a │ 1 │
# => │ b │ 2 │
# => │ c │ 3 │
# => ╰───┴───╯
export def name-values [...names: string]: [list -> record] {
    let IN = $in
    0.. | zip $IN | into record | rename ...$names
}
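Illustrative round-trips for the converters above, adapted from the doc comments (`ls` is just a convenient sample table; output elided):

    ls | table-into-columns | columns-into-table            # same table back
    { a: [ 1 2 3 ], b: [ 4 5 6 ] } | record-into-columns | columns-into-table
    [ 1 2 3 ] | name-values a b c                           # => {a: 1, b: 2, c: 3}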
210  crates/nu-std/std-rfc/kv/mod.nu  Normal file
@ -0,0 +1,210 @@
# kv module
#
# use std-rfc/kv *
#
# Easily store and retrieve key-value pairs
# in a pipeline.
#
# A common request is to be able to assign a
# pipeline result to a variable. While it's
# not currently possible to use a "let" statement
# within a pipeline, this module provides an
# alternative. Think of each key as a variable
# that can be set and retrieved.

# Stores the pipeline value for later use
#
# If the key already exists, it is updated
# to the new value provided.
#
# Usage:
# <input> | kv set <key> <value?>
#
# Example:
# ls ~ | kv set "home snapshot"
# kv set foo 5
export def "kv set" [
    key: string
    value_or_closure?: any
    --return (-r): string # Whether and what to return to the pipeline output
    --universal (-u)
] {
    # Pipeline input is preferred, but prioritize
    # parameter if present. This allows $in to be
    # used in the parameter if needed.
    let input = $in

    # If passed a closure, execute it
    let arg_type = ($value_or_closure | describe)
    let value = match $arg_type {
        closure => { $input | do $value_or_closure }
        _ => ($value_or_closure | default $input)
    }

    # Store values as nuons for type-integrity
    let kv_pair = {
        session: '' # Placeholder
        key: $key
        value: ($value | to nuon)
    }

    let db_open = (db_setup --universal=$universal)
    try {
        # Delete the existing key if it does exist
        do $db_open | query db $"DELETE FROM std_kv_store WHERE key = '($key)'"
    }

    match $universal {
        true => { $kv_pair | into sqlite (universal_db_path) -t std_kv_store }
        false => { $kv_pair | stor insert -t std_kv_store }
    }

    # The value that should be returned from `kv set`
    # By default, this is the input to `kv set`, even if
    # overridden by a positional parameter.
    # This can also be:
    # input: (Default) The pipeline input to `kv set`, even if
    #        overridden by a positional parameter. `null` if no
    #        pipeline input was used.
    # ---
    # value: If a positional parameter was used for the value, then
    #        return it, otherwise return the input (whatever was set).
    #        If the positional was a closure, return the result of the
    #        closure on the pipeline input.
    # ---
    # all: The entire contents of the existing kv table are returned
    match ($return | default 'input') {
        'all' => (kv list --universal=$universal)
        'a' => (kv list --universal=$universal)
        'value' => $value
        'v' => $value
        'input' => $input
        'in' => $input
        'i' => $input
        _ => {
            error make {
                msg: "Invalid --return option"
                label: {
                    text: "Must be 'all'/'a', 'value'/'v', or 'input'/'in'/'i'"
                    span: (metadata $return).span
                }
            }
        }
    }
}

# Retrieves a stored value by key
#
# Counterpart of "kv set". Returns null
# if the key is not found.
#
# Usage:
# kv get <key> | <pipeline>
export def "kv get" [
    key: string # Key of the kv-pair to retrieve
    --universal (-u)
] {
    let db_open = (db_setup --universal=$universal)
    do $db_open
    # Hack to turn a SQLiteDatabase into a table
    | $in.std_kv_store | wrap temp | get temp
    | where key == $key
    # Should only be one occurrence of each key in the stor
    | get -i value.0
    | match $in {
        # Key not found
        null => null
        # Key found
        _ => { from nuon }
    }
}

# List the currently stored key-value pairs
#
# Returns results as the Nushell value rather
# than the stored nuon.
export def "kv list" [
    --universal (-u)
] {
    let db_open = (db_setup --universal=$universal)

    do $db_open | $in.std_kv_store? | each {|kv_pair|
        {
            key: $kv_pair.key
            value: ($kv_pair.value | from nuon )
        }
    }
}

# Returns and removes a key-value pair
export def --env "kv drop" [
    key: string # Key of the kv-pair to drop
    --universal (-u)
] {
    let db_open = (db_setup --universal=$universal)

    let value = (kv get --universal=$universal $key)

    try {
        do $db_open
        # Hack to turn a SQLiteDatabase into a table
        | query db $"DELETE FROM std_kv_store WHERE key = '($key)'"
    }

    if $universal and ($env.NU_KV_UNIVERSALS? | default false) {
        hide-env $key
    }

    $value
}

def universal_db_path [] {
    $env.NU_UNIVERSAL_KV_PATH?
    | default (
        $nu.data-dir | path join "std_kv_variables.sqlite3"
    )
}

def db_setup [
    --universal
] : nothing -> closure {
    try {
        match $universal {
            true => {
                # Ensure universal sqlite db and table exists
                let uuid = (random uuid)
                let dummy_record = {
                    session: ''
                    key: $uuid
                    value: ''
                }
                $dummy_record | into sqlite (universal_db_path) -t std_kv_store
                open (universal_db_path) | query db $"DELETE FROM std_kv_store WHERE key = '($uuid)'"
            }
            false => {
                # Create the stor table if it doesn't exist
                stor create -t std_kv_store -c {session: str, key: str, value: str} | ignore
            }
        }
    }

    # Return the correct closure for opening on-disk vs. in-memory
    match $universal {
        true => {|| {|| open (universal_db_path)}}
        false => {|| {|| stor open}}
    }
}

# This hook can be added to $env.config.hooks.pre_execution to enable
# "universal variables" similar to the Fish shell. Adding, changing, or
# removing a universal variable will immediately update the corresponding
# environment variable in all running Nushell sessions.
export def "kv universal-variable-hook" [] {
    {||
        kv list --universal
        | transpose -dr
        | load-env

        $env.NU_KV_UNIVERSALS = true
    }
}
9  crates/nu-std/std-rfc/mod.nu  Normal file
@ -0,0 +1,9 @@
export use conversions *
export use tables *
export use path *
export module clip
export module str

# kv module depends on sqlite feature, which may not be available in some builds
const kv_module = if ("sqlite" in (version).features) { "std-rfc/kv" } else { null }
export use $kv_module *
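A brief sketch of how this conditional export behaves; the assumption here (also made by the tests further down in this diff) is that `export use $kv_module *` is effectively a no-op when the constant is null:

    # Build with the sqlite feature: the kv commands are exported.
    use std-rfc/kv *
    42 | kv set answer
    kv get answer        # => 42

    # Build without sqlite: $kv_module is null, so no kv commands are exported.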
71  crates/nu-std/std-rfc/path/mod.nu  Normal file
@ -0,0 +1,71 @@
# Helper function for `path with` commands
def with-field [field: string, value: string] {
    path parse
    | update $field $value
    | path join
}

# Replace extension of input file paths.
#
# Note that this only changes the path value; it does not rename the file on disk.
#
# # Example
# - setting path ext to `rs`
# ```nushell
# > "ab.txt" | path with-extension "rs"
# ab.rs
# > "ab.txt" | path with-extension ".rs"
# ab.rs
#
# - setting a list of input path ext to `rs`
# > ["ab.txt", "cd.exe"] | path with-extension "rs"
# ╭───┬──────────╮
# │ 0 │ ab.rs    │
# │ 1 │ cd.rs    │
# ╰───┴──────────╯
# ```
export def with-extension [ext: string] {
    let path = $in
    let ext_trim = if $ext starts-with "." {
        $ext | str substring 1..
    } else {
        $ext
    }
    $path | with-field extension $ext_trim
}

# Replace stem of input file paths.
#
# Note that this only changes the path value; it does not rename the file on disk.
#
# # Example
# - replace stem with "share"
# ```nushell
# > "/usr/bin" | path with-stem "share"
# /usr/share
#
# - replace stem with "nushell"
# > ["/home/alice/", "/home/bob/secret.txt"] | path with-stem "nushell"
# ╭───┬───────────────────────╮
# │ 0 │ /home/nushell         │
# │ 1 │ /home/bob/nushell.txt │
# ╰───┴───────────────────────╯
# ```
export def with-stem [stem: string] { with-field stem $stem }

# Replace parent field of input file paths.
#
# # Example
# - replace parent path with `/usr/share`
# ```nushell
# > "/etc/foobar" | path with-parent "/usr/share/"
# /usr/share/foobar
#
# - replace parent path with `/root/` for all filenames in list
# > ["/home/rose/meow", "/home/fdncred/"] | path with-parent "/root/"
# ╭───┬───────────────╮
# │ 0 │ /root/meow    │
# │ 1 │ /root/fdncred │
# ╰───┴───────────────╯
# ```
export def with-parent [parent: string] { with-field parent $parent }
136  crates/nu-std/std-rfc/str/mod.nu  Normal file
@ -0,0 +1,136 @@
# Removes common indent from a multi-line string based on the number of spaces on the last line.
#
# Example - Two leading spaces are removed from all lines:
#
# > let s = "
# Heading
# Indented Line
# Another Indented Line
#
# Another Heading
# "
# > $s | str dedent
#
# # => Heading
# # => Indented Line
# # => Another Indented Line
# # =>
# # => Another Heading
export def dedent [
    --tabs (-t)
]: string -> string {
    let string = $in

    if ($string !~ $'^\s*(char lsep)') {
        return (error make {
            msg: 'First line must be empty'
        })
    }

    if ($string !~ $'(char lsep)[ \t]*$') {
        return (error make {
            msg: 'Last line must contain only whitespace indicating the dedent'
        })
    }

    # Get indent characters from the last line
    let indent_chars = $string
    | str replace -r $"\(?s\).*(char lsep)\([ \t]*\)$" '$1'

    # Skip the first and last lines
    let lines = (
        $string
        | lines
        | skip
        | # Only drop if there is whitespace. Otherwise, `lines`
        | # drops a 0-length line anyway
        | if ($indent_chars | str length) > 0 { drop } else {}
        | enumerate
        | rename lineNumber text
    )

    # Has to be done outside the replacement block or the error
    # is converted to text. This is probably a Nushell bug, and
    # this code can be recombined with the next iterator when
    # the Nushell behavior is fixed.
    for line in $lines {
        # Skip lines with whitespace-only
        if $line.text like '^\s*$' { continue }
        # Error if any line doesn't start with enough indentation
        if ($line.text | parse -r $"^\(($indent_chars)\)" | get capture0?.0?) != $indent_chars {
            error make {
                msg: $"Line ($line.lineNumber + 1) must have an indent of ($indent_chars | str length) or more."
            }
        }
    }

    $lines
    | each {|line|
        # Don't operate on lines containing only whitespace
        if ($line.text not-like '^\s*$') {
            $line.text | str replace $indent_chars ''
        } else {
            $line.text
        }
    }
    | str join (char line_sep)
}

# Remove common indent from a multi-line string based on the line with the smallest indent
#
# Example - Two leading spaces are removed from all lines:
#
# > let s = "
# Heading
# Indented Line
# Another Indented Line
#
# Another Heading
# "
# > $s | str unindent
#
# # => Heading
# # => Indented Line
# # => Another Indented Line
# # =>
# # => Another Heading
#
export def unindent [
    --tabs (-t) # String uses tabs instead of spaces for indentation
]: string -> string {
    let indent_char = match $tabs {
        true => '\t'
        false => ' '
    }

    let text = (
        $in
        | # Remove the first line if it is only whitespace (tabs or spaces)
        | str replace -r $'^[ \t]*(char lsep)' ''
        | # Remove the last line if it is only whitespace (tabs or spaces)
        | str replace -r $'(char lsep)[ \t]*$' ''
    )

    # Early return if there is only a single, empty (other than whitespace) line
    if ($text like '^[ \t]*$') {
        return $text
    }

    let minimumIndent = (
        $text
        | lines
        | # Ignore indentation in any line that is only whitespace
        | where $it not-like '^[ \t]*$'
        | # Replaces the text with its indentation
        | each {
            str replace -r $"^\(($indent_char)*\).*" '$1'
            | str length
        }
        | math min
    )

    let indent_chars = ('' | fill -c $indent_char -w $minimumIndent)

    $text
    | str replace -r --all $"\(?m\)^($indent_chars)" ''
}
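A sketch of the typical call site for `str dedent`: an indented multi-line string inside a script. The `greeting` command below is hypothetical; its behavior follows the rules above (first line must be empty, the last line's indentation defines the dedent):

    def greeting [] {
        "
        Hello
          World
        " | str dedent
    }
    greeting   # => "Hello\n  World"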
221  crates/nu-std/std-rfc/tables/mod.nu  Normal file
@ -0,0 +1,221 @@
|
||||
# Run aggregate operations on output of `group-by --to-table`.
|
||||
#
|
||||
# # Example
|
||||
#
|
||||
# - group files by type and extension, and get stats about their sizes
|
||||
# ```nushell
|
||||
# >_ ls | group-by type { get name | path parse | get extension } --to-table | aggregate size
|
||||
# ```
|
||||
#
|
||||
# - group data by multiple columns, and run custom aggregate operations
|
||||
# ```nushell
|
||||
# >_ open movies.csv
|
||||
# | group-by Lead_Studio Genre --to-table
|
||||
# | aggregate Worldwide_Gross Profitability --ops {avg: {math avg}, std: {math stddev}}
|
||||
# ```
|
||||
#
|
||||
# - run aggregate operations without grouping the input
|
||||
# ```nushell
|
||||
# >_ open movies.csv | aggregate Year
|
||||
# ```
|
||||
export def aggregate [
|
||||
--ops: record, # default = {min: {math min}, avg: {math avg}, max: {math max}, sum: {math sum}}
|
||||
...columns: cell-path, # columns to perform aggregations on
|
||||
]: [
|
||||
table -> table<count: int>,
|
||||
record -> error,
|
||||
] {
|
||||
def aggregate-default-ops [] {
|
||||
{
|
||||
min: {math min},
|
||||
avg: {math avg},
|
||||
max: {math max},
|
||||
sum: {math sum},
|
||||
}
|
||||
}
|
||||
|
||||
def aggregate-col-name [col: cell-path, op_name: string]: [nothing -> string] {
|
||||
$col | split cell-path | get value | str join "." | $"($in)_($op_name)"
|
||||
}
|
||||
|
||||
def get-item-with-error [
|
||||
col: cell-path,
|
||||
opts: record<span: record<start: int, end: int>, items: bool>
|
||||
]: [table -> any] {
|
||||
try {
|
||||
get $col
|
||||
} catch {
|
||||
let full_cellpath = if $opts.items {
|
||||
$col
|
||||
| split cell-path
|
||||
| prepend {value: items, optional: false}
|
||||
| into cell-path
|
||||
} else {
|
||||
$col
|
||||
}
|
||||
error make {
|
||||
msg: $"Cannot find column '($full_cellpath)'",
|
||||
label: {
|
||||
text: "value originates here",
|
||||
span: $opts.span
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def "error not-a-table" [span: record<start: int, end:int>] {
|
||||
error make {
|
||||
msg: "input must be a table",
|
||||
label: {
|
||||
text: "from here",
|
||||
span: $span
|
||||
},
|
||||
help: "Are you using `group-by`? Make sure to use its `--to-table` flag."
|
||||
}
|
||||
}
|
||||
|
||||
let IN = $in
|
||||
let md = metadata $in
|
||||
|
||||
let first = try { $IN | first } catch { error not-a-table $md.span }
|
||||
if not (($first | describe) starts-with record) {
|
||||
error not-a-table $md.span
|
||||
}
|
||||
|
||||
let grouped = "items" in $first
|
||||
|
||||
let IN = if $grouped {
|
||||
$IN
|
||||
} else {
|
||||
[{items: $IN}]
|
||||
}
|
||||
|
||||
let agg_ops = $ops | default (aggregate-default-ops)
|
||||
|
||||
let results = $IN
|
||||
| update items {|group|
|
||||
let column_results = $columns
|
||||
| each {|col| # col: cell-path
|
||||
let column = $group.items | get-item-with-error $col {span: $md.span, items: $grouped}
|
||||
let agg_results = $agg_ops | items {|op_name, op| # op_name: string, op: closure
|
||||
$column | do $op | wrap (aggregate-col-name $col $op_name)
|
||||
}
|
||||
|
||||
for r in $agg_results {
|
||||
if ($r | describe) == error {
|
||||
return $r
|
||||
}
|
||||
}
|
||||
|
||||
$agg_results
|
||||
| reduce {|it| merge $it}
|
||||
}
|
||||
|
||||
# Manually propagate errors
|
||||
for r in $column_results {
|
||||
if ($r | describe) == error {
|
||||
return $r
|
||||
}
|
||||
}
|
||||
|
||||
$column_results
|
||||
| reduce --fold {} {|it| merge $it}
|
||||
| insert count ($group.items | length)
|
||||
| roll right # put count as the first column
|
||||
}
|
||||
|
||||
# Manually propagate errors
|
||||
for r in $results {
|
||||
if ($r.items | describe) == error {
|
||||
return $r.items
|
||||
}
|
||||
}
|
||||
|
||||
$results | flatten items
|
||||
}
|
||||
|
||||
# Used in reject-column-slices and select-column-slices
|
||||
def col-indices [ ...slices ] {
|
||||
use std-rfc/conversions *
|
||||
|
||||
let indices = (
|
||||
$slices
|
||||
| reduce -f [] {|slice,indices|
|
||||
$indices ++ ($slice | into list)
|
||||
}
|
||||
)
|
||||
|
||||
$in | columns
|
||||
| select slices $indices
|
||||
| get item
|
||||
}
|
||||
|
||||
# Used in select-row-slices and reject-row-slices
|
||||
def row-indices [ ...slices ] {
|
||||
use std-rfc/conversions *
|
||||
|
||||
$slices
|
||||
| reduce -f [] {|slice,indices|
|
||||
$indices ++ ($slice | into list)
|
||||
}
|
||||
}
|
||||
|
||||
# Selects one or more rows while keeping the original indices.
|
||||
#
|
||||
# Example - Selects the first, fifth, and sixth rows from the table:
|
||||
#
|
||||
# ls / | select slices 0 4..5
|
||||
#
|
||||
# Example - Select the 4th row.
|
||||
#
|
||||
# Note that the difference between this and `select 3` is that the index (#) column shows the *original* (pre-select) position in the table.
|
||||
#
|
||||
# ls | select slices 3
|
||||
export def "select slices" [ ...slices ] {
|
||||
enumerate
|
||||
| flatten
|
||||
| select ...(row-indices ...$slices)
|
||||
}
|
||||
|
||||
# Rejects one or more rows while keeping the original indices.
|
||||
#
|
||||
# Example - Rejects the first, fifth, and sixth rows from the table:
|
||||
#
|
||||
# ls / | reject slices 0 4..5
|
||||
export def "reject slices" [ ...slices ] {
|
||||
enumerate
|
||||
| flatten
|
||||
| collect
|
||||
| reject ...(row-indices ...$slices)
|
||||
}
|
||||
|
||||
# Select one or more columns by their indices
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# ls -l | select column-slices 0 10..12 | first 3
|
||||
# # => ╭───┬────────────────────┬──────────────┬─────────────┬──────────────╮
|
||||
# # => │ # │ name │ created │ accessed │ modified │
|
||||
# # => ├───┼────────────────────┼──────────────┼─────────────┼──────────────┤
|
||||
# # => │ 0 │ CITATION.cff │ 3 months ago │ 4 hours ago │ 3 months ago │
|
||||
# # => │ 1 │ CODE_OF_CONDUCT.md │ 7 months ago │ 4 hours ago │ 7 months ago │
|
||||
# # => │ 2 │ CONTRIBUTING.md │ 3 months ago │ 4 hours ago │ 3 months ago │
|
||||
# # => ╰───┴────────────────────┴──────────────┴─────────────┴──────────────╯
|
||||
export def "select column-slices" [
|
||||
...slices
|
||||
] {
|
||||
let column_selector = ($in | col-indices ...$slices)
|
||||
$in | select ...$column_selector
|
||||
}
|
||||
|
||||
# Reject one or more columns by their indices
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# ls | reject column-slices 0 4 5 | first 3
|
||||
export def "reject column-slices" [
|
||||
...slices
|
||||
] {
|
||||
let column_selector = ($in | col-indices ...$slices)
|
||||
$in | reject ...$column_selector
|
||||
}
|
@ -164,7 +164,7 @@ export def ($test_function_name) [] {
    open $test.file
    | lines
    | append ($test_function)
    | str join (char nl)
    | str join (char lsep)
    | save $rendered_module_path

    let result = (
110  crates/nu-std/tests/test_std-rfc_conversions.nu  Normal file
@ -0,0 +1,110 @@
|
||||
use std assert
|
||||
use std-rfc/conversions *
|
||||
|
||||
#[test]
|
||||
def range-into-list [] {
|
||||
assert equal (
|
||||
1..10 | into list
|
||||
) (
|
||||
[ 1 2 3 4 5 6 7 8 9 10 ]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def string-into-list [] {
|
||||
assert equal (
|
||||
"foo" | into list
|
||||
) (
|
||||
[ foo ]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def range-stride-into-list [] {
|
||||
assert equal (
|
||||
0..2..10 | into list
|
||||
) (
|
||||
[ 0 2 4 6 8 10 ]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def null-into-list [] {
|
||||
assert equal (
|
||||
null | into list | get 0 | describe
|
||||
) (
|
||||
"nothing"
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def list-into-list [] {
|
||||
assert equal (
|
||||
[ foo bar baz ] | into list
|
||||
) (
|
||||
[ foo bar baz ]
|
||||
)
|
||||
|
||||
}
|
||||
|
||||
#[test]
|
||||
def table-into-columns--roundtrip [] {
|
||||
assert equal (
|
||||
ls
|
||||
) (
|
||||
ls | table-into-columns | columns-into-table
|
||||
)
|
||||
}
|
||||
|
||||
const test_record_of_lists = {
|
||||
a: [ 1 2 3 ]
|
||||
b: [ 4 5 6 ]
|
||||
}
|
||||
|
||||
#[test]
|
||||
def record-into-columns--simple [] {
|
||||
let actual = (
|
||||
$test_record_of_lists
|
||||
| record-into-columns
|
||||
| get 1.b.2
|
||||
)
|
||||
|
||||
let expected = 6
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def table-into-columns--simple [] {
|
||||
let actual = (
|
||||
ls | table-into-columns | get 1 | columns | get 0
|
||||
)
|
||||
let expected = 'type'
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def name-values--simple [] {
|
||||
let actual = (
|
||||
[ 1 2 3 ] | name-values one two three
|
||||
| get 'two'
|
||||
)
|
||||
|
||||
let expected = 2
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def name-values--missing-keyname [] {
|
||||
let actual = (
|
||||
[ 1 2 3 ] | name-values one two
|
||||
| columns
|
||||
)
|
||||
|
||||
# Column/key names are strings, even those that came from the index ('2')
|
||||
let expected = [ 'one' 'two' '2' ]
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
283  crates/nu-std/tests/test_std-rfc_kv.nu  Normal file
@ -0,0 +1,283 @@
|
||||
use std/assert
|
||||
const kv_module = if ("sqlite" in (version).features) { "std-rfc/kv" } else { null }
|
||||
use $kv_module *
|
||||
|
||||
# It's important to use random keys and to clean-up
|
||||
# after since the user running these tests may have
|
||||
# either an existing local stor or universal db.
|
||||
|
||||
#[test]
|
||||
def simple-local-set [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
|
||||
let key = (random uuid)
|
||||
|
||||
kv set $key 42
|
||||
let actual = (kv get $key)
|
||||
let expected = 42
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop $key | ignore
|
||||
}
|
||||
|
||||
#[test]
|
||||
def local-pipeline_set_returns_value [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
|
||||
let key = (random uuid)
|
||||
let actual = (42 | kv set $key)
|
||||
let expected = 42
|
||||
assert equal $actual $expected
|
||||
|
||||
let actual = (kv get $key)
|
||||
let expected = 42
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop $key | ignore
|
||||
}
|
||||
|
||||
#[test]
|
||||
def local-multiple_assignment [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
|
||||
let key1 = (random uuid)
|
||||
let key2 = (random uuid)
|
||||
let key3 = (random uuid)
|
||||
|
||||
"test value" | kv set $key1 | kv set $key2 | kv set $key3
|
||||
let expected = "test value"
|
||||
assert equal (kv get $key1) $expected
|
||||
assert equal (kv get $key2) $expected
|
||||
assert equal (kv get $key3) $expected
|
||||
assert equal (kv get $key3) (kv get $key1)
|
||||
|
||||
kv drop $key1
|
||||
kv drop $key2
|
||||
kv drop $key3
|
||||
}
|
||||
|
||||
#[test]
|
||||
def local-transpose_to_record [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
|
||||
let key1 = (random uuid)
|
||||
let key2 = (random uuid)
|
||||
let key3 = (random uuid)
|
||||
|
||||
"test value" | kv set $key1 | kv set $key2 | kv set $key3
|
||||
|
||||
let record = (kv list | transpose -dr)
|
||||
let actual = ($record | select $key1)
|
||||
let expected = { $key1: "test value" }
|
||||
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop $key1
|
||||
kv drop $key2
|
||||
kv drop $key3
|
||||
}
|
||||
|
||||
#[test]
|
||||
def local-using_closure [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
|
||||
let name_key = (random uuid)
|
||||
let size_key = (random uuid)
|
||||
|
||||
ls
|
||||
| kv set $name_key { get name }
|
||||
| kv set $size_key { get size }
|
||||
|
||||
let expected = "list<string>"
|
||||
let actual = (kv get $name_key | describe)
|
||||
assert equal $actual $expected
|
||||
|
||||
let expected = "list<filesize>"
|
||||
let actual = (kv get $size_key | describe)
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop $name_key
|
||||
kv drop $size_key
|
||||
}
|
||||
|
||||
#[test]
|
||||
def local-return-entire-list [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
|
||||
let key1 = (random uuid)
|
||||
let key2 = (random uuid)
|
||||
|
||||
let expected = 'value1'
|
||||
$expected | kv set $key1
|
||||
|
||||
let actual = (
|
||||
'value2'
|
||||
| kv set --return all $key2 # Set $key2, but return the entire kv store
|
||||
| transpose -dr # Convert to record for easier retrieval
|
||||
| get $key1 # Attempt to retrieve key1 (set previously)
|
||||
)
|
||||
|
||||
assert equal $actual $expected
|
||||
kv drop $key1
|
||||
kv drop $key2
|
||||
}
|
||||
|
||||
#[test]
|
||||
def local-return_value_only [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
|
||||
let key = (random uuid)
|
||||
|
||||
let expected = 'VALUE'
|
||||
let actual = ('value' | kv set -r v $key {str upcase})
|
||||
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop $key
|
||||
|
||||
}
|
||||
|
||||
#[test]
|
||||
def universal-simple_set [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
$env.NU_UNIVERSAL_KV_PATH = (mktemp -t --suffix .sqlite3)
|
||||
|
||||
let key = (random uuid)
|
||||
|
||||
kv set -u $key 42
|
||||
let actual = (kv get -u $key)
|
||||
let expected = 42
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop -u $key | ignore
|
||||
rm $env.NU_UNIVERSAL_KV_PATH
|
||||
}
|
||||
|
||||
#[test]
|
||||
def universal-pipeline_set_returns_value [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
$env.NU_UNIVERSAL_KV_PATH = (mktemp -t --suffix .sqlite3)
|
||||
|
||||
let key = (random uuid)
|
||||
let actual = (42 | kv set -u $key)
|
||||
let expected = 42
|
||||
assert equal $actual $expected
|
||||
|
||||
let actual = (kv get -u $key)
|
||||
let expected = 42
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop -u $key | ignore
|
||||
rm $env.NU_UNIVERSAL_KV_PATH
|
||||
}
|
||||
|
||||
#[test]
|
||||
def universal-multiple_assignment [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
$env.NU_UNIVERSAL_KV_PATH = (mktemp -t --suffix .sqlite3)
|
||||
|
||||
let key1 = (random uuid)
|
||||
let key2 = (random uuid)
|
||||
let key3 = (random uuid)
|
||||
|
||||
"test value" | kv set -u $key1 | kv set -u $key2 | kv set -u $key3
|
||||
let expected = "test value"
|
||||
assert equal (kv get -u $key1) $expected
|
||||
assert equal (kv get -u $key2) $expected
|
||||
assert equal (kv get -u $key3) $expected
|
||||
assert equal (kv get $key3) (kv get $key1)
|
||||
|
||||
kv drop -u $key1
|
||||
kv drop -u $key2
|
||||
kv drop -u $key3
|
||||
rm $env.NU_UNIVERSAL_KV_PATH
|
||||
}
|
||||
|
||||
#[test]
|
||||
def universal-transpose_to_record [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
$env.NU_UNIVERSAL_KV_PATH = (mktemp -t --suffix .sqlite3)
|
||||
|
||||
let key1 = (random uuid)
|
||||
let key2 = (random uuid)
|
||||
let key3 = (random uuid)
|
||||
|
||||
"test value" | kv set -u $key1 | kv set -u $key2 | kv set -u $key3
|
||||
|
||||
let record = (kv list -u | transpose -dr)
|
||||
let actual = ($record | select $key1)
|
||||
let expected = { $key1: "test value" }
|
||||
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop -u $key1
|
||||
kv drop -u $key2
|
||||
kv drop -u $key3
|
||||
rm $env.NU_UNIVERSAL_KV_PATH
|
||||
}
|
||||
|
||||
#[test]
|
||||
def universal-using_closure [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
$env.NU_UNIVERSAL_KV_PATH = (mktemp -t --suffix .sqlite3)
|
||||
|
||||
let name_key = (random uuid)
|
||||
let size_key = (random uuid)
|
||||
|
||||
ls
|
||||
| kv set -u $name_key { get name }
|
||||
| kv set -u $size_key { get size }
|
||||
|
||||
let expected = "list<string>"
|
||||
let actual = (kv get -u $name_key | describe)
|
||||
assert equal $actual $expected
|
||||
|
||||
let expected = "list<filesize>"
|
||||
let actual = (kv get -u $size_key | describe)
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop -u $name_key
|
||||
kv drop -u $size_key
|
||||
rm $env.NU_UNIVERSAL_KV_PATH
|
||||
}
|
||||
|
||||
#[test]
|
||||
def universal-return-entire-list [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
$env.NU_UNIVERSAL_KV_PATH = (mktemp -t --suffix .sqlite3)
|
||||
|
||||
let key1 = (random uuid)
|
||||
let key2 = (random uuid)
|
||||
|
||||
let expected = 'value1'
|
||||
$expected | kv set -u $key1
|
||||
|
||||
let actual = (
|
||||
'value2'
|
||||
| kv set -u --return all $key2 # Set $key2, but return the entire kv store
|
||||
| transpose -dr # Convert to record for easier retrieval
|
||||
| get $key1 # Attempt to retrieve key1 (set previously)
|
||||
)
|
||||
|
||||
assert equal $actual $expected
|
||||
kv drop --universal $key1
|
||||
kv drop --universal $key2
|
||||
rm $env.NU_UNIVERSAL_KV_PATH
|
||||
}
|
||||
|
||||
#[test]
|
||||
def universal-return_value_only [] {
|
||||
if ('sqlite' not-in (version).features) { return }
|
||||
$env.NU_UNIVERSAL_KV_PATH = (mktemp -t --suffix .sqlite3)
|
||||
|
||||
let key = (random uuid)
|
||||
|
||||
let expected = 'VALUE'
|
||||
let actual = ('value' | kv set --universal -r v $key {str upcase})
|
||||
|
||||
assert equal $actual $expected
|
||||
|
||||
kv drop --universal $key
|
||||
rm $env.NU_UNIVERSAL_KV_PATH
|
||||
}
|
||||
|
38  crates/nu-std/tests/test_std-rfc_path.nu  Normal file
@ -0,0 +1,38 @@
|
||||
use std-rfc/path
|
||||
use std/assert
|
||||
|
||||
#[test]
|
||||
def path_with_extension [] {
|
||||
let new_path = "ab.txt" | path with-extension "rs"
|
||||
assert equal $new_path "ab.rs"
|
||||
|
||||
let new_path = "ab.txt" | path with-extension ".rs"
|
||||
assert equal $new_path "ab.rs"
|
||||
}
|
||||
|
||||
#[test]
|
||||
def path_with_extension_for_list [] {
|
||||
let new_path = ["ab.txt", "cd.exe"] | path with-extension "rs"
|
||||
assert equal $new_path ["ab.rs", "cd.rs"]
|
||||
|
||||
let new_path = ["ab.txt", "cd.exe"] | path with-extension ".rs"
|
||||
assert equal $new_path ["ab.rs", "cd.rs"]
|
||||
}
|
||||
|
||||
#[test]
|
||||
def path_with_stem [] {
|
||||
let new_path = $"(char psep)usr(char psep)bin" | path with-stem "share"
|
||||
assert equal $new_path $"(char psep)usr(char psep)share"
|
||||
|
||||
let new_path = [$"(char psep)home(char psep)alice(char psep)", $"(char psep)home(char psep)bob(char psep)secret.txt"] | path with-stem "nushell"
|
||||
assert equal $new_path [$"(char psep)home(char psep)nushell", $"(char psep)home(char psep)bob(char psep)nushell.txt"]
|
||||
}
|
||||
|
||||
#[test]
|
||||
def path_with_parent [] {
|
||||
let new_path = $"(char psep)etc(char psep)foobar" | path with-parent $"(char psep)usr(char psep)share(char psep)"
|
||||
assert equal $new_path $"(char psep)usr(char psep)share(char psep)foobar"
|
||||
|
||||
let new_path = [$"(char psep)home(char psep)rose(char psep)meow", $"(char psep)home(char psep)fdncred(char psep)"] | path with-parent $"(char psep)root(char psep)"
|
||||
assert equal $new_path [$"(char psep)root(char psep)meow", $"(char psep)root(char psep)fdncred"]
|
||||
}
|
292  crates/nu-std/tests/test_std-rfc_str.nu  Normal file
@ -0,0 +1,292 @@
|
||||
use std/assert
|
||||
use std-rfc/str
|
||||
|
||||
#[test]
|
||||
def str-dedent_simple [] {
|
||||
|
||||
# Test 1:
|
||||
# Should start with "Heading" in the first character position
|
||||
# Should not end with a line-break
|
||||
# The blank line has no extra spaces
|
||||
assert equal (
|
||||
do {
|
||||
let s = "
|
||||
Heading
|
||||
|
||||
one
|
||||
two
|
||||
"
|
||||
$s | str dedent
|
||||
}
|
||||
) $"Heading(char lsep)(char lsep) one(char lsep) two"
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_leave_blankline_whitespace [] {
|
||||
# Test 2:
|
||||
# Same as #1, but the blank line has leftover whitespace
|
||||
# indentation (16 spaces) which is left in the result
|
||||
assert equal (
|
||||
do {
|
||||
let s = "
|
||||
Heading
|
||||
|
||||
one
|
||||
two
|
||||
"
|
||||
$s | str dedent
|
||||
}
|
||||
) $"Heading(char lsep) (char lsep) one(char lsep) two"
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_leave_blankline_tab [] {
|
||||
# Test 3:
|
||||
# Same, but with a single tab character on the "blank" line
|
||||
assert equal (
|
||||
do {
|
||||
let s = $"
|
||||
Heading
|
||||
(char tab)
|
||||
one
|
||||
two
|
||||
"
|
||||
$s | str dedent
|
||||
}
|
||||
) $"Heading(char lsep)(char tab)(char lsep) one(char lsep) two"
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_ends_with_newline [] {
|
||||
# Test 4:
|
||||
# Ends with line-break
|
||||
assert equal (
|
||||
do {
|
||||
let s = "
|
||||
Heading
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
"
|
||||
$s | str dedent
|
||||
}
|
||||
) $"Heading(char lsep)(char lsep) one(char lsep) two(char lsep)"
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_identity [] {
|
||||
# Test 5:
|
||||
# Identity - Returns the original string sans first and last empty lines
|
||||
# No other whitespace should be removed
|
||||
assert equal (
|
||||
do {
|
||||
let s = $"(char lsep) Identity (char lsep)"
|
||||
$s | str dedent
|
||||
}
|
||||
) " Identity "
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_error-no_blank_lines [] {
|
||||
# Test 6:
|
||||
# Error - Does not contain an empty first line
|
||||
assert error {||
|
||||
let s = "Error"
|
||||
$s | str dedent
|
||||
}
|
||||
|
||||
# Test 6.1:
|
||||
# Error - Does not contain an empty first line
|
||||
assert error {||
|
||||
let s = $"Error(char lsep) (char lsep)Testing(char lsep)"
|
||||
$s | str dedent
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_error-no_blank_first_line [] {
|
||||
# Test 7:
|
||||
# Error - Does not contain an empty last line
|
||||
assert error {||
|
||||
let s = "
|
||||
Error"
|
||||
$s | str dedent
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_error-missing_last_empty_line [] {
|
||||
# Test 7.1:
|
||||
# Error - Does not contain an empty last line
|
||||
assert error {||
|
||||
let s = "
|
||||
|
||||
Error"
|
||||
$s | str dedent
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_error-not_enough_indentation [] {
|
||||
# Test 8:
|
||||
# Error - Line 1 does not have enough indentation
|
||||
assert error {||
|
||||
let s = "
|
||||
Line 1
|
||||
Line 2
|
||||
"
|
||||
$s | str dedent
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_error-not_enough_indentation2 [] {
|
||||
# Test 8.1:
|
||||
# Error - Line 2 does not have enough indentation
|
||||
assert error {||
|
||||
let s = "
|
||||
Line 1
|
||||
Line 2
|
||||
"
|
||||
$s | str dedent
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_error-not_enough_indentation3 [] {
|
||||
# Test 8.2:
|
||||
# Error - Line does not have enough indentation
|
||||
assert error {||
|
||||
let s = "
|
||||
Line
|
||||
"
|
||||
$s | str dedent
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_first_line_whitespace_allowed [] {
|
||||
# Test 9:
|
||||
# "Hidden" whitespace on the first line is allowed
|
||||
assert equal (
|
||||
do {
|
||||
let s = $" (char tab) (char lsep) Identity (char lsep)"
|
||||
$s | str dedent
|
||||
}
|
||||
) " Identity "
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-dedent_using_tabs [] {
|
||||
# Test 10:
|
||||
# If the indentation on the last line uses tabs, then the number of tabs
|
||||
# will be used instead of spaces
|
||||
let actual = (
|
||||
$"(char lsep)(char tab)(char tab)First line(char lsep)(char tab)(char tab)(char tab)Second line(char lsep)(char tab)(char tab)"
|
||||
| str dedent
|
||||
)
|
||||
|
||||
let expected = $"First line(char lsep)(char tab)Second line"
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-unindent_simple [] {
|
||||
# Test 1:
|
||||
# Should start with "Heading" in the first character position
|
||||
# Should not end with a line-break
|
||||
# The blank line has no extra spaces
|
||||
let actual = (
|
||||
"
|
||||
Heading
|
||||
|
||||
one
|
||||
two
|
||||
"
|
||||
| str unindent
|
||||
)
|
||||
|
||||
let expected = $"Heading(char lsep)(char lsep) one(char lsep) two"
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-unindent_ignore_first_and_last_whitespace [] {
|
||||
# Test 2:
|
||||
# If the first and/or last line are only whitespace
|
||||
# then they shouldn't be included in the result
|
||||
|
||||
let actual = "
|
||||
Heading
|
||||
|
||||
one
|
||||
two
|
||||
"
|
||||
| str unindent
|
||||
|
||||
let expected = " Heading
|
||||
|
||||
one
|
||||
two"
|
||||
| str unindent
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-unindent_keep_extra_line [] {
|
||||
# Test 3:
|
||||
# Keep intentional blank lines at start and/or end
|
||||
|
||||
let actual = "
|
||||
|
||||
Content
|
||||
|
||||
" | str unindent
|
||||
|
||||
let expected = $"(char lsep)Content(char lsep)"
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-unindent_works_on_single_line [] {
|
||||
# Test 4:
|
||||
# Works on a single-line string
|
||||
# And trailing whitespace is preserved
|
||||
|
||||
let actual = (" Content " | str unindent)
|
||||
let expected = "Content "
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-unindent_whitespace_only_single_line [] {
|
||||
# Test 4:
|
||||
# Works on a single-line string with whitespace-only
|
||||
# Returns the original string
|
||||
|
||||
let actual = (" " | str unindent)
|
||||
let expected = " "
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
||||
|
||||
#[test]
|
||||
def str-unindent_whitespace_works_with_tabs [] {
|
||||
# Test 4:
|
||||
# Works with tabs for indentation
|
||||
|
||||
let actual = (
|
||||
$"(char lsep)(char tab)(char tab)Content(char lsep)"
|
||||
| str unindent --tabs
|
||||
)
|
||||
|
||||
let expected = "Content"
|
||||
|
||||
assert equal $actual $expected
|
||||
}
|
331  crates/nu-std/tests/test_std-rfc_tables.nu  Normal file
@ -0,0 +1,331 @@
|
||||
use std/assert
|
||||
use std-rfc/tables *
|
||||
|
||||
const test_table = [
|
||||
[ col-a col-b col-c col-d col-e col-f ];
|
||||
[ 'a0' 'b0' 'c0' 'd0' 'e0' 'f0' ]
|
||||
[ 'a1' 'b1' 'c1' 'd1' 'e1' 'f1' ]
|
||||
[ 'a2' 'b2' 'c2' 'd2' 'e2' 'f2' ]
|
||||
[ 'a3' 'b3' 'c3' 'd3' 'e3' 'f3' ]
|
||||
[ 'a4' 'b4' 'c4' 'd4' 'e4' 'f4' ]
|
||||
[ 'a5' 'b5' 'c5' 'd5' 'e5' 'f5' ]
|
||||
[ 'a6' 'b6' 'c6' 'd6' 'e6' 'f6' ]
|
||||
[ 'a7' 'b7' 'c7' 'd7' 'e7' 'f7' ]
|
||||
[ 'a8' 'b8' 'c8' 'd8' 'e8' 'f8' ]
|
||||
[ 'a9' 'b9' 'c9' 'd9' 'e9' 'f9' ]
|
||||
]
|
||||
|
||||
const enumerated_table = [
|
||||
[ index col-a col-b col-c col-d col-e col-f ];
|
||||
[ 0 'a0' 'b0' 'c0' 'd0' 'e0' 'f0' ]
|
||||
[ 1 'a1' 'b1' 'c1' 'd1' 'e1' 'f1' ]
|
||||
[ 2 'a2' 'b2' 'c2' 'd2' 'e2' 'f2' ]
|
||||
[ 3 'a3' 'b3' 'c3' 'd3' 'e3' 'f3' ]
|
||||
[ 4 'a4' 'b4' 'c4' 'd4' 'e4' 'f4' ]
|
||||
[ 5 'a5' 'b5' 'c5' 'd5' 'e5' 'f5' ]
|
||||
[ 6 'a6' 'b6' 'c6' 'd6' 'e6' 'f6' ]
|
||||
[ 7 'a7' 'b7' 'c7' 'd7' 'e7' 'f7' ]
|
||||
[ 8 'a8' 'b8' 'c8' 'd8' 'e8' 'f8' ]
|
||||
[ 9 'a9' 'b9' 'c9' 'd9' 'e9' 'f9' ]
|
||||
]
|
||||
|
||||
#[test]
|
||||
def select-slice--single_int [] {
|
||||
assert equal (
|
||||
$test_table | select slices 1
|
||||
) (
|
||||
$enumerated_table | select 1
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def select-slice--single_slice [] {
|
||||
assert equal (
|
||||
$test_table | select slices 2..4
|
||||
) (
|
||||
$enumerated_table | select 2 3 4
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def select-slice--complex [] {
|
||||
assert equal (
|
||||
# First and every following third-row + second row
|
||||
$test_table | select slices 1 0..3..100
|
||||
) (
|
||||
$enumerated_table | select 0 1 3 6 9
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def select-slice--out_of_bounds [] {
|
||||
assert equal (
|
||||
$test_table | select slices 100
|
||||
) (
|
||||
[]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def reject-slice--single_index [] {
|
||||
assert equal (
|
||||
$test_table | reject slices 4
|
||||
) (
|
||||
$enumerated_table | reject 4
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def reject-slice--slices [] {
|
||||
assert equal (
|
||||
# Reject rows 0-3 and 5-9, leaving only 4
|
||||
$test_table | reject slices 0..3 5..9
|
||||
) (
|
||||
$enumerated_table | select 4
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def reject-slice--out_of_bounds [] {
|
||||
assert error {
|
||||
$test_table | reject slices 1000
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
def select-col--index [] {
|
||||
assert equal (
|
||||
$test_table | select column-slices 2
|
||||
) (
|
||||
$test_table | select col-c
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def select-col--indices [] {
|
||||
assert equal (
|
||||
$test_table | select column-slices 2 4
|
||||
) (
|
||||
$test_table | select col-c col-e
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def select-col--slices_and_index [] {
|
||||
assert equal (
|
||||
$test_table | select column-slices 0..2..5 1
|
||||
) (
|
||||
$test_table | select col-a col-c col-e col-b
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def reject-col--slices_and_index [] {
|
||||
assert equal (
|
||||
$test_table | reject column-slices 0..2..5 1
|
||||
) (
|
||||
$enumerated_table | select col-d col-f
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
def reject-col--out_of_bounds [] {
|
||||
assert equal (
|
||||
$test_table | reject column-slices 1_000
|
||||
) (
|
||||
$test_table
|
||||
)
|
||||
}
|
||||
|
||||
const movies = [
    [ Film, Genre, Lead_Studio, Audience_score_%, Profitability, Rotten_Tomatoes_%, Worldwide_Gross, Year ];
    [ "Youth in Revolt", Comedy, "The Weinstein Company", 52, 1.09, 68, 19.62, 2010 ],
    [ "You Will Meet a Tall Dark Stranger", Comedy, Independent, 35, 1.211818182, 43, 26.66, 2010 ],
    [ "When in Rome", Comedy, Disney, 44, 0, 15, 43.04, 2010 ],
    [ "What Happens in Vegas", Comedy, Fox, 72, 6.267647029, 28, 219.37, 2008 ],
    [ "Water For Elephants", Drama, "20th Century Fox", 72, 3.081421053, 60, 117.09, 2011 ],
    [ WALL-E, Animation, Disney, 89, 2.896019067, 96, 521.28, 2008 ],
    [ Waitress, Romance, Independent, 67, 11.0897415, 89, 22.18, 2007 ],
    [ "Waiting For Forever", Romance, Independent, 53, 0.005, 6, 0.03, 2011 ],
    [ "Valentine's Day", Comedy, "Warner Bros.", 54, 4.184038462, 17, 217.57, 2010 ],
    [ "Tyler Perry's Why Did I get Married", Romance, Independent, 47, 3.7241924, 46, 55.86, 2007 ],
    [ "Twilight: Breaking Dawn", Romance, Independent, 68, 6.383363636, 26, 702.17, 2011 ],
    [ Twilight, Romance, Summit, 82, 10.18002703, 49, 376.66, 2008 ],
    [ "The Ugly Truth", Comedy, Independent, 68, 5.402631579, 14, 205.3, 2009 ],
    [ "The Twilight Saga: New Moon", Drama, Summit, 78, 14.1964, 27, 709.82, 2009 ],
    [ "The Time Traveler's Wife", Drama, Paramount, 65, 2.598205128, 38, 101.33, 2009 ],
    [ "The Proposal", Comedy, Disney, 74, 7.8675, 43, 314.7, 2009 ],
    [ "The Invention of Lying", Comedy, "Warner Bros.", 47, 1.751351351, 56, 32.4, 2009 ],
    [ "The Heartbreak Kid", Comedy, Paramount, 41, 2.129444167, 30, 127.77, 2007 ],
    [ "The Duchess", Drama, Paramount, 68, 3.207850222, 60, 43.31, 2008 ],
    [ "The Curious Case of Benjamin Button", Fantasy, "Warner Bros.", 81, 1.78394375, 73, 285.43, 2008 ],
    [ "The Back-up Plan", Comedy, CBS, 47, 2.202571429, 20, 77.09, 2010 ],
    [ Tangled, Animation, Disney, 88, 1.365692308, 89, 355.01, 2010 ],
    [ "Something Borrowed", Romance, Independent, 48, 1.719514286, 15, 60.18, 2011 ],
    [ "She's Out of My League", Comedy, Paramount, 60, 2.4405, 57, 48.81, 2010 ],
    [ "Sex and the City Two", Comedy, "Warner Bros.", 49, 2.8835, 15, 288.35, 2010 ],
    [ "Sex and the City 2", Comedy, "Warner Bros.", 49, 2.8835, 15, 288.35, 2010 ],
    [ "Sex and the City", Comedy, "Warner Bros.", 81, 7.221795791, 49, 415.25, 2008 ],
    [ "Remember Me", Drama, Summit, 70, 3.49125, 28, 55.86, 2010 ],
    [ "Rachel Getting Married", Drama, Independent, 61, 1.384166667, 85, 16.61, 2008 ],
    [ Penelope, Comedy, Summit, 74, 1.382799733, 52, 20.74, 2008 ],
    [ "P.S. I Love You", Romance, Independent, 82, 5.103116833, 21, 153.09, 2007 ],
    [ "Over Her Dead Body", Comedy, "New Line", 47, 2.071, 15, 20.71, 2008 ],
    [ "Our Family Wedding", Comedy, Independent, 49, 0, 14, 21.37, 2010 ],
    [ "One Day", Romance, Independent, 54, 3.682733333, 37, 55.24, 2011 ],
    [ "Not Easily Broken", Drama, Independent, 66, 2.14, 34, 10.7, 2009 ],
    [ "No Reservations", Comedy, "Warner Bros.", 64, 3.307180357, 39, 92.6, 2007 ],
    [ "Nick and Norah's Infinite Playlist", Comedy, Sony, 67, 3.3527293, 73, 33.53, 2008 ],
    [ "New Year's Eve", Romance, "Warner Bros.", 48, 2.536428571, 8, 142.04, 2011 ],
    [ "My Week with Marilyn", Drama, "The Weinstein Company", 84, 0.8258, 83, 8.26, 2011 ],
    [ "Music and Lyrics", Romance, "Warner Bros.", 70, 3.64741055, 63, 145.9, 2007 ],
    [ "Monte Carlo", Romance, "20th Century Fox", 50, 1.9832, 38, 39.66, 2011 ],
    [ "Miss Pettigrew Lives for a Day", Comedy, Independent, 70, 0.2528949, 78, 15.17, 2008 ],
    [ "Midnight in Paris", Romence, Sony, 84, 8.744705882, 93, 148.66, 2011 ],
    [ "Marley and Me", Comedy, Fox, 77, 3.746781818, 63, 206.07, 2008 ],
    [ "Mamma Mia!", Comedy, Universal, 76, 9.234453864, 53, 609.47, 2008 ],
    [ "Mamma Mia!", Comedy, Universal, 76, 9.234453864, 53, 609.47, 2008 ],
    [ "Made of Honor", Comdy, Sony, 61, 2.64906835, 13, 105.96, 2008 ],
    [ "Love Happens", Drama, Universal, 40, 2.004444444, 18, 36.08, 2009 ],
    [ "Love & Other Drugs", Comedy, Fox, 55, 1.817666667, 48, 54.53, 2010 ],
    [ "Life as We Know It", Comedy, Independent, 62, 2.530526316, 28, 96.16, 2010 ],
    [ "License to Wed", Comedy, "Warner Bros.", 55, 1.9802064, 8, 69.31, 2007 ],
    [ "Letters to Juliet", Comedy, Summit, 62, 2.639333333, 40, 79.18, 2010 ],
    [ "Leap Year", Comedy, Universal, 49, 1.715263158, 21, 32.59, 2010 ],
    [ "Knocked Up", Comedy, Universal, 83, 6.636401848, 91, 219, 2007 ],
    [ Killers, Action, Lionsgate, 45, 1.245333333, 11, 93.4, 2010 ],
    [ "Just Wright", Comedy, Fox, 58, 1.797416667, 45, 21.57, 2010 ],
    [ "Jane Eyre", Romance, Universal, 77, 0, 85, 30.15, 2011 ],
    [ "It's Complicated", Comedy, Universal, 63, 2.642352941, 56, 224.6, 2009 ],
    [ "I Love You Phillip Morris", Comedy, Independent, 57, 1.34, 71, 20.1, 2010 ],
    [ "High School Musical 3: Senior Year", Comedy, Disney, 76, 22.91313646, 65, 252.04, 2008 ],
    [ "He's Just Not That Into You", Comedy, "Warner Bros.", 60, 7.1536, 42, 178.84, 2009 ],
    [ "Good Luck Chuck", Comedy, Lionsgate, 61, 2.36768512, 3, 59.19, 2007 ],
    [ "Going the Distance", Comedy, "Warner Bros.", 56, 1.3140625, 53, 42.05, 2010 ],
    [ "Gnomeo and Juliet", Animation, Disney, 52, 5.387972222, 56, 193.97, 2011 ],
    [ "Gnomeo and Juliet", Animation, Disney, 52, 5.387972222, 56, 193.97, 2011 ],
    [ "Ghosts of Girlfriends Past", Comedy, "Warner Bros.", 47, 2.0444, 27, 102.22, 2009 ],
    [ "Four Christmases", Comedy, "Warner Bros.", 52, 2.022925, 26, 161.83, 2008 ],
    [ Fireproof, Drama, Independent, 51, 66.934, 40, 33.47, 2008 ],
    [ Enchanted, Comedy, Disney, 80, 4.005737082, 93, 340.49, 2007 ],
    [ "Dear John", Drama, Sony, 66, 4.5988, 29, 114.97, 2010 ],
    [ Beginners, Comedy, Independent, 80, 4.471875, 84, 14.31, 2011 ],
    [ "Across the Universe", romance, Independent, 84, 0.652603178, 54, 29.37, 2007 ],
    [ "A Serious Man", Drama, Universal, 64, 4.382857143, 89, 30.68, 2009 ],
    [ "A Dangerous Method", Drama, Independent, 89, 0.44864475, 79, 8.97, 2011 ],
    [ "27 Dresses", Comedy, Fox, 71, 5.3436218, 40, 160.31, 2008 ],
    [ "(500) Days of Summer", comedy, Fox, 81, 8.096, 87, 60.72, 2009 ]
]

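# Calling `aggregate` on a grouped table with no columns only adds the
# per-group row count.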
#[test]
def count_movies_by_Lead_Studio [] {
    let grouped = $movies | group-by Lead_Studio --to-table
    let out = $grouped | aggregate
    # let expected = $grouped | insert count {get items | length} | select Lead_Studio count
    let expected = [
        [ Lead_Studio, count ];
        [ "The Weinstein Company", 2 ],
        [ Independent, 19 ],
        [ Disney, 8 ],
        [ Fox, 6 ],
        [ "20th Century Fox", 2 ],
        [ "Warner Bros.", 14 ],
        [ Summit, 5 ],
        [ Paramount, 4 ],
        [ CBS, 1 ],
        [ "New Line", 1 ],
        [ Sony, 4 ],
        [ Universal, 8 ],
        [ Lionsgate, 2 ]
    ]

    assert equal $out $expected
}

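# `--ops` takes a record of {suffix: closure}; each closure runs per group and
# its result lands in a `<column>_<suffix>` column (here Worldwide_Gross_avg).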
#[test]
def average_gross_by_Genre [] {
    let grouped = $movies | group-by Genre --to-table
    let out = $grouped | aggregate --ops {avg: {math avg}} Worldwide_Gross | select Genre Worldwide_Gross_avg
    # let expected = $grouped | insert Worldwide_Gross_avg {get items.Worldwide_Gross | math avg} | select Genre Worldwide_Gross_avg

    # Round to 2 digits of precision to keep floating point operations consistent between platforms.
    let out = $out | update Worldwide_Gross_avg {math round --precision 2}
    let expected = [
        [ Genre, Worldwide_Gross_avg ];
        [ Comedy, 148.33 ],
        [ Drama, 99.01 ],
        [ Animation, 316.06 ],
        [ Romance, 148.60 ],
        [ Fantasy, 285.43 ],
        [ Romence, 148.66 ],
        [ Comdy, 105.96 ],
        [ Action, 93.40 ],
        [ romance, 29.37 ],
        [ comedy, 60.72 ]
    ]

    assert equal $out $expected
}

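# Without `--ops`, `aggregate` applies the default operations (min, avg, max, sum)
# to each requested column, in addition to the group count.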
#[test]
def aggregate_default_ops [] {
    let grouped = $movies | group-by Genre --to-table
    let out = $grouped | aggregate Worldwide_Gross

    # Round to 2 digits of precision to keep floating point operations consistent between platforms.
    let out = $out | update cells -c [Worldwide_Gross_min, Worldwide_Gross_avg, Worldwide_Gross_max, Worldwide_Gross_sum] { math round --precision 2 }

    let expected = [
        [Genre    , count, Worldwide_Gross_min, Worldwide_Gross_avg, Worldwide_Gross_max, Worldwide_Gross_sum];
        [Comedy   , 41, 14.31, 148.33, 609.47, 6081.73],
        [Drama    , 13, 8.26, 99.01, 709.82, 1287.15],
        [Animation, 4, 193.97, 316.06, 521.28, 1264.23],
        [Romance  , 12, 0.03, 148.60, 702.17, 1783.16],
        [Fantasy  , 1, 285.43, 285.43, 285.43, 285.43],
        [Romence  , 1, 148.66, 148.66, 148.66, 148.66],
        [Comdy    , 1, 105.96, 105.96, 105.96, 105.96],
        [Action   , 1, 93.40, 93.40, 93.40, 93.40],
        [romance  , 1, 29.37, 29.37, 29.37, 29.37],
        [comedy   , 1, 60.72, 60.72, 60.72, 60.72],
    ]

    assert equal $out $expected
}

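# `group-by` without `--to-table` returns a record rather than a table,
# so `aggregate` is expected to reject its output.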
#[test]
def throw_error_on_non-table_input [] {
    # without --to-table
    let out = try {
        $movies | group-by Genre | aggregate Worldwide_Gross
    } catch {|e|
        $e.msg
    }

    assert equal $out "input must be a table"
}

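# The missing-column error is inspected through the structured (JSON) form of
# the caught error rather than its top-level message.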
#[test]
def throw_error_on_non-existing_column [] {
    let grouped = $movies | group-by Genre --to-table
    let error = try {
        $grouped | aggregate --ops {avg: {math avg}} NotInTheDataSet
    } catch {|e|
        $e.json | from json
    }

    assert equal $error.inner.0.msg "Cannot find column '$.items.NotInTheDataSet'"
}

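# `aggregate` also works on an ungrouped table, producing a single row of
# summary statistics over the whole input.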
#[test]
def aggregate_stats_without_grouping [] {
    let out = $movies | aggregate Year | update cells -c [Year_min Year_avg Year_max Year_sum] {math round -p 2}
    let expected = [{
        count: 76,
        Year_min: 2007,
        Year_avg: 2009.09,
        Year_max: 2011,
        Year_sum: 152691
    }]

    assert equal $out $expected
}