// nushell/src/data/base.rs

mod debug;
mod property_get;
pub(crate) mod shape;
use crate::context::CommandRegistry;
use crate::data::base::shape::{Column, InlineShape, TypeShape};
use crate::data::TaggedDictBuilder;
use crate::errors::ShellError;
use crate::evaluate::{evaluate_baseline_expr, Scope};
use crate::parser::hir::path::{ColumnPath, PathMember};
use crate::parser::{hir, Operator};
use crate::prelude::*;
use crate::Text;
use chrono::{DateTime, Utc};
use chrono_humanize::Humanize;
use derive_new::new;
use indexmap::IndexMap;
use log::trace;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::time::SystemTime;
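
// Serialization helpers: `Primitive::Int` and `Primitive::Decimal` are serialized
// through `i64`/`f64` by the modules below; values outside that range are rejected.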
mod serde_bigint {
use num_traits::cast::FromPrimitive;
use num_traits::cast::ToPrimitive;
pub fn serialize<S>(big_int: &super::BigInt, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serde::Serialize::serialize(
&big_int
.to_i64()
.ok_or(serde::ser::Error::custom("expected an i64-sized bignum"))?,
serializer,
)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<super::BigInt, D::Error>
where
D: serde::Deserializer<'de>,
{
let x: i64 = serde::Deserialize::deserialize(deserializer)?;
Ok(super::BigInt::from_i64(x)
.ok_or(serde::de::Error::custom("expected an i64-sized bignum"))?)
}
}
mod serde_bigdecimal {
use num_traits::cast::FromPrimitive;
use num_traits::cast::ToPrimitive;
pub fn serialize<S>(big_decimal: &super::BigDecimal, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serde::Serialize::serialize(
&big_decimal
.to_f64()
.ok_or(serde::ser::Error::custom("expected an f64-sized bigdecimal"))?,
serializer,
)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<super::BigDecimal, D::Error>
where
D: serde::Deserializer<'de>,
{
let x: f64 = serde::Deserialize::deserialize(deserializer)?;
Ok(super::BigDecimal::from_f64(x)
.ok_or(serde::de::Error::custom("expected an f64-sized bigdecimal"))?)
}
}
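
/// A primitive value: the leaf, non-structured data that can sit in a table cell.
/// Structured values (rows, tables, blocks, errors) live in `Value` instead.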
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Deserialize, Serialize)]
pub enum Primitive {
Nothing,
#[serde(with = "serde_bigint")]
Int(BigInt),
#[serde(with = "serde_bigdecimal")]
Decimal(BigDecimal),
Bytes(u64),
String(String),
ColumnPath(ColumnPath),
Pattern(String),
Boolean(bool),
Date(DateTime<Utc>),
Duration(u64), // Duration in seconds
Path(PathBuf),
#[serde(with = "serde_bytes")]
Binary(Vec<u8>),
// Stream markers (used as bookend markers rather than actual values)
BeginningOfStream,
EndOfStream,
}
impl ShellTypeName for Primitive {
fn type_name(&self) -> &'static str {
match self {
Primitive::Nothing => "nothing",
Primitive::Int(_) => "integer",
Primitive::Decimal(_) => "decimal",
Primitive::Bytes(_) => "bytes",
Primitive::String(_) => "string",
Primitive::ColumnPath(_) => "column path",
Primitive::Pattern(_) => "pattern",
Primitive::Boolean(_) => "boolean",
Primitive::Date(_) => "date",
Primitive::Duration(_) => "duration",
Primitive::Path(_) => "file path",
Primitive::Binary(_) => "binary",
Primitive::BeginningOfStream => "marker<beginning of stream>",
Primitive::EndOfStream => "marker<end of stream>",
}
}
}
impl From<BigDecimal> for Primitive {
fn from(decimal: BigDecimal) -> Primitive {
Primitive::Decimal(decimal)
}
}
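// Note: `BigDecimal::from_f64` returns `None` for NaN and infinite inputs, so this
// conversion panics on non-finite floats.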
impl From<f64> for Primitive {
fn from(float: f64) -> Primitive {
Primitive::Decimal(BigDecimal::from_f64(float).unwrap())
}
}
impl Primitive {
pub fn number(number: impl Into<Number>) -> Primitive {
let number = number.into();
match number {
Number::Int(int) => Primitive::Int(int),
Number::Decimal(decimal) => Primitive::Decimal(decimal),
}
}
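/// Formats this primitive for display. `field_name` is only consulted for booleans:
/// with a non-empty column name, `true` renders as that name and `false` as an empty
/// string; otherwise booleans render as "Yes"/"No".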
pub fn format(&self, field_name: Option<&String>) -> String {
match self {
Primitive::Nothing => String::new(),
Primitive::BeginningOfStream => String::new(),
Primitive::EndOfStream => String::new(),
Primitive::Path(p) => format!("{}", p.display()),
Primitive::Bytes(b) => {
let byte = byte_unit::Byte::from_bytes(*b as u128);
if byte.get_bytes() == 0u128 {
return "".to_string();
}
let byte = byte.get_appropriate_unit(false);
match byte.get_unit() {
byte_unit::ByteUnit::B => format!("{} B ", byte.get_value()),
_ => format!("{}", byte.format(1)),
}
}
Primitive::Duration(sec) => format_duration(*sec),
Primitive::Int(i) => format!("{}", i),
Primitive::Decimal(decimal) => format!("{}", decimal),
Primitive::Pattern(s) => format!("{}", s),
Primitive::String(s) => format!("{}", s),
Primitive::ColumnPath(p) => {
let mut members = p.iter();
let mut f = String::new();
f.push_str(
&members
.next()
.expect("BUG: column path with zero members")
.to_string(),
);
for member in members {
f.push_str(".");
f.push_str(&member.to_string())
}
f
}
Primitive::Boolean(b) => match (b, field_name) {
(true, None) => format!("Yes"),
(false, None) => format!("No"),
(true, Some(s)) if !s.is_empty() => format!("{}", s),
(false, Some(s)) if !s.is_empty() => format!(""),
(true, Some(_)) => format!("Yes"),
(false, Some(_)) => format!("No"),
},
Primitive::Binary(_) => format!("<binary>"),
Primitive::Date(d) => format!("{}", d.humanize()),
}
}
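/// Styling hint for table rendering: numeric values are right-aligned ("r"), and a
/// zero byte count is centred ("c") as a 'missing' indicator.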
pub fn style(&self) -> &'static str {
match self {
Primitive::Bytes(0) => "c", // centre 'missing' indicator
Primitive::Int(_) | Primitive::Bytes(_) | Primitive::Decimal(_) => "r",
_ => "",
}
}
}
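
// Renders a duration in whole seconds as colon-separated days/hours/minutes/seconds,
// dropping leading zero components; durations under a minute render as "N sec(s)".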
fn format_duration(sec: u64) -> String {
let (minutes, seconds) = (sec / 60, sec % 60);
let (hours, minutes) = (minutes / 60, minutes % 60);
let (days, hours) = (hours / 24, hours % 24);
match (days, hours, minutes, seconds) {
(0, 0, 0, 1) => format!("1 sec"),
(0, 0, 0, s) => format!("{} secs", s),
(0, 0, m, s) => format!("{}:{:02}", m, s),
(0, h, m, s) => format!("{}:{:02}:{:02}", h, m, s),
(d, h, m, s) => format!("{}:{:02}:{:02}:{:02}", d, h, m, s),
}
}
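
/// A binary operation captured as data: a left operand, an operator, and a right operand.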
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, new, Serialize)]
pub struct Operation {
pub(crate) left: Value,
pub(crate) operator: Operator,
pub(crate) right: Value,
}
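
/// A block of pre-parsed expressions plus the source text and tag they came from.
/// Invoking a block evaluates each expression against a single input value and
/// returns the result of the last one.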
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, Hash, Serialize, Deserialize, new)]
pub struct Block {
pub(crate) expressions: Vec<hir::Expression>,
pub(crate) source: Text,
pub(crate) tag: Tag,
}
impl Block {
pub fn invoke(&self, value: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
let scope = Scope::new(value.clone());
if self.expressions.is_empty() {
return Ok(Value::nothing().tagged(&self.tag));
}
let mut last = None;
trace!(
"EXPRS = {:?}",
self.expressions
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<_>>()
);
for expr in self.expressions.iter() {
last = Some(evaluate_baseline_expr(
&expr,
&CommandRegistry::empty(),
&scope,
&self.source,
)?)
}
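// Safe to unwrap: the expressions list is non-empty (checked above), so the loop
// assigned `last` at least once.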
Ok(last.unwrap())
}
}
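
/// A nushell value: either a primitive, a row (dictionary of named values), a table
/// (list of tagged values), an error, or a block of code.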
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize, Deserialize)]
pub enum Value {
Primitive(Primitive),
Row(crate::data::Dictionary),
Table(Vec<Tagged<Value>>),
// Errors are a type of value too
Error(ShellError),
Block(Block),
}
impl ShellTypeName for Value {
fn type_name(&self) -> &'static str {
match self {
Value::Primitive(p) => p.type_name(),
Value::Row(_) => "row",
Value::Table(_) => "table",
Value::Error(_) => "error",
Value::Block(_) => "block",
}
}
}
impl Into<Value> for Number {
fn into(self) -> Value {
match self {
Number::Int(int) => Value::int(int),
Number::Decimal(decimal) => Value::decimal(decimal),
}
}
}
impl Into<Value> for &Number {
fn into(self) -> Value {
match self {
Number::Int(int) => Value::int(int.clone()),
Number::Decimal(decimal) => Value::decimal(decimal.clone()),
}
}
}
impl Tagged<Value> {
pub fn tagged_type_name(&self) -> Tagged<String> {
let name = self.type_name().to_string();
name.tagged(self.tag())
}
}
impl Tagged<&Value> {
pub fn tagged_type_name(&self) -> Tagged<String> {
let name = self.type_name().to_string();
name.tagged(self.tag())
}
}
impl std::convert::TryFrom<&Tagged<Value>> for Block {
type Error = ShellError;
fn try_from(value: &Tagged<Value>) -> Result<Block, ShellError> {
match value.item() {
Value::Block(block) => Ok(block.clone()),
v => Err(ShellError::type_error(
"Block",
v.type_name().spanned(value.span()),
)),
}
}
}
impl std::convert::TryFrom<&Tagged<Value>> for i64 {
type Error = ShellError;
fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> {
match value.item() {
Value::Primitive(Primitive::Int(int)) => {
int.tagged(&value.tag).coerce_into("converting to i64")
}
v => Err(ShellError::type_error(
"Integer",
v.type_name().spanned(value.span()),
)),
}
}
}
impl std::convert::TryFrom<&Tagged<Value>> for String {
type Error = ShellError;
fn try_from(value: &Tagged<Value>) -> Result<String, ShellError> {
match value.item() {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
v => Err(ShellError::type_error(
"String",
v.type_name().spanned(value.span()),
)),
}
}
}
impl std::convert::TryFrom<&Tagged<Value>> for Vec<u8> {
type Error = ShellError;
fn try_from(value: &Tagged<Value>) -> Result<Vec<u8>, ShellError> {
match value.item() {
Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
v => Err(ShellError::type_error(
"Binary",
v.type_name().spanned(value.span()),
)),
}
}
}
impl<'a> std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::data::Dictionary {
type Error = ShellError;
fn try_from(value: &'a Tagged<Value>) -> Result<&'a crate::data::Dictionary, ShellError> {
match value.item() {
Value::Row(d) => Ok(d),
v => Err(ShellError::type_error(
"Dictionary",
v.type_name().spanned(value.span()),
)),
}
}
}
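
/// Whether a command-line flag was passed. A flag that is present must carry a
/// boolean `true`; any other value is reported as a type error.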
#[derive(Serialize, Deserialize)]
pub enum Switch {
Present,
Absent,
}
impl std::convert::TryFrom<Option<&Tagged<Value>>> for Switch {
type Error = ShellError;
fn try_from(value: Option<&Tagged<Value>>) -> Result<Switch, ShellError> {
match value {
None => Ok(Switch::Absent),
Some(value) => match value.item() {
Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present),
v => Err(ShellError::type_error(
"Boolean",
v.type_name().spanned(value.span()),
)),
},
}
}
}
impl Value {
pub fn data_descriptors(&self) -> Vec<String> {
match self {
Value::Primitive(_) => vec![],
Value::Row(columns) => columns
.entries
.keys()
.into_iter()
.map(|x| x.to_string())
.collect(),
Value::Block(_) => vec![],
Value::Table(_) => vec![],
Value::Error(_) => vec![],
}
}
pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> {
2019-05-10 18:59:12 +02:00
match self {
p @ Value::Primitive(_) => MaybeOwned::Borrowed(p),
Value::Row(o) => o.get_data(desc),
Value::Block(_) => MaybeOwned::Owned(Value::nothing()),
Value::Table(_) => MaybeOwned::Owned(Value::nothing()),
Value::Error(_) => MaybeOwned::Owned(Value::nothing()),
}
}
pub(crate) fn format_type(&self, width: usize) -> String {
TypeShape::from_value(self).colored_string(width)
}
pub(crate) fn format_leaf(&self) -> DebugDocBuilder {
InlineShape::from_value(self).format().pretty_debug()
}
pub(crate) fn format_for_column(&self, column: impl Into<Column>) -> DebugDocBuilder {
InlineShape::from_value(self)
.format_for_column(column)
.pretty_debug()
}
pub(crate) fn style_leaf(&self) -> &'static str {
match self {
Value::Primitive(p) => p.style(),
_ => "",
}
}
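// Compares two values under the given operator by first coercing both sides into a
// common `CompareValues` representation; returns the two type names if no coercion
// exists.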
pub(crate) fn compare(
&self,
operator: &Operator,
other: &Value,
) -> Result<bool, (&'static str, &'static str)> {
match operator {
_ => {
let coerced = coerce_compare(self, other)?;
let ordering = coerced.compare();
use std::cmp::Ordering;
let result = match (operator, ordering) {
(Operator::Equal, Ordering::Equal) => true,
(Operator::NotEqual, Ordering::Less)
| (Operator::NotEqual, Ordering::Greater) => true,
(Operator::LessThan, Ordering::Less) => true,
(Operator::GreaterThan, Ordering::Greater) => true,
(Operator::GreaterThanOrEqual, Ordering::Greater)
| (Operator::GreaterThanOrEqual, Ordering::Equal) => true,
(Operator::LessThanOrEqual, Ordering::Less)
| (Operator::LessThanOrEqual, Ordering::Equal) => true,
_ => false,
};
Ok(result)
}
}
}
pub(crate) fn is_true(&self) -> bool {
2019-05-28 08:45:18 +02:00
match self {
Value::Primitive(Primitive::Boolean(true)) => true,
_ => false,
}
}
pub(crate) fn is_error(&self) -> bool {
match self {
Value::Error(_err) => true,
_ => false,
}
}
pub(crate) fn expect_error(&self) -> ShellError {
match self {
Value::Error(err) => err.clone(),
_ => panic!("Don't call expect_error without first calling is_error"),
}
}
#[allow(unused)]
pub fn row(entries: IndexMap<String, Tagged<Value>>) -> Value {
Value::Row(entries.into())
}
pub fn table(list: &Vec<Tagged<Value>>) -> Value {
Value::Table(list.to_vec())
}
pub fn string(s: impl Into<String>) -> Value {
Value::Primitive(Primitive::String(s.into()))
}
pub fn column_path(s: Vec<impl Into<PathMember>>) -> Value {
Value::Primitive(Primitive::ColumnPath(ColumnPath::new(
s.into_iter().map(|p| p.into()).collect(),
)))
}
pub fn int(i: impl Into<BigInt>) -> Value {
Value::Primitive(Primitive::Int(i.into()))
}
pub fn pattern(s: impl Into<String>) -> Value {
Value::Primitive(Primitive::Pattern(s.into()))
}
pub fn path(s: impl Into<PathBuf>) -> Value {
Value::Primitive(Primitive::Path(s.into()))
}
pub fn bytes(s: impl Into<u64>) -> Value {
Value::Primitive(Primitive::Bytes(s.into()))
}
pub fn decimal(s: impl Into<BigDecimal>) -> Value {
Value::Primitive(Primitive::Decimal(s.into()))
}
pub fn binary(binary: Vec<u8>) -> Value {
Value::Primitive(Primitive::Binary(binary))
}
pub fn number(s: impl Into<Number>) -> Value {
let num = s.into();
match num {
Number::Int(int) => Value::int(int),
Number::Decimal(decimal) => Value::decimal(decimal),
}
}
pub fn boolean(s: impl Into<bool>) -> Value {
Value::Primitive(Primitive::Boolean(s.into()))
}
pub fn duration(secs: u64) -> Value {
Value::Primitive(Primitive::Duration(secs))
}
pub fn system_date(s: SystemTime) -> Value {
Value::Primitive(Primitive::Date(s.into()))
}
pub fn date_from_str(s: Tagged<&str>) -> Result<Value, ShellError> {
let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| {
ShellError::labeled_error(
&format!("Date parse error: {}", err),
"original value",
s.tag,
)
})?;
let date = date.with_timezone(&chrono::offset::Utc);
Ok(Value::Primitive(Primitive::Date(date)))
}
pub fn nothing() -> Value {
Value::Primitive(Primitive::Nothing)
}
}
impl Tagged<Value> {
pub(crate) fn as_path(&self) -> Result<PathBuf, ShellError> {
match self.item() {
Value::Primitive(Primitive::Path(path)) => Ok(path.clone()),
Value::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str).clone()),
other => Err(ShellError::type_error(
"Path",
other.type_name().spanned(self.span()),
)),
}
}
}
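
// Row projection helpers: `select_fields` builds a new row containing only the named
// columns (missing columns become `Nothing`), while `reject_fields` keeps every
// column except the named ones.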
pub(crate) fn select_fields(obj: &Value, fields: &[String], tag: impl Into<Tag>) -> Tagged<Value> {
let mut out = TaggedDictBuilder::new(tag);
let descs = obj.data_descriptors();
for field in fields {
match descs.iter().find(|d| *d == field) {
None => out.insert(field, Value::nothing()),
Some(desc) => out.insert(desc.clone(), obj.get_data(desc).borrow().clone()),
}
}
out.into_tagged_value()
}
pub(crate) fn reject_fields(obj: &Value, fields: &[String], tag: impl Into<Tag>) -> Tagged<Value> {
let mut out = TaggedDictBuilder::new(tag);
let descs = obj.data_descriptors();
for desc in descs {
if fields.iter().any(|field| *field == desc) {
continue;
} else {
out.insert(desc.clone(), obj.get_data(&desc).borrow().clone())
}
}
out.into_tagged_value()
}
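
// The pairs of values that can be compared once both sides have been coerced into a
// common representation.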
enum CompareValues {
Ints(BigInt, BigInt),
Decimals(BigDecimal, BigDecimal),
String(String, String),
Date(DateTime<Utc>, DateTime<Utc>),
DateDuration(DateTime<Utc>, u64),
}
impl CompareValues {
fn compare(&self) -> std::cmp::Ordering {
match self {
CompareValues::Ints(left, right) => left.cmp(right),
CompareValues::Decimals(left, right) => left.cmp(right),
CompareValues::String(left, right) => left.cmp(right),
CompareValues::Date(left, right) => left.cmp(right),
CompareValues::DateDuration(left, right) => {
use std::time::Duration;
// Create the datetime we're comparing against, as duration is an offset from now
let right: DateTime<Utc> = (SystemTime::now() - Duration::from_secs(*right)).into();
right.cmp(left)
}
}
}
}
fn coerce_compare(
left: &Value,
right: &Value,
) -> Result<CompareValues, (&'static str, &'static str)> {
match (left, right) {
(Value::Primitive(left), Value::Primitive(right)) => coerce_compare_primitive(left, right),
_ => Err((left.type_name(), right.type_name())),
}
}
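
// Numeric primitives (Int, Decimal, Bytes) are cross-coerced so that, for example, a
// byte count can be compared against an integer or decimal, and a date can be
// compared against a duration (interpreted as an offset back from now).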
fn coerce_compare_primitive(
left: &Primitive,
right: &Primitive,
) -> Result<CompareValues, (&'static str, &'static str)> {
use Primitive::*;
Ok(match (left, right) {
(Int(left), Int(right)) => CompareValues::Ints(left.clone(), right.clone()),
(Int(left), Decimal(right)) => {
CompareValues::Decimals(BigDecimal::zero() + left, right.clone())
}
(Int(left), Bytes(right)) => CompareValues::Ints(left.clone(), BigInt::from(*right)),
(Decimal(left), Decimal(right)) => CompareValues::Decimals(left.clone(), right.clone()),
(Decimal(left), Int(right)) => {
CompareValues::Decimals(left.clone(), BigDecimal::zero() + right)
}
(Decimal(left), Bytes(right)) => {
CompareValues::Decimals(left.clone(), BigDecimal::from(*right))
}
(Bytes(left), Int(right)) => CompareValues::Ints(BigInt::from(*left), right.clone()),
(Bytes(left), Decimal(right)) => {
CompareValues::Decimals(BigDecimal::from(*left), right.clone())
}
(String(left), String(right)) => CompareValues::String(left.clone(), right.clone()),
(Date(left), Date(right)) => CompareValues::Date(left.clone(), right.clone()),
(Date(left), Duration(right)) => CompareValues::DateDuration(left.clone(), right.clone()),
_ => return Err((left.type_name(), right.type_name())),
})
}
#[cfg(test)]
mod tests {
use crate::data::meta::*;
use crate::parser::hir::path::PathMember;
use crate::ColumnPath as ColumnPathValue;
use crate::ShellError;
use crate::Value;
use indexmap::IndexMap;
use num_bigint::BigInt;
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn int(input: impl Into<BigInt>) -> Tagged<Value> {
Value::int(input.into()).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
Value::table(list).tagged_unknown()
}
fn error_callback(
reason: &'static str,
) -> impl FnOnce((&Value, &PathMember, ShellError)) -> ShellError {
move |(_obj_source, _column_path_tried, _err)| ShellError::unimplemented(reason)
}
fn column_path(paths: &Vec<Tagged<Value>>) -> Tagged<ColumnPathValue> {
table(&paths.iter().cloned().collect())
.as_column_path()
.unwrap()
}
#[test]
fn gets_matching_field_from_a_row() {
let row = Value::row(indexmap! {
"amigos".into() => table(&vec![string("andres"),string("jonathan"),string("yehuda")])
});
assert_eq!(
row.get_data_by_key("amigos".spanned_unknown()).unwrap(),
table(&vec![
string("andres"),
string("jonathan"),
string("yehuda")
])
);
}
#[test]
fn gets_matching_field_from_nested_rows_inside_a_row() {
let field_path = column_path(&vec![string("package"), string("version")]);
let (version, tag) = string("0.4.0").into_parts();
let value = Value::row(indexmap! {
"package".into() =>
row(indexmap! {
"name".into() => string("nu"),
"version".into() => string("0.4.0")
})
});
assert_eq!(
*value
.tagged(tag)
.get_data_by_column_path(&field_path, Box::new(error_callback("package.version")))
.unwrap(),
version
)
}
#[test]
fn gets_first_matching_field_from_rows_with_same_field_inside_a_table() {
let field_path = column_path(&vec![string("package"), string("authors"), string("name")]);
let (_, tag) = string("Andrés N. Robalino").into_parts();
let value = Value::row(indexmap! {
"package".into() => row(indexmap! {
"name".into() => string("nu"),
"version".into() => string("0.4.0"),
"authors".into() => table(&vec![
row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
row(indexmap!{"name".into() => string("Jonathan Turner")}),
row(indexmap!{"name".into() => string("Yehuda Katz")})
])
})
});
assert_eq!(
value
.tagged(tag)
.get_data_by_column_path(
&field_path,
Box::new(error_callback("package.authors.name"))
)
.unwrap(),
table(&vec![
string("Andrés N. Robalino"),
string("Jonathan Turner"),
string("Yehuda Katz")
])
)
}
#[test]
fn column_path_that_contains_just_a_number_gets_a_row_from_a_table() {
let field_path = column_path(&vec![string("package"), string("authors"), int(0)]);
let (_, tag) = string("Andrés N. Robalino").into_parts();
let value = Value::row(indexmap! {
"package".into() => row(indexmap! {
"name".into() => string("nu"),
"version".into() => string("0.4.0"),
"authors".into() => table(&vec![
row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
row(indexmap!{"name".into() => string("Jonathan Turner")}),
row(indexmap!{"name".into() => string("Yehuda Katz")})
])
})
});
assert_eq!(
*value
.tagged(tag)
.get_data_by_column_path(&field_path, Box::new(error_callback("package.authors.0")))
.unwrap(),
Value::row(indexmap! {
"name".into() => string("Andrés N. Robalino")
})
);
}
#[test]
fn column_path_that_contains_just_a_number_gets_a_row_from_a_row() {
let field_path = column_path(&vec![string("package"), string("authors"), string("0")]);
let (_, tag) = string("Andrés N. Robalino").into_parts();
let value = Value::row(indexmap! {
"package".into() => row(indexmap! {
"name".into() => string("nu"),
"version".into() => string("0.4.0"),
"authors".into() => row(indexmap! {
"0".into() => row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
"1".into() => row(indexmap!{"name".into() => string("Jonathan Turner")}),
"2".into() => row(indexmap!{"name".into() => string("Yehuda Katz")}),
})
})
});
assert_eq!(
*value
.tagged(tag)
.get_data_by_column_path(
&field_path,
Box::new(error_callback("package.authors.\"0\""))
)
.unwrap(),
Value::row(indexmap! {
"name".into() => string("Andrés N. Robalino")
})
);
}
#[test]
fn replaces_matching_field_from_a_row() {
let field_path = column_path(&vec![string("amigos")]);
let sample = Value::row(indexmap! {
"amigos".into() => table(&vec![
string("andres"),
string("jonathan"),
string("yehuda"),
]),
});
let (replacement, tag) = string("jonas").into_parts();
let actual = sample
.tagged(tag)
.replace_data_at_column_path(&field_path, replacement)
.unwrap();
assert_eq!(actual, row(indexmap! {"amigos".into() => string("jonas")}));
}
#[test]
fn replaces_matching_field_from_nested_rows_inside_a_row() {
let field_path = column_path(&vec![
string("package"),
string("authors"),
string("los.3.caballeros"),
]);
let sample = Value::row(indexmap! {
"package".into() => row(indexmap! {
"authors".into() => row(indexmap! {
"los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]),
"los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]),
"los.3.caballeros".into() => table(&vec![string("andres::yehuda::jonathan")])
})
})
});
let (replacement, tag) = table(&vec![string("yehuda::jonathan::andres")]).into_parts();
let actual = sample
.tagged(tag.clone())
.replace_data_at_column_path(&field_path, replacement.clone())
.unwrap();
assert_eq!(
actual,
Value::row(indexmap! {
"package".into() => row(indexmap! {
"authors".into() => row(indexmap! {
"los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]),
"los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]),
"los.3.caballeros".into() => replacement.tagged(&tag)})})})
.tagged(tag)
);
}
#[test]
fn replaces_matching_field_from_rows_inside_a_table() {
let field_path = column_path(&vec![
string("shell_policy"),
string("releases"),
string("nu.version.arepa"),
]);
let sample = Value::row(indexmap! {
"shell_policy".into() => row(indexmap! {
"releases".into() => table(&vec![
row(indexmap! {
"nu.version.arepa".into() => row(indexmap! {
"code".into() => string("0.4.0"), "tag_line".into() => string("GitHub-era")
})
}),
row(indexmap! {
"nu.version.taco".into() => row(indexmap! {
"code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era")
})
}),
row(indexmap! {
"nu.version.stable".into() => row(indexmap! {
"code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era")
})
})
])
})
});
let (replacement, tag) = row(indexmap! {
"code".into() => string("0.5.0"),
"tag_line".into() => string("CABALLEROS")
})
.into_parts();
let actual = sample
.tagged(tag.clone())
.replace_data_at_column_path(&field_path, replacement.clone())
.unwrap();
assert_eq!(
actual,
Value::row(indexmap! {
"shell_policy".into() => row(indexmap! {
"releases".into() => table(&vec![
row(indexmap! {
"nu.version.arepa".into() => replacement.tagged(&tag)
}),
row(indexmap! {
"nu.version.taco".into() => row(indexmap! {
"code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era")
})
}),
row(indexmap! {
"nu.version.stable".into() => row(indexmap! {
"code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era")
})
})
])
})
}).tagged(&tag)
);
}
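
// Sanity-check sketch for the private `format_duration` helper above: leading zero
// components are dropped from the rendered duration.
#[test]
fn formats_durations_without_leading_zero_components() {
assert_eq!(super::format_duration(1), "1 sec");
assert_eq!(super::format_duration(90), "1:30");
assert_eq!(super::format_duration(3661), "1:01:01");
assert_eq!(super::format_duration(90061), "1:01:01:01");
}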
}