Bump polars from 0.35 to 0.36 (#11624)

# Description
* Release notes: https://github.com/pola-rs/polars/releases/tag/rs-0.36.2
* Dependencies:
  * remove `sysinfo` 0.29.11
  * add `polars-compute` 0.36.2

# User-Facing Changes
The column produced by `value_counts` is now named `count` instead of `counts` ([pola-rs/polars#12506](https://github.com/pola-rs/polars/pull/12506)).
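
For illustration, a minimal sketch of how the rename surfaces in Nushell (the input data and the summarized output below are assumed for the example, not taken from this PR):

```nushell
# Tally values in a column; the tally column is now called `count` (it was `counts` with polars 0.35).
[5 5 5 5 6 6] | dfr into-df | dfr value-counts
# => two columns: the distinct values (5, 6) and `count` (4, 2)
```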

# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.

Make sure you've run and fixed any issues with these commands:

- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to
check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make
sure to [enable developer
mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use std testing; testing run-tests --path
crates/nu-std"` to run the tests for the standard library

> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it automatically
> toolkit check pr
> ```
-->

# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
nibon7 authored on 2024-01-24 23:27:06 +08:00; committed by GitHub (commit a44ad949f1, parent 4105255a5a)
30 changed files with 345 additions and 228 deletions

Cargo.lock (generated, 104 changed lines)

@@ -2854,7 +2854,7 @@ dependencies = [
  "percent-encoding",
  "reedline",
  "rstest",
- "sysinfo 0.30.5",
+ "sysinfo",
  "unicode-segmentation",
  "uuid",
  "which",
@@ -2895,6 +2895,7 @@ dependencies = [
  "polars-io",
  "polars-ops",
  "polars-plan",
+ "polars-utils",
  "serde",
  "sqlparser",
 ]
@@ -3032,7 +3033,7 @@ dependencies = [
  "serde_urlencoded",
  "serde_yaml",
  "sha2",
- "sysinfo 0.30.5",
+ "sysinfo",
  "tabled",
  "terminal_size 0.3.0",
  "titlecase",
@@ -3218,7 +3219,7 @@ dependencies = [
  "ntapi",
  "once_cell",
  "procfs",
- "sysinfo 0.30.5",
+ "sysinfo",
  "windows 0.52.0",
 ]
@@ -3933,9 +3934,9 @@ dependencies = [
 [[package]]
 name = "polars"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df8e52f9236eb722da0990a70bbb1216dcc7a77bcb00c63439d2d982823e90d5"
+checksum = "938048fcda6a8e2ace6eb168bee1b415a92423ce51e418b853bf08fc40349b6b"
 dependencies = [
  "getrandom",
  "polars-core",
@@ -3949,9 +3950,9 @@ dependencies = [
 [[package]]
 name = "polars-arrow"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd503430a6d9779b07915d858865fe998317ef3cfef8973881f578ac5d4baae7"
+checksum = "ce68a02f698ff7787c261aea1b4c040a8fe183a8fb200e2436d7f35d95a1b86f"
 dependencies = [
  "ahash 0.8.7",
  "arrow-format",
@@ -3973,20 +3974,33 @@ dependencies = [
  "num-traits",
  "polars-error",
  "polars-utils",
+ "rustc_version",
  "ryu",
  "serde",
  "simdutf8",
  "streaming-iterator",
  "strength_reduce",
+ "version_check",
  "zstd",
 ]

 [[package]]
-name = "polars-core"
-version = "0.35.4"
+name = "polars-compute"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae73d5b8e55decde670caba1cc82b61f14bfb9a72503198f0997d657a98dcfd6"
+checksum = "b14fbc5f141b29b656a4cec4802632e5bff10bf801c6809c6bbfbd4078a044dd"
+dependencies = [
+ "bytemuck",
+ "num-traits",
+ "polars-arrow",
+ "polars-utils",
+ "version_check",
+]
+
+[[package]]
+name = "polars-core"
+version = "0.36.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0f5efe734b6cbe5f97ea769be8360df5324fade396f1f3f5ad7fe9360ca4a23"
 dependencies = [
  "ahash 0.8.7",
  "bitflags 2.4.2",
@@ -3999,6 +4013,7 @@ dependencies = [
  "num-traits",
  "once_cell",
  "polars-arrow",
+ "polars-compute",
  "polars-error",
  "polars-row",
  "polars-utils",
@@ -4016,9 +4031,9 @@ dependencies = [
 [[package]]
 name = "polars-error"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb0520d68eaa9993ae0c741409d1526beff5b8f48e1d73e4381616f8152cf488"
+checksum = "6396de788f99ebfc9968e7b6f523e23000506cde4ba6dfc62ae4ce949002a886"
 dependencies = [
  "arrow-format",
  "avro-schema",
@@ -4029,9 +4044,9 @@ dependencies = [
 [[package]]
 name = "polars-io"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96e10a0745acd6009db64bef0ceb9e23a70b1c27b26a0a6517c91f3e6363bc06"
+checksum = "7d0458efe8946f4718fd352f230c0db5a37926bd0d2bd25af79dc24746abaaea"
 dependencies = [
  "ahash 0.8.7",
  "async-trait",
@@ -4070,9 +4085,9 @@ dependencies = [
 [[package]]
 name = "polars-json"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93b9cb83c19daf334c398e56a9361bd79c8ad0718296db2afab08d476bd84559"
+checksum = "ea47d46b7a98fa683ef235ad48b783abf61734828e754096cfbdc77404fff9b3"
 dependencies = [
  "ahash 0.8.7",
  "chrono",
@@ -4091,9 +4106,9 @@ dependencies = [
 [[package]]
 name = "polars-lazy"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3555f759705be6dd0d3762d16a0b8787b2dc4da73b57465f3b2bf1a070ba8f20"
+checksum = "9d7105b40905bb38e8fc4a7fd736594b7491baa12fad3ac492969ca221a1b5d5"
 dependencies = [
  "ahash 0.8.7",
  "bitflags 2.4.2",
@@ -4115,9 +4130,9 @@ dependencies = [
 [[package]]
 name = "polars-ops"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a7eb218296aaa7f79945f08288ca32ca3cf25fa505649eeee689ec21eebf636"
+checksum = "2e09afc456ab11e75e5dcb43e00a01c71f3a46a2781e450054acb6bb096ca78e"
 dependencies = [
  "ahash 0.8.7",
  "argminmax",
@@ -4128,6 +4143,7 @@ dependencies = [
  "memchr",
  "num-traits",
  "polars-arrow",
+ "polars-compute",
  "polars-core",
  "polars-error",
  "polars-utils",
@@ -4142,9 +4158,9 @@ dependencies = [
 [[package]]
 name = "polars-parquet"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "146010e4b7dd4d2d0e58ddc762f6361f77d7a0385c54471199370c17164f67dd"
+checksum = "7ba24d67b1f64ab85143033dd46fa090b13c0f74acdf91b0780c16aecf005e3d"
 dependencies = [
  "ahash 0.8.7",
  "async-stream",
@@ -4168,9 +4184,9 @@ dependencies = [
 [[package]]
 name = "polars-pipe"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66094e7df64c932a9a7bdfe7df0c65efdcb192096e11a6a765a9778f78b4bdec"
+checksum = "d9b7ead073cc3917027d77b59861a9f071db47125de9314f8907db1a0a3e4100"
 dependencies = [
  "crossbeam-channel",
  "crossbeam-queue",
@@ -4178,6 +4194,7 @@ dependencies = [
  "hashbrown 0.14.3",
  "num-traits",
  "polars-arrow",
+ "polars-compute",
  "polars-core",
  "polars-io",
  "polars-ops",
@@ -4191,9 +4208,9 @@ dependencies = [
 [[package]]
 name = "polars-plan"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10e32a0958ef854b132bad7f8369cb3237254635d5e864c99505bc0bc1035fbc"
+checksum = "384a175624d050c31c473ee11df9d7af5d729ae626375e522158cfb3d150acd0"
 dependencies = [
  "ahash 0.8.7",
  "bytemuck",
@@ -4202,6 +4219,7 @@ dependencies = [
  "polars-arrow",
  "polars-core",
  "polars-io",
+ "polars-json",
  "polars-ops",
  "polars-parquet",
  "polars-time",
@@ -4216,9 +4234,9 @@ dependencies = [
 [[package]]
 name = "polars-row"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d135ab81cac2906ba74ea8984c7e6025d081ae5867615bcefb4d84dfdb456dac"
+checksum = "32322f7acbb83db3e9c7697dc821be73d06238da89c817dcc8bc1549a5e9c72f"
 dependencies = [
  "polars-arrow",
  "polars-error",
@@ -4227,9 +4245,9 @@ dependencies = [
 [[package]]
 name = "polars-sql"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8dbd7786849a5e3ad1fde188bf38141632f626e3a57319b0bbf7a5f1d75519e"
+checksum = "9f0b4c6ddffdfd0453e84bc3918572c633014d661d166654399cf93752aa95b5"
 dependencies = [
  "polars-arrow",
  "polars-core",
@@ -4244,9 +4262,9 @@ dependencies = [
 [[package]]
 name = "polars-time"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aae56f79e9cedd617773c1c8f5ca84a31a8b1d593714959d5f799e7bdd98fe51"
+checksum = "dee2649fc96bd1b6584e0e4a4b3ca7d22ed3d117a990e63ad438ecb26f7544d0"
 dependencies = [
  "atoi",
  "chrono",
@@ -4264,9 +4282,9 @@ dependencies = [
 [[package]]
 name = "polars-utils"
-version = "0.35.4"
+version = "0.36.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da6ce68169fe61d46958c8eab7447360f30f2f23f6e24a0ce703a14b0a3cfbfc"
+checksum = "b174ca4a77ad47d7b91a0460aaae65bbf874c8bfbaaa5308675dadef3976bbda"
 dependencies = [
  "ahash 0.8.7",
  "bytemuck",
@@ -4277,7 +4295,7 @@ dependencies = [
  "polars-error",
  "rayon",
  "smartstring",
- "sysinfo 0.29.11",
+ "sysinfo",
  "version_check",
 ]
@@ -5575,20 +5593,6 @@ dependencies = [
  "libc",
 ]

-[[package]]
-name = "sysinfo"
-version = "0.29.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd727fc423c2060f6c92d9534cef765c65a6ed3f428a03d7def74a8c4348e666"
-dependencies = [
- "cfg-if",
- "core-foundation-sys",
- "libc",
- "ntapi",
- "once_cell",
- "winapi",
-]
-
 [[package]]
 name = "sysinfo"
 version = "0.30.5"

--- next file ---

@@ -25,10 +25,11 @@ indexmap = { version = "2.1" }
 num = { version = "0.4", optional = true }
 serde = { version = "1.0", features = ["derive"] }
 sqlparser = { version = "0.39", optional = true }
-polars-io = { version = "0.35", features = ["avro"], optional = true }
-polars-arrow = { version = "0.35", optional = true }
-polars-ops = { version = "0.35", optional = true }
-polars-plan = { version = "0.35", optional = true }
+polars-io = { version = "0.36", features = ["avro"], optional = true }
+polars-arrow = { version = "0.36", optional = true }
+polars-ops = { version = "0.36", optional = true }
+polars-plan = { version = "0.36", optional = true }
+polars-utils = { version = "0.36", optional = true }

 [dependencies.polars]
 features = [
@@ -62,10 +63,10 @@ features = [
     "to_dummies",
 ]
 optional = true
-version = "0.35"
+version = "0.36"

 [features]
-dataframe = ["num", "polars", "polars-io", "polars-arrow", "polars-ops", "polars-plan", "sqlparser"]
+dataframe = ["num", "polars", "polars-io", "polars-arrow", "polars-ops", "polars-plan", "polars-utils", "sqlparser"]
 default = []

 [dev-dependencies]

--- next file ---

@@ -52,7 +52,6 @@ pub use sample::SampleDF;
 pub use shape::ShapeDF;
 pub use slice::SliceDF;
 pub use sql_context::SQLContext;
-pub use sql_expr::parse_sql_expr;
 pub use summary::Summary;
 pub use take::TakeDF;
 pub use to_arrow::ToArrow;

--- next file ---

@@ -13,7 +13,7 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
         | SQLDataType::Uuid
         | SQLDataType::Clob(_)
         | SQLDataType::Text
-        | SQLDataType::String(_) => DataType::Utf8,
+        | SQLDataType::String(_) => DataType::String,
         SQLDataType::Float(_) => DataType::Float32,
         SQLDataType::Real => DataType::Float32,
         SQLDataType::Double => DataType::Float64,
@@ -62,7 +62,9 @@ fn binary_op_(left: Expr, right: Expr, op: &SQLBinaryOperator) -> Result<Expr> {
         SQLBinaryOperator::Multiply => left * right,
         SQLBinaryOperator::Divide => left / right,
         SQLBinaryOperator::Modulo => left % right,
-        SQLBinaryOperator::StringConcat => left.cast(DataType::Utf8) + right.cast(DataType::Utf8),
+        SQLBinaryOperator::StringConcat => {
+            left.cast(DataType::String) + right.cast(DataType::String)
+        }
         SQLBinaryOperator::Gt => left.gt(right),
         SQLBinaryOperator::Lt => left.lt(right),
         SQLBinaryOperator::GtEq => left.gt_eq(right),

--- next file ---

@@ -10,7 +10,7 @@ use polars::{
     chunked_array::ChunkedArray,
     prelude::{
         AnyValue, DataFrame, DataType, Float64Type, IntoSeries, NewChunkedArray,
-        QuantileInterpolOptions, Series, Utf8Type,
+        QuantileInterpolOptions, Series, StringType,
     },
 };
@@ -171,7 +171,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;

-    let names = ChunkedArray::<Utf8Type>::from_slice_options("descriptor", &labels).into_series();
+    let names = ChunkedArray::<StringType>::from_slice_options("descriptor", &labels).into_series();

     let head = std::iter::once(names);
@@ -179,17 +179,18 @@
         .as_ref()
         .get_columns()
         .iter()
-        .filter(|col| col.dtype() != &DataType::Object("object"))
+        .filter(|col| !matches!(col.dtype(), &DataType::Object("object", _)))
         .map(|col| {
             let count = col.len() as f64;

-            let sum = col
-                .sum_as_series()
-                .cast(&DataType::Float64)
-                .ok()
-                .and_then(|ca| match ca.get(0) {
-                    Ok(AnyValue::Float64(v)) => Some(v),
-                    _ => None,
-                });
+            let sum = col.sum_as_series().ok().and_then(|series| {
+                series
+                    .cast(&DataType::Float64)
+                    .ok()
+                    .and_then(|ca| match ca.get(0) {
+                        Ok(AnyValue::Float64(v)) => Some(v),
+                        _ => None,
+                    })
+            });

             let mean = match col.mean_as_series().get(0) {
@@ -197,23 +198,30 @@
                 _ => None,
             };

-            let median = match col.median_as_series().get(0) {
-                Ok(AnyValue::Float64(v)) => Some(v),
-                _ => None,
+            let median = match col.median_as_series() {
+                Ok(v) => match v.get(0) {
+                    Ok(AnyValue::Float64(v)) => Some(v),
+                    _ => None,
+                },
+                _ => None,
             };

-            let std = match col.std_as_series(0).get(0) {
-                Ok(AnyValue::Float64(v)) => Some(v),
-                _ => None,
+            let std = match col.std_as_series(0) {
+                Ok(v) => match v.get(0) {
+                    Ok(AnyValue::Float64(v)) => Some(v),
+                    _ => None,
+                },
+                _ => None,
             };

-            let min = col
-                .min_as_series()
-                .cast(&DataType::Float64)
-                .ok()
-                .and_then(|ca| match ca.get(0) {
-                    Ok(AnyValue::Float64(v)) => Some(v),
-                    _ => None,
-                });
+            let min = col.min_as_series().ok().and_then(|series| {
+                series
+                    .cast(&DataType::Float64)
+                    .ok()
+                    .and_then(|ca| match ca.get(0) {
+                        Ok(AnyValue::Float64(v)) => Some(v),
+                        _ => None,
+                    })
+            });

             let mut quantiles = quantiles
@@ -230,13 +238,14 @@
                 })
                 .collect::<Vec<Option<f64>>>();

-            let max = col
-                .max_as_series()
-                .cast(&DataType::Float64)
-                .ok()
-                .and_then(|ca| match ca.get(0) {
-                    Ok(AnyValue::Float64(v)) => Some(v),
-                    _ => None,
-                });
+            let max = col.max_as_series().ok().and_then(|series| {
+                series
+                    .cast(&DataType::Float64)
+                    .ok()
+                    .and_then(|ca| match ca.get(0) {
+                        Ok(AnyValue::Float64(v)) => Some(v),
+                        _ => None,
+                    })
+            });

             let mut descriptors = vec![Some(count), sum, mean, median, std, min];

--- next file ---

@@ -9,6 +9,11 @@ use nu_protocol::{
     Category, Example, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape, Type,
     Value,
 };
+use polars::{
+    datatypes::{DataType, TimeUnit},
+    prelude::NamedFrom,
+    series::Series,
+};

 #[derive(Clone)]
 pub struct ExprDatePart;
@@ -66,16 +71,21 @@ impl Command for ExprDatePart {
         (dfr col datetime | dfr datepart second | dfr as datetime_second ),
         (dfr col datetime | dfr datepart nanosecond | dfr as datetime_ns ) ]"#,
             result: Some(
-                NuDataFrame::try_from_columns(vec![
-                    Column::new("datetime".to_string(), vec![Value::test_date(dt)]),
-                    Column::new("datetime_year".to_string(), vec![Value::test_int(2021)]),
-                    Column::new("datetime_month".to_string(), vec![Value::test_int(12)]),
-                    Column::new("datetime_day".to_string(), vec![Value::test_int(30)]),
-                    Column::new("datetime_hour".to_string(), vec![Value::test_int(1)]),
-                    Column::new("datetime_minute".to_string(), vec![Value::test_int(2)]),
-                    Column::new("datetime_second".to_string(), vec![Value::test_int(3)]),
-                    Column::new("datetime_ns".to_string(), vec![Value::test_int(123456789)]),
-                ])
+                NuDataFrame::try_from_series(
+                    vec![
+                        Series::new("datetime", &[dt.timestamp_nanos_opt()])
+                            .cast(&DataType::Datetime(TimeUnit::Nanoseconds, None))
+                            .expect("Error casting to datetime type"),
+                        Series::new("datetime_year", &[2021_i64]), // i32 was coerced to i64
+                        Series::new("datetime_month", &[12_i8]),
+                        Series::new("datetime_day", &[30_i8]),
+                        Series::new("datetime_hour", &[1_i8]),
+                        Series::new("datetime_minute", &[2_i8]),
+                        Series::new("datetime_second", &[3_i8]),
+                        Series::new("datetime_ns", &[123456789_i64]), // i32 was coerced to i64
+                    ],
+                    Span::test_data(),
+                )
                 .expect("simple df for test should not fail")
                 .into_value(Span::test_data()),
             ),

--- next file ---

@@ -179,7 +179,18 @@ macro_rules! lazy_expr_command {
                 let value = input.into_value(call.head);
                 if NuDataFrame::can_downcast(&value) {
                     let lazy = NuLazyFrame::try_from_value(value)?;
-                    let lazy = NuLazyFrame::new(lazy.from_eager, lazy.into_polars().$func());
+                    let lazy = NuLazyFrame::new(
+                        lazy.from_eager,
+                        lazy.into_polars()
+                            .$func()
+                            .map_err(|e| ShellError::GenericError {
+                                error: "Dataframe Error".into(),
+                                msg: e.to_string(),
+                                help: None,
+                                span: None,
+                                inner: vec![],
+                            })?,
+                    );

                     Ok(PipelineData::Value(lazy.into_value(call.head)?, None))
                 } else {
@@ -267,7 +278,18 @@ macro_rules! lazy_expr_command {
                 let value = input.into_value(call.head);
                 if NuDataFrame::can_downcast(&value) {
                     let lazy = NuLazyFrame::try_from_value(value)?;
-                    let lazy = NuLazyFrame::new(lazy.from_eager, lazy.into_polars().$func($ddof));
+                    let lazy = NuLazyFrame::new(
+                        lazy.from_eager,
+                        lazy.into_polars()
+                            .$func($ddof)
+                            .map_err(|e| ShellError::GenericError {
+                                error: "Dataframe Error".into(),
+                                msg: e.to_string(),
+                                help: None,
+                                span: None,
+                                inner: vec![],
+                            })?,
+                    );

                     Ok(PipelineData::Value(lazy.into_value(call.head)?, None))
                 } else {

--- next file ---

@@ -160,7 +160,7 @@ fn get_col_name(expr: &Expr) -> Option<String> {
             | polars::prelude::AggExpr::Last(e)
             | polars::prelude::AggExpr::Mean(e)
             | polars::prelude::AggExpr::Implode(e)
-            | polars::prelude::AggExpr::Count(e)
+            | polars::prelude::AggExpr::Count(e, _)
             | polars::prelude::AggExpr::Sum(e)
             | polars::prelude::AggExpr::AggGroups(e)
             | polars::prelude::AggExpr::Std(e, _)

--- next file ---

@@ -178,7 +178,7 @@ impl Command for LazyJoin {
         let how = if left {
             JoinType::Left
         } else if outer {
-            JoinType::Outer
+            JoinType::Outer { coalesce: true }
         } else if cross {
             JoinType::Cross
         } else {

--- next file ---

@@ -112,6 +112,70 @@ macro_rules! lazy_command {
             }
         }
     };
+
+    ($command: ident, $name: expr, $desc: expr, $examples: expr, $func: ident?, $test: ident) => {
+        #[derive(Clone)]
+        pub struct $command;
+
+        impl Command for $command {
+            fn name(&self) -> &str {
+                $name
+            }
+
+            fn usage(&self) -> &str {
+                $desc
+            }
+
+            fn signature(&self) -> Signature {
+                Signature::build(self.name())
+                    .input_output_type(
+                        Type::Custom("dataframe".into()),
+                        Type::Custom("dataframe".into()),
+                    )
+                    .category(Category::Custom("lazyframe".into()))
+            }
+
+            fn examples(&self) -> Vec<Example> {
+                $examples
+            }
+
+            fn run(
+                &self,
+                _engine_state: &EngineState,
+                _stack: &mut Stack,
+                call: &Call,
+                input: PipelineData,
+            ) -> Result<PipelineData, ShellError> {
+                let lazy = NuLazyFrame::try_from_pipeline(input, call.head)?;
+                let lazy = NuLazyFrame::new(
+                    lazy.from_eager,
+                    lazy.into_polars()
+                        .$func()
+                        .map_err(|e| ShellError::GenericError {
+                            error: "Dataframe Error".into(),
+                            msg: e.to_string(),
+                            help: None,
+                            span: None,
+                            inner: vec![],
+                        })?,
+                );
+
+                Ok(PipelineData::Value(lazy.into_value(call.head)?, None))
+            }
+        }
+
+        #[cfg(test)]
+        mod $test {
+            use super::super::super::test_dataframe::test_dataframe;
+            use super::*;
+
+            #[test]
+            fn test_examples() {
+                test_dataframe(vec![Box::new($command {})])
+            }
+        }
+    };
 }

 // LazyReverse command
@@ -175,6 +239,6 @@
             .into_value(Span::test_data()),
         ),
     },],
-    median,
+    median?,
     test_median
 );

--- next file ---

@@ -62,7 +62,14 @@ impl Command for LazyQuantile {
         let lazy = NuLazyFrame::new(
             lazy.from_eager,
             lazy.into_polars()
-                .quantile(lit(quantile), QuantileInterpolOptions::default()),
+                .quantile(lit(quantile), QuantileInterpolOptions::default())
+                .map_err(|e| ShellError::GenericError {
+                    error: "Dataframe Error".into(),
+                    msg: e.to_string(),
+                    help: None,
+                    span: None,
+                    inner: vec![],
+                })?,
         );

         Ok(PipelineData::Value(lazy.into_value(call.head)?, None))

--- next file ---

@@ -108,7 +108,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    if let DataType::Object(_) = series.dtype() {
+    if let DataType::Object(..) = series.dtype() {
         return Err(ShellError::GenericError {
             error: "Found object series".into(),
             msg: "Series of type object cannot be used for cumulative operation".into(),

--- next file ---

@@ -6,7 +6,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, SyntaxShape, Type,
 };
-use polars::prelude::{IntoSeries, Utf8Methods};
+use polars::prelude::{IntoSeries, StringMethods};

 #[derive(Clone)]
 pub struct AsDate;
@@ -68,7 +68,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    let casted = series.utf8().map_err(|e| ShellError::GenericError {
+    let casted = series.str().map_err(|e| ShellError::GenericError {
         error: "Error casting to string".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -7,7 +7,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
 };
-use polars::prelude::{IntoSeries, TimeUnit, Utf8Methods};
+use polars::prelude::{IntoSeries, StringMethods, TimeUnit};

 #[derive(Clone)]
 pub struct AsDateTime;
@@ -132,7 +132,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    let casted = series.utf8().map_err(|e| ShellError::GenericError {
+    let casted = series.str().map_err(|e| ShellError::GenericError {
         error: "Error casting to string".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -121,8 +121,7 @@ fn command(
     let series = df.as_series(call.head)?;
     let span = value.span();
-    let res =
-        match value {
+    let res = match value {
         Value::Int { val, .. } => {
             let chunked = series.i64().map_err(|e| ShellError::GenericError {
                 error: "Error casting to i64".into(),
@@ -132,7 +131,7 @@
                 inner: vec![],
             })?;

-            let res = chunked.set_at_idx(indices, Some(val)).map_err(|e| {
+            let res = chunked.scatter_single(indices, Some(val)).map_err(|e| {
                 ShellError::GenericError {
                     error: "Error setting value".into(),
                     msg: e.to_string(),
@@ -153,7 +152,7 @@
                 inner: vec![],
            })?;

-            let res = chunked.set_at_idx(indices, Some(val)).map_err(|e| {
+            let res = chunked.scatter_single(indices, Some(val)).map_err(|e| {
                 ShellError::GenericError {
                     error: "Error setting value".into(),
                     msg: e.to_string(),
@@ -166,7 +165,7 @@
             NuDataFrame::try_from_series(vec![res.into_series()], call.head)
         }
         Value::String { val, .. } => {
-            let chunked = series.utf8().map_err(|e| ShellError::GenericError {
+            let chunked = series.str().map_err(|e| ShellError::GenericError {
                 error: "Error casting to string".into(),
                 msg: e.to_string(),
                 span: Some(span),
@@ -175,7 +174,7 @@
            })?;

             let res = chunked
-                .set_at_idx(indices, Some(val.as_ref()))
+                .scatter_single(indices, Some(val.as_ref()))
                 .map_err(|e| ShellError::GenericError {
                     error: "Error setting value".into(),
                     msg: e.to_string(),

--- next file ---

@@ -148,7 +148,7 @@ fn command(
             NuDataFrame::try_from_series(vec![res.into_series()], call.head)
         }
         Value::String { val, .. } => {
-            let chunked = series.utf8().map_err(|e| ShellError::GenericError {
+            let chunked = series.str().map_err(|e| ShellError::GenericError {
                 error: "Error casting to string".into(),
                 msg: e.to_string(),
                 span: Some(span),

--- next file ---

@@ -128,7 +128,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    if let DataType::Object(_) = series.dtype() {
+    if let DataType::Object(..) = series.dtype() {
         return Err(ShellError::GenericError {
             error: "Found object series".into(),
             msg: "Series of type object cannot be used for rolling operation".into(),

--- next file ---

@@ -6,7 +6,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
 };
-use polars::prelude::{IntoSeries, Utf8NameSpaceImpl};
+use polars::prelude::{IntoSeries, StringNameSpaceImpl};

 #[derive(Clone)]
 pub struct Concatenate;
@@ -78,7 +78,7 @@ fn command(
     let other_df = NuDataFrame::try_from_value(other)?;
     let other_series = other_df.as_series(other_span)?;

-    let other_chunked = other_series.utf8().map_err(|e| ShellError::GenericError {
+    let other_chunked = other_series.str().map_err(|e| ShellError::GenericError {
         error: "The concatenate only with string columns".into(),
         msg: e.to_string(),
         span: Some(other_span),
@@ -87,7 +87,7 @@
     })?;

     let series = df.as_series(call.head)?;

-    let chunked = series.utf8().map_err(|e| ShellError::GenericError {
+    let chunked = series.str().map_err(|e| ShellError::GenericError {
         error: "The concatenate only with string columns".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -6,7 +6,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
 };
-use polars::prelude::{IntoSeries, Utf8NameSpaceImpl};
+use polars::prelude::{IntoSeries, StringNameSpaceImpl};

 #[derive(Clone)]
 pub struct Contains;
@@ -74,7 +74,7 @@ fn command(
     let pattern: String = call.req(engine_state, stack, 0)?;
     let series = df.as_series(call.head)?;

-    let chunked = series.utf8().map_err(|e| ShellError::GenericError {
+    let chunked = series.str().map_err(|e| ShellError::GenericError {
         error: "The contains command only with string columns".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -6,7 +6,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
 };
-use polars::prelude::{IntoSeries, Utf8NameSpaceImpl};
+use polars::prelude::{IntoSeries, StringNameSpaceImpl};

 #[derive(Clone)]
 pub struct Replace;
@@ -86,7 +86,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    let chunked = series.utf8().map_err(|e| ShellError::GenericError {
+    let chunked = series.str().map_err(|e| ShellError::GenericError {
         error: "Error conversion to string".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -6,7 +6,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
 };
-use polars::prelude::{IntoSeries, Utf8NameSpaceImpl};
+use polars::prelude::{IntoSeries, StringNameSpaceImpl};

 #[derive(Clone)]
 pub struct ReplaceAll;
@@ -86,7 +86,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    let chunked = series.utf8().map_err(|e| ShellError::GenericError {
+    let chunked = series.str().map_err(|e| ShellError::GenericError {
         error: "Error conversion to string".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -5,7 +5,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
 };
-use polars::prelude::{IntoSeries, Utf8NameSpaceImpl};
+use polars::prelude::{IntoSeries, StringNameSpaceImpl};

 #[derive(Clone)]
 pub struct StrLengths;
@@ -63,7 +63,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    let chunked = series.utf8().map_err(|e| ShellError::GenericError {
+    let chunked = series.str().map_err(|e| ShellError::GenericError {
         error: "Error casting to string".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -6,7 +6,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
 };
-use polars::prelude::{IntoSeries, Utf8NameSpaceImpl};
+use polars::prelude::{IntoSeries, StringNameSpaceImpl};

 #[derive(Clone)]
 pub struct StrSlice;
@@ -75,7 +75,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    let chunked = series.utf8().map_err(|e| ShellError::GenericError {
+    let chunked = series.str().map_err(|e| ShellError::GenericError {
         error: "Error casting to string".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -5,7 +5,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
 };
-use polars::prelude::{IntoSeries, Utf8NameSpaceImpl};
+use polars::prelude::{IntoSeries, StringNameSpaceImpl};

 #[derive(Clone)]
 pub struct ToLowerCase;
@@ -67,7 +67,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    let casted = series.utf8().map_err(|e| ShellError::GenericError {
+    let casted = series.str().map_err(|e| ShellError::GenericError {
         error: "Error casting to string".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -5,7 +5,7 @@ use nu_protocol::{
     engine::{Command, EngineState, Stack},
     Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
 };
-use polars::prelude::{IntoSeries, Utf8NameSpaceImpl};
+use polars::prelude::{IntoSeries, StringNameSpaceImpl};

 #[derive(Clone)]
 pub struct ToUpperCase;
@@ -71,7 +71,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;
     let series = df.as_series(call.head)?;

-    let casted = series.utf8().map_err(|e| ShellError::GenericError {
+    let casted = series.str().map_err(|e| ShellError::GenericError {
         error: "Error casting to string".into(),
         msg: e.to_string(),
         span: Some(call.head),

--- next file ---

@@ -40,7 +40,7 @@ impl Command for ValueCount {
                     vec![Value::test_int(5), Value::test_int(6)],
                 ),
                 Column::new(
-                    "counts".to_string(),
+                    "count".to_string(),
                     vec![Value::test_int(4), Value::test_int(2)],
                 ),
             ])

--- next file ---

@@ -6,7 +6,7 @@ use nu_protocol::{
 use num::Zero;
 use polars::prelude::{
     BooleanType, ChunkCompare, ChunkedArray, DataType, Float64Type, Int64Type, IntoSeries,
-    NumOpsDispatchChecked, PolarsError, Series, Utf8NameSpaceImpl,
+    NumOpsDispatchChecked, PolarsError, Series, StringNameSpaceImpl,
 };
 use std::ops::{Add, BitAnd, BitOr, Div, Mul, Sub};
@@ -721,7 +721,7 @@
 }

 fn contains_series_pat(series: &Series, pat: &str, span: Span) -> Result<Value, ShellError> {
-    let casted = series.utf8();
+    let casted = series.str();
     match casted {
         Ok(casted) => {
             let res = casted.contains(pat, false);
@@ -751,7 +751,7 @@ fn contains_series_pat(series: &Series, pat: &str, span: Span) -> Result<Value,
 }

 fn add_string_to_series(series: &Series, pat: &str, span: Span) -> Result<Value, ShellError> {
-    let casted = series.utf8();
+    let casted = series.str();
     match casted {
         Ok(casted) => {
             let res = casted + pat;

--- next file ---

@@ -10,9 +10,9 @@ use polars::datatypes::AnyValue;
 use polars::export::arrow::Either;
 use polars::prelude::{
     DataFrame, DataType, DatetimeChunked, Float64Type, Int64Type, IntoSeries,
-    ListBooleanChunkedBuilder, ListBuilderTrait, ListPrimitiveChunkedBuilder, ListType,
-    ListUtf8ChunkedBuilder, NamedFrom, NewChunkedArray, ObjectType, Series, TemporalMethods,
-    TimeUnit,
+    ListBooleanChunkedBuilder, ListBuilderTrait, ListPrimitiveChunkedBuilder,
+    ListStringChunkedBuilder, ListType, NamedFrom, NewChunkedArray, ObjectType, Series,
+    TemporalMethods, TimeUnit,
 };

 use nu_protocol::{Record, ShellError, Span, Value};
@@ -387,7 +387,7 @@ fn input_type_list_to_series(
             Ok(res.into_series())
         }
         InputType::String => {
-            let mut builder = ListUtf8ChunkedBuilder::new(name, values.len(), VALUES_CAPACITY);
+            let mut builder = ListStringChunkedBuilder::new(name, values.len(), VALUES_CAPACITY);
             for v in values {
                 let value_list = v
                     .as_list()?
@@ -713,8 +713,8 @@ fn series_to_values(
             Ok(values)
         }
-        DataType::Utf8 => {
-            let casted = series.utf8().map_err(|e| ShellError::GenericError {
+        DataType::String => {
+            let casted = series.str().map_err(|e| ShellError::GenericError {
                 error: "Error casting column to string".into(),
                 msg: "".into(),
                 span: None,
@@ -736,7 +736,7 @@ fn series_to_values(
             Ok(values)
         }
-        DataType::Object(x) => {
+        DataType::Object(x, _) => {
             let casted = series
                 .as_any()
                 .downcast_ref::<ChunkedArray<ObjectType<DataFrameValue>>>();
@@ -920,7 +920,7 @@ fn any_value_to_value(any_value: &AnyValue, span: Span) -> Result<Value, ShellEr
     match any_value {
         AnyValue::Null => Ok(Value::nothing(span)),
         AnyValue::Boolean(b) => Ok(Value::bool(*b, span)),
-        AnyValue::Utf8(s) => Ok(Value::string(s.to_string(), span)),
+        AnyValue::String(s) => Ok(Value::string(s.to_string(), span)),
         AnyValue::UInt8(i) => Ok(Value::int(*i as i64, span)),
         AnyValue::UInt16(i) => Ok(Value::int(*i as i64, span)),
         AnyValue::UInt32(i) => Ok(Value::int(*i as i64, span)),
@@ -986,7 +986,7 @@ fn any_value_to_value(any_value: &AnyValue, span: Span) -> Result<Value, ShellEr
                 internal_span: span,
             })
         }
-        AnyValue::Utf8Owned(s) => Ok(Value::string(s.to_string(), span)),
+        AnyValue::StringOwned(s) => Ok(Value::string(s.to_string(), span)),
         AnyValue::Binary(bytes) => Ok(Value::binary(*bytes, span)),
         AnyValue::BinaryOwned(bytes) => Ok(Value::binary(bytes.to_owned(), span)),
         e => Err(ShellError::GenericError {
@@ -1113,11 +1113,11 @@
         let test_str = "foo";

         assert_eq!(
-            any_value_to_value(&AnyValue::Utf8(test_str), span)?,
+            any_value_to_value(&AnyValue::String(test_str), span)?,
             Value::string(test_str.to_string(), span)
         );
         assert_eq!(
-            any_value_to_value(&AnyValue::Utf8Owned(test_str.into()), span)?,
+            any_value_to_value(&AnyValue::StringOwned(test_str.into()), span)?,
             Value::string(test_str.to_owned(), span)
         );

--- next file ---

@@ -9,7 +9,7 @@ pub use operations::Axis;
 use indexmap::map::IndexMap;
 use nu_protocol::{did_you_mean, PipelineData, Record, ShellError, Span, Value};
 use polars::prelude::{DataFrame, DataType, IntoLazy, LazyFrame, PolarsObject, Series};
-use polars_arrow::util::total_ord::TotalEq;
+use polars_utils::total_ord::TotalEq;
 use serde::{Deserialize, Serialize};
 use std::{cmp::Ordering, fmt::Display, hash::Hasher};
@@ -496,7 +496,7 @@ impl NuDataFrame {
                 _ => self_series.clone(),
             };

-            if !self_series.series_equal(other_series) {
+            if !self_series.equals(other_series) {
                 return None;
             }
         }

--- next file ---

@@ -225,7 +225,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Result<Value, ShellError> {
             | AggExpr::Last(expr)
             | AggExpr::Mean(expr)
             | AggExpr::Implode(expr)
-            | AggExpr::Count(expr)
+            | AggExpr::Count(expr, _)
            | AggExpr::Sum(expr)
             | AggExpr::AggGroups(expr)
             | AggExpr::Std(expr, _)