Upgrade to polars 0.43 (#14148)

Upgrades the polars plugin to polars version 0.43
This commit is contained in:
Jack Wright 2024-10-23 10:14:24 -07:00 committed by GitHub
parent e7c4597ad0
commit ae54d05930
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
59 changed files with 530 additions and 398 deletions

223
Cargo.lock generated
View File

@ -487,6 +487,17 @@ dependencies = [
"brotli-decompressor",
]
[[package]]
name = "brotli"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor",
]
[[package]]
name = "brotli-decompressor"
version = "4.0.1"
@ -720,12 +731,12 @@ dependencies = [
[[package]]
name = "chrono-tz"
version = "0.9.0"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93698b29de5e97ad0ae26447b344c482a7284c737d9ddc5f9e52b74a336671bb"
checksum = "cd6dd8046d00723a59a2f8c5f295c515b9bb9a331ee4f8f3d4dd49e428acd3b6"
dependencies = [
"chrono",
"chrono-tz-build 0.3.0",
"chrono-tz-build 0.4.0",
"phf 0.11.2",
]
@ -742,12 +753,11 @@ dependencies = [
[[package]]
name = "chrono-tz-build"
version = "0.3.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c088aee841df9c3041febbb73934cfc39708749bf96dc827e3359cd39ef11b1"
checksum = "e94fea34d77a245229e7746bd2beb786cd2a896f306ff491fb8cecb3074b10a7"
dependencies = [
"parse-zoneinfo",
"phf 0.11.2",
"phf_codegen 0.11.2",
]
@ -871,6 +881,21 @@ dependencies = [
"static_assertions",
]
[[package]]
name = "compact_str"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6050c3a16ddab2e412160b31f2c871015704239bca62f72f6e5f0be631d3f644"
dependencies = [
"castaway",
"cfg-if",
"itoa",
"rustversion",
"ryu",
"serde",
"static_assertions",
]
[[package]]
name = "console"
version = "0.15.8"
@ -1570,12 +1595,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "foreign_vec"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee1b05cbd864bcaecbd3455d6d967862d446e4ebfc3c2e5e5b9841e53cba6673"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
@ -3102,7 +3121,7 @@ dependencies = [
"alphanumeric-sort",
"base64 0.22.1",
"bracoxide",
"brotli",
"brotli 5.0.0",
"byteorder",
"bytesize",
"calamine",
@ -3201,7 +3220,7 @@ dependencies = [
"v_htmlescape",
"wax",
"which",
"windows 0.56.0",
"windows",
"winreg",
]
@ -3347,7 +3366,7 @@ dependencies = [
"rmp-serde",
"serde",
"serde_json",
"windows 0.56.0",
"windows",
]
[[package]]
@ -3363,7 +3382,7 @@ dependencies = [
"nu-utils",
"serde",
"typetag",
"windows 0.56.0",
"windows",
]
[[package]]
@ -3409,7 +3428,7 @@ dependencies = [
name = "nu-protocol"
version = "0.99.2"
dependencies = [
"brotli",
"brotli 5.0.0",
"byte-unit",
"bytes",
"chrono",
@ -3469,7 +3488,7 @@ dependencies = [
"once_cell",
"procfs",
"sysinfo 0.32.0",
"windows 0.56.0",
"windows",
]
[[package]]
@ -3581,7 +3600,7 @@ name = "nu_plugin_polars"
version = "0.99.2"
dependencies = [
"chrono",
"chrono-tz 0.9.0",
"chrono-tz 0.10.0",
"env_logger 0.11.5",
"fancy-regex",
"hashbrown 0.14.5",
@ -4263,9 +4282,9 @@ dependencies = [
[[package]]
name = "polars"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e3351ea4570e54cd556e6755b78fe7a2c85368d820c0307cca73c96e796a7ba"
checksum = "0e248cf2f0069277f8fe80d413cfb9240c7dd1cfa382b5674c1b4afa57222747"
dependencies = [
"getrandom",
"polars-arrow",
@ -4283,9 +4302,9 @@ dependencies = [
[[package]]
name = "polars-arrow"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba65fc4bcabbd64fca01fd30e759f8b2043f0963c57619e331d4b534576c0b47"
checksum = "2981d5b2f34c84069a39fceca0d36dffeb97db8cadba101e7ea6605c8d42294d"
dependencies = [
"ahash 0.8.11",
"atoi",
@ -4298,7 +4317,6 @@ dependencies = [
"either",
"ethnum",
"fast-float",
"foreign_vec",
"futures",
"getrandom",
"hashbrown 0.14.5",
@ -4307,8 +4325,10 @@ dependencies = [
"lz4",
"multiversion",
"num-traits",
"parking_lot",
"polars-arrow-format",
"polars-error",
"polars-schema",
"polars-utils",
"ryu",
"serde",
@ -4331,9 +4351,9 @@ dependencies = [
[[package]]
name = "polars-compute"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f099516af30ac9ae4b4480f4ad02aa017d624f2f37b7a16ad4e9ba52f7e5269"
checksum = "5a97b2a5c9b880ab7e52553c40a336fdb6e3244bf896b4d4917700defe8085d5"
dependencies = [
"bytemuck",
"either",
@ -4347,9 +4367,9 @@ dependencies = [
[[package]]
name = "polars-core"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2439484be228b8c302328e2f953e64cfd93930636e5c7ceed90339ece7fef6c"
checksum = "d5bc2cadcca904a9dc4d2c2b437c346712806e9a678bf17c7e94ebf622faae76"
dependencies = [
"ahash 0.8.11",
"bitflags 2.6.0",
@ -4366,6 +4386,7 @@ dependencies = [
"polars-compute",
"polars-error",
"polars-row",
"polars-schema",
"polars-utils",
"rand",
"rand_distr",
@ -4373,7 +4394,6 @@ dependencies = [
"regex",
"serde",
"serde_json",
"smartstring",
"thiserror",
"version_check",
"xxhash-rust",
@ -4381,9 +4401,9 @@ dependencies = [
[[package]]
name = "polars-error"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c9b06dfbe79cabe50a7f0a90396864b5ee2c0e0f8d6a9353b2343c29c56e937"
checksum = "56b0a8eb9b1e56a4640de6887d613cb4de73c4e09d491f3b779855d4c3bcb9ba"
dependencies = [
"avro-schema",
"polars-arrow-format",
@ -4394,14 +4414,15 @@ dependencies = [
[[package]]
name = "polars-expr"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c630385a56a867c410a20f30772d088f90ec3d004864562b84250b35268f97"
checksum = "34e9c0e8c7ba93aac64051b92dc68eac5a0e9543cf44ca784467db2c035821fe"
dependencies = [
"ahash 0.8.11",
"bitflags 2.6.0",
"once_cell",
"polars-arrow",
"polars-compute",
"polars-core",
"polars-io",
"polars-ops",
@ -4409,14 +4430,13 @@ dependencies = [
"polars-time",
"polars-utils",
"rayon",
"smartstring",
]
[[package]]
name = "polars-io"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d7363cd14e4696a28b334a56bd11013ff49cc96064818ab3f91a126e453462d"
checksum = "454ebbebe1cb8cb4768adca44b8fc9431abc3c91d5927f6824e73f916bced911"
dependencies = [
"ahash 0.8.11",
"async-trait",
@ -4426,6 +4446,8 @@ dependencies = [
"fast-float",
"flate2",
"futures",
"glob",
"hashbrown 0.14.5",
"home",
"itoa",
"memchr",
@ -4438,16 +4460,15 @@ dependencies = [
"polars-error",
"polars-json",
"polars-parquet",
"polars-schema",
"polars-time",
"polars-utils",
"rayon",
"regex",
"ryu",
"serde",
"serde_json",
"simd-json",
"simdutf8",
"smartstring",
"tokio",
"tokio-util",
"zstd",
@ -4455,9 +4476,9 @@ dependencies = [
[[package]]
name = "polars-json"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "543d7d3853f2c52dbfedee9ebf0d58c4ff3b92aadee5309150b2d14df49d6253"
checksum = "4ca086fbbff6e46efbc97032e93f92690c1fc9c662fd5e1f13a42922bd7d3aa4"
dependencies = [
"ahash 0.8.11",
"chrono",
@ -4476,13 +4497,12 @@ dependencies = [
[[package]]
name = "polars-lazy"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03877e74e42b5340ae52ded705f6d5d14563d90554c9177b01b91ed2412a56ed"
checksum = "7e61c062e833d2376de0a4cf745504449215cbf499cea293cb592e674ffb39ca"
dependencies = [
"ahash 0.8.11",
"bitflags 2.6.0",
"glob",
"memchr",
"once_cell",
"polars-arrow",
@ -4497,16 +4517,16 @@ dependencies = [
"polars-time",
"polars-utils",
"rayon",
"smartstring",
"version_check",
]
[[package]]
name = "polars-mem-engine"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dea9e17771af750c94bf959885e4b3f5b14149576c62ef3ec1c9ef5827b2a30f"
checksum = "c0643812829cc990e1533a5bf48c21a1b3eaa46aabf2967b0f53f99097cbc74c"
dependencies = [
"memmap2",
"polars-arrow",
"polars-core",
"polars-error",
@ -4522,9 +4542,9 @@ dependencies = [
[[package]]
name = "polars-ops"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6066552eb577d43b307027fb38096910b643ffb2c89a21628c7e41caf57848d0"
checksum = "5ac14a136d87bea798f3db51d5987556deb2293da34bfc8b105ebffa05f6e810"
dependencies = [
"ahash 0.8.11",
"argminmax",
@ -4544,6 +4564,7 @@ dependencies = [
"polars-core",
"polars-error",
"polars-json",
"polars-schema",
"polars-utils",
"rand",
"rand_distr",
@ -4551,24 +4572,25 @@ dependencies = [
"regex",
"serde",
"serde_json",
"smartstring",
"unicode-reverse",
"version_check",
]
[[package]]
name = "polars-parquet"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b35b2592a2e7ef7ce9942dc2120dc4576142626c0e661668e4c6b805042e461"
checksum = "491f5af321169259d5b1294c9fe8ed89faaeac34b4dec4abcedc0d1b3d11013a"
dependencies = [
"ahash 0.8.11",
"async-stream",
"base64 0.22.1",
"brotli",
"brotli 6.0.0",
"bytemuck",
"ethnum",
"flate2",
"futures",
"hashbrown 0.14.5",
"lz4",
"num-traits",
"parquet-format-safe",
@ -4585,13 +4607,14 @@ dependencies = [
[[package]]
name = "polars-pipe"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "021bce7768c330687d735340395a77453aa18dd70d57c184cbb302311e87c1b9"
checksum = "29215c31f599295cc0f803c42fc812cc518db6d5ed4d6c7cc03daf3976a0add5"
dependencies = [
"crossbeam-channel",
"crossbeam-queue",
"enum_dispatch",
"futures",
"hashbrown 0.14.5",
"num-traits",
"polars-arrow",
@ -4604,22 +4627,25 @@ dependencies = [
"polars-row",
"polars-utils",
"rayon",
"smartstring",
"uuid",
"version_check",
]
[[package]]
name = "polars-plan"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "220d0d7c02d1c4375802b2813dbedcd1a184df39c43b74689e729ede8d5c2921"
checksum = "e3f728df4bc643492a2057a0a125c7e550cbcfe35b391444653ad294be9ab190"
dependencies = [
"ahash 0.8.11",
"bitflags 2.6.0",
"bytemuck",
"bytes",
"chrono",
"chrono-tz 0.8.6",
"either",
"hashbrown 0.14.5",
"memmap2",
"once_cell",
"percent-encoding",
"polars-arrow",
@ -4634,16 +4660,15 @@ dependencies = [
"recursive",
"regex",
"serde",
"smartstring",
"strum_macros",
"version_check",
]
[[package]]
name = "polars-row"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1d70d87a2882a64a43b431aea1329cb9a2c4100547c95c417cc426bb82408b3"
checksum = "4eb931f0929ca7498b3ed5056357d2d364cad42cce95383a7e3983dbceb4bed1"
dependencies = [
"bytemuck",
"polars-arrow",
@ -4652,10 +4677,23 @@ dependencies = [
]
[[package]]
name = "polars-sql"
version = "0.41.3"
name = "polars-schema"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6fc1c9b778862f09f4a347f768dfdd3d0ba9957499d306d83c7103e0fa8dc5b"
checksum = "9c7e1234b942d3244024ecbac9c7f5a48a52a815f8ca4b9d075fbba16afb1a39"
dependencies = [
"indexmap",
"polars-error",
"polars-utils",
"serde",
"version_check",
]
[[package]]
name = "polars-sql"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce52bfd2ef1e2e18ac26d7d7ea3f9132b199cff06d975156703fa5badcfae187"
dependencies = [
"hex",
"once_cell",
@ -4666,6 +4704,7 @@ dependencies = [
"polars-ops",
"polars-plan",
"polars-time",
"polars-utils",
"rand",
"serde",
"serde_json",
@ -4674,9 +4713,9 @@ dependencies = [
[[package]]
name = "polars-time"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "179f98313a15c0bfdbc8cc0f1d3076d08d567485b9952d46439f94fbc3085df5"
checksum = "9925ab75e1d859ae2283ca09d7683198b0b9ff5afecd03f2c9180f3e36e35056"
dependencies = [
"atoi",
"bytemuck",
@ -4691,27 +4730,30 @@ dependencies = [
"polars-utils",
"regex",
"serde",
"smartstring",
]
[[package]]
name = "polars-utils"
version = "0.41.3"
version = "0.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53e6dd89fcccb1ec1a62f752c9a9f2d482a85e9255153f46efecc617b4996d50"
checksum = "b44846e1fc6ae1dfdc7f65a37af7d270d0a6a17a58fff76716561f5b887a8ad7"
dependencies = [
"ahash 0.8.11",
"bytemuck",
"bytes",
"compact_str 0.8.0",
"hashbrown 0.14.5",
"indexmap",
"libc",
"memmap2",
"num-traits",
"once_cell",
"polars-error",
"raw-cpuid",
"rayon",
"smartstring",
"serde",
"stacker",
"sysinfo 0.30.13",
"sysinfo 0.31.4",
"version_check",
]
@ -5040,7 +5082,7 @@ checksum = "f44c9e68fd46eda15c646fbb85e1040b657a58cdc8c98db1d97a55930d991eef"
dependencies = [
"bitflags 2.6.0",
"cassowary",
"compact_str",
"compact_str 0.7.1",
"crossterm 0.27.0",
"itertools 0.12.1",
"lru",
@ -5822,18 +5864,6 @@ version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "smartstring"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29"
dependencies = [
"autocfg",
"serde",
"static_assertions",
"version_check",
]
[[package]]
name = "smawk"
version = "0.3.2"
@ -5858,9 +5888,9 @@ dependencies = [
[[package]]
name = "sqlparser"
version = "0.47.0"
version = "0.49.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "295e9930cd7a97e58ca2a070541a3ca502b17f5d1fa7157376d0fabd85324f25"
checksum = "a4a404d0e14905361b918cb8afdb73605e25c1d5029312bd9785142dcb3aa49e"
dependencies = [
"log",
]
@ -6087,16 +6117,15 @@ dependencies = [
[[package]]
name = "sysinfo"
version = "0.30.13"
version = "0.31.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a5b4ddaee55fb2bea2bf0e5000747e5f5c0de765e5a5ff87f4cd106439f4bb3"
checksum = "355dbe4f8799b304b05e1b0f05fc59b2a18d36645cf169607da45bde2f69a1be"
dependencies = [
"cfg-if",
"core-foundation-sys",
"libc",
"memchr",
"ntapi",
"once_cell",
"windows 0.52.0",
"windows",
]
[[package]]
@ -6110,7 +6139,7 @@ dependencies = [
"memchr",
"ntapi",
"rayon",
"windows 0.56.0",
"windows",
]
[[package]]
@ -6448,7 +6477,7 @@ dependencies = [
"once_cell",
"scopeguard",
"urlencoding",
"windows 0.56.0",
"windows",
]
[[package]]
@ -7084,16 +7113,6 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be"
dependencies = [
"windows-core 0.52.0",
"windows-targets 0.52.6",
]
[[package]]
name = "windows"
version = "0.56.0"

View File

@ -24,18 +24,18 @@ nu-utils = { path = "../nu-utils", version = "0.99.2" }
# Potential dependencies for extras
chrono = { workspace = true, features = ["std", "unstable-locales"], default-features = false }
chrono-tz = "0.9"
chrono-tz = "0.10"
fancy-regex = { workspace = true }
indexmap = { version = "2.6" }
mimalloc = { version = "0.1.42" }
num = {version = "0.4"}
serde = { version = "1.0", features = ["derive"] }
sqlparser = { version = "0.47"}
polars-io = { version = "0.41", features = ["avro"]}
polars-arrow = { version = "0.41"}
polars-ops = { version = "0.41", features = ["pivot"]}
polars-plan = { version = "0.41", features = ["regex"]}
polars-utils = { version = "0.41"}
sqlparser = { version = "0.49"}
polars-io = { version = "0.43", features = ["avro"]}
polars-arrow = { version = "0.43"}
polars-ops = { version = "0.43", features = ["pivot"]}
polars-plan = { version = "0.43", features = ["regex"]}
polars-utils = { version = "0.43"}
typetag = "0.2"
env_logger = "0.11.3"
log.workspace = true
@ -68,6 +68,7 @@ features = [
"lazy",
"object",
"parquet",
"pivot",
"random",
"rolling_window",
"rows",
@ -80,7 +81,7 @@ features = [
"to_dummies",
]
optional = false
version = "0.41"
version = "0.43"
[dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.99.2" }

View File

@ -42,7 +42,10 @@ impl PluginCommand for CacheGet {
polars store-ls | get key | first | polars store-get $in"#,
result: Some(
NuDataFrame::try_from_series_vec(
vec![Series::new("a", &[1_i64, 3]), Series::new("b", &[2_i64, 4])],
vec![
Series::new("a".into(), &[1_i64, 3]),
Series::new("b".into(), &[2_i64, 4]),
],
Span::test_data(),
)
.expect("could not create dataframe")

View File

@ -234,7 +234,7 @@ fn command_df(
})?;
let name = format!("{}_{}", series.name(), cum_type.to_str());
res.rename(&name);
res.rename(name.into());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -169,7 +169,7 @@ fn command(
})?;
let name = format!("{}_{}", series.name(), roll_type.to_str());
res.rename(&name);
res.rename(name.into());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -92,7 +92,7 @@ fn command(
let series = df.as_series(call.head)?;
let res = series
.value_counts(sort, parallel, column, normalize)
.value_counts(sort, parallel, column.into(), normalize)
.map_err(|e| ShellError::GenericError {
error: "Error calculating value counts values".into(),
msg: e.to_string(),

View File

@ -84,7 +84,7 @@ fn command(
match columns.first() {
Some(column) => {
let expression = arg_where(col(column).eq(true)).alias("arg_true");
let expression = arg_where(col((*column).clone()).eq(true)).alias("arg_true");
let res: NuDataFrame = df
.as_ref()
.clone()

View File

@ -112,7 +112,7 @@ fn command(
})?
.into_series();
res.rename("is_duplicated");
res.rename("is_duplicated".into());
let df = NuDataFrame::try_from_series_vec(vec![res], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -178,7 +178,7 @@ fn command_df(
})?
.into_series();
res.rename("is_in");
res.rename("is_in".into());
let mut new_df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
new_df.from_lazy = df.from_lazy;

View File

@ -108,7 +108,7 @@ fn command(
df: NuDataFrame,
) -> Result<PipelineData, ShellError> {
let mut res = df.as_series(call.head)?.is_not_null();
res.rename("is_not_null");
res.rename("is_not_null".into());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -110,7 +110,7 @@ fn command(
df: NuDataFrame,
) -> Result<PipelineData, ShellError> {
let mut res = df.as_series(call.head)?.is_null();
res.rename("is_null");
res.rename("is_null".into());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -112,7 +112,7 @@ fn command(
})?
.into_series();
res.rename("is_unique");
res.rename("is_unique".into());
let df = NuDataFrame::try_from_series_vec(vec![res], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -175,7 +175,7 @@ fn command(
})?;
let mut res = res.into_series();
res.rename("string");
res.rename("string".into());
NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)
}

View File

@ -59,7 +59,7 @@ fn command(
.as_ref()
.get_column_names()
.iter()
.map(|v| Value::string(*v, call.head))
.map(|v| Value::string(v.as_str(), call.head))
.collect();
let names = Value::list(names, call.head);

View File

@ -24,7 +24,7 @@ use polars::{
lazy::frame::LazyJsonLineReader,
prelude::{
CsvEncoding, IpcReader, JsonFormat, JsonReader, LazyCsvReader, LazyFileListReader,
LazyFrame, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
LazyFrame, ParquetReader, PlSmallStr, ScanArgsIpc, ScanArgsParquet, SerReader,
},
};
@ -190,6 +190,7 @@ fn from_parquet(
use_statistics: false,
hive_options: HiveOptions::default(),
glob: true,
include_file_paths: None,
};
let df: NuLazyFrame = LazyFrame::scan_parquet(file, args)
@ -286,8 +287,9 @@ fn from_arrow(
cache: true,
rechunk: false,
row_index: None,
memory_map: true,
cloud_options: None,
include_file_paths: None,
hive_options: HiveOptions::default(),
};
let df: NuLazyFrame = LazyFrame::scan_ipc(file, args)
@ -530,7 +532,11 @@ fn from_csv(
.with_infer_schema_length(Some(infer_schema))
.with_skip_rows(skip_rows.unwrap_or_default())
.with_schema(maybe_schema.map(|s| s.into()))
.with_columns(columns.map(|v| Arc::from(v.into_boxed_slice())))
.with_columns(
columns
.map(|v| v.iter().map(PlSmallStr::from).collect::<Vec<PlSmallStr>>())
.map(|v| Arc::from(v.into_boxed_slice())),
)
.map_parse_options(|options| {
options
.with_separator(

View File

@ -177,7 +177,8 @@ fn command(
let df = NuDataFrame::try_from_pipeline_coerce(plugin, input, call.head)?;
let names = ChunkedArray::<StringType>::from_slice_options("descriptor", &labels).into_series();
let names =
ChunkedArray::<StringType>::from_slice_options("descriptor".into(), &labels).into_series();
let head = std::iter::once(names);
@ -201,7 +202,7 @@ fn command(
.map(|q| {
col.quantile_reduce(q, QuantileInterpolOptions::default())
.ok()
.map(|s| s.into_series("quantile"))
.map(|s| s.into_series("quantile".into()))
.and_then(|ca| ca.cast(&DataType::Float64).ok())
.and_then(|ca| match ca.get(0) {
Ok(AnyValue::Float64(v)) => Some(v),
@ -217,7 +218,7 @@ fn command(
descriptors.push(max);
let name = format!("{} ({})", col.name(), col.dtype());
ChunkedArray::<Float64Type>::from_slice_options(&name, &descriptors).into_series()
ChunkedArray::<Float64Type>::from_slice_options(name.into(), &descriptors).into_series()
});
let res = head.chain(tail).collect::<Vec<Series>>();

View File

@ -163,18 +163,18 @@ impl PluginCommand for ToDataFrame {
example: "[[a b c]; [1 {d: [1 2 3]} [10 11 12] ]]| polars into-df -s {a: u8, b: {d: list<u64>}, c: list<u8>}",
result: Some(
NuDataFrame::try_from_series_vec(vec![
Series::new("a", &[1u8]),
Series::new("a".into(), &[1u8]),
{
let dtype = DataType::Struct(vec![Field::new("a", DataType::List(Box::new(DataType::UInt64)))]);
let dtype = DataType::Struct(vec![Field::new("a".into(), DataType::List(Box::new(DataType::UInt64)))]);
let vals = vec![AnyValue::StructOwned(
Box::new((vec![AnyValue::List(Series::new("a", &[1u64, 2, 3]))], vec![Field::new("a", DataType::String)]))); 1];
Series::from_any_values_and_dtype("b", &vals, &dtype, false)
Box::new((vec![AnyValue::List(Series::new("a".into(), &[1u64, 2, 3]))], vec![Field::new("a".into(), DataType::String)]))); 1];
Series::from_any_values_and_dtype("b".into(), &vals, &dtype, false)
.expect("Struct series should not fail")
},
{
let dtype = DataType::List(Box::new(DataType::String));
let vals = vec![AnyValue::List(Series::new("c", &[10, 11, 12]))];
Series::from_any_values_and_dtype("c", &vals, &dtype, false)
let vals = vec![AnyValue::List(Series::new("c".into(), &[10, 11, 12]))];
Series::from_any_values_and_dtype("c".into(), &vals, &dtype, false)
.expect("List series should not fail")
}
], Span::test_data())
@ -186,9 +186,9 @@ impl PluginCommand for ToDataFrame {
description: "Convert to a dataframe and provide a schema that adds a new column",
example: r#"[[a b]; [1 "foo"] [2 "bar"]] | polars into-df -s {a: u8, b:str, c:i64} | polars fill-null 3"#,
result: Some(NuDataFrame::try_from_series_vec(vec![
Series::new("a", [1u8, 2]),
Series::new("b", ["foo", "bar"]),
Series::new("c", [3i64, 3]),
Series::new("a".into(), [1u8, 2]),
Series::new("b".into(), ["foo", "bar"]),
Series::new("c".into(), [3i64, 3]),
], Span::test_data())
.expect("simple df for test should not fail")
.into_value(Span::test_data()),

View File

@ -103,7 +103,7 @@ fn command(
let polars_df = df
.as_ref()
.unique(subset_slice, keep_strategy, None)
.unique_stable(subset_slice, keep_strategy, None)
.map_err(|e| ShellError::GenericError {
error: "Error dropping duplicates".into(),
msg: e.to_string(),

View File

@ -38,10 +38,10 @@ impl PluginCommand for Dummies {
result: Some(
NuDataFrame::try_from_series_vec(
vec![
Series::new("a_1", &[1_u8, 0]),
Series::new("a_3", &[0_u8, 1]),
Series::new("b_2", &[1_u8, 0]),
Series::new("b_4", &[0_u8, 1]),
Series::new("a_1".into(), &[1_u8, 0]),
Series::new("a_3".into(), &[0_u8, 1]),
Series::new("b_2".into(), &[1_u8, 0]),
Series::new("b_4".into(), &[0_u8, 1]),
],
Span::test_data(),
)
@ -55,9 +55,9 @@ impl PluginCommand for Dummies {
result: Some(
NuDataFrame::try_from_series_vec(
vec![
Series::new("0_1", &[1_u8, 0, 0, 0, 0]),
Series::new("0_2", &[0_u8, 1, 1, 0, 0]),
Series::new("0_3", &[0_u8, 0, 0, 1, 1]),
Series::new("0_1".into(), &[1_u8, 0, 0, 0, 0]),
Series::new("0_2".into(), &[0_u8, 1, 1, 0, 0]),
Series::new("0_3".into(), &[0_u8, 0, 0, 1, 1]),
],
Span::test_data(),
)

View File

@ -69,7 +69,6 @@ pub use slice::SliceDF;
use sort_by_expr::LazySortBy;
pub use take::TakeDF;
pub use unique::Unique;
pub use unpivot::UnpivotDF;
pub use with_column::WithColumn;
pub(crate) fn data_commands() -> Vec<Box<dyn PluginCommand<Plugin = PolarsPlugin>>> {
@ -84,7 +83,7 @@ pub(crate) fn data_commands() -> Vec<Box<dyn PluginCommand<Plugin = PolarsPlugin
Box::new(filter_with::FilterWith),
Box::new(GetDF),
Box::new(pivot::PivotDF),
Box::new(UnpivotDF),
Box::new(unpivot::Unpivot),
Box::new(FirstDF),
Box::new(LastDF),
Box::new(len::ExprLen),

View File

@ -150,7 +150,7 @@ fn command_eager(
for (from, to) in columns.iter().zip(new_names.iter()) {
polars_df
.rename(from, to)
.rename(from, to.into())
.map_err(|e| ShellError::GenericError {
error: "Error renaming".into(),
msg: e.to_string(),

View File

@ -114,7 +114,12 @@ fn command(
let df = match (rows, fraction) {
(Some(rows), None) => df
.as_ref()
.sample_n(&Series::new("s", &[rows.item]), replace, shuffle, seed)
.sample_n(
&Series::new("s".into(), &[rows.item]),
replace,
shuffle,
seed,
)
.map_err(|e| ShellError::GenericError {
error: "Error creating sample".into(),
msg: e.to_string(),
@ -124,7 +129,12 @@ fn command(
}),
(None, Some(frac)) => df
.as_ref()
.sample_frac(&Series::new("frac", &[frac.item]), replace, shuffle, seed)
.sample_frac(
&Series::new("frac".into(), &[frac.item]),
replace,
shuffle,
seed,
)
.map_err(|e| ShellError::GenericError {
error: "Error creating sample".into(),
msg: e.to_string(),

View File

@ -98,11 +98,11 @@ impl SQLContext {
// Check for group by
// After projection since there might be number.
let group_by = match &select_stmt.group_by {
GroupByExpr::All =>
GroupByExpr::All(_) =>
Err(
PolarsError::ComputeError("Group-By Error: Only positive number or expression are supported, not all".into())
)?,
GroupByExpr::Expressions(expressions) => expressions
GroupByExpr::Expressions(expressions, _) => expressions
}
.iter()
.map(
@ -171,7 +171,8 @@ impl SQLContext {
.schema()
.get_at_index(shm_p)
.unwrap_or((&"".into(), &DataType::Null))
.0)
.0
.clone())
})
.collect::<Vec<_>>();
agg_df.select(final_proj)

View File

@ -1,5 +1,8 @@
use crate::{
dataframe::{utils::extract_strings, values::NuLazyFrame},
dataframe::{
utils::{extract_sm_strs, extract_strings},
values::NuLazyFrame,
},
values::{cant_convert_err, CustomValueSupport, PolarsPluginObject, PolarsPluginType},
PolarsPlugin,
};
@ -179,12 +182,7 @@ fn command_lazy(
) -> Result<PipelineData, ShellError> {
let last = call.has_flag("last")?;
let maintain = call.has_flag("maintain-order")?;
let subset: Option<Value> = call.get_flag("subset")?;
let subset = match subset {
Some(value) => Some(extract_strings(value)?),
None => None,
};
let strategy = if last {
UniqueKeepStrategy::Last
@ -194,8 +192,16 @@ fn command_lazy(
let lazy = lazy.to_polars();
let lazy: NuLazyFrame = if maintain {
let subset = match subset {
Some(value) => Some(extract_strings(value)?),
None => None,
};
lazy.unique(subset, strategy).into()
} else {
let subset = match subset {
Some(value) => Some(extract_sm_strs(value)?),
None => None,
};
lazy.unique_stable(subset, strategy).into()
};
lazy.to_pipeline_data(plugin, engine, call.head)

View File

@ -3,20 +3,21 @@ use nu_protocol::{
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, Spanned,
SyntaxShape, Type, Value,
};
use polars::frame::explode::UnpivotArgs;
use polars::{frame::explode::UnpivotArgsIR, prelude::UnpivotArgsDSL};
use polars_ops::pivot::UnpivotDF;
use crate::{
dataframe::values::utils::convert_columns_string,
values::{CustomValueSupport, NuLazyFrame, PolarsPluginObject},
values::{utils::convert_columns_sm_str, CustomValueSupport, NuLazyFrame, PolarsPluginObject},
PolarsPlugin,
};
use crate::values::{Column, NuDataFrame};
#[derive(Clone)]
pub struct UnpivotDF;
pub struct Unpivot;
impl PluginCommand for UnpivotDF {
impl PluginCommand for Unpivot {
type Plugin = PolarsPlugin;
fn name(&self) -> &str {
@ -57,11 +58,6 @@ impl PluginCommand for UnpivotDF {
Type::Custom("dataframe".into()),
Type::Custom("dataframe".into()),
)
.switch(
"streamable",
"Whether or not to use the polars streaming engine. Only valid for lazy dataframes",
Some('t'),
)
.category(Category::Custom("dataframe".into()))
}
@ -220,14 +216,11 @@ fn command_eager(
check_column_datatypes(df.as_ref(), &index_col_string, index_col_span)?;
check_column_datatypes(df.as_ref(), &on_col_string, on_col_span)?;
let streamable = call.has_flag("streamable")?;
let args = UnpivotArgs {
let args = UnpivotArgsIR {
on: on_col_string.iter().map(Into::into).collect(),
index: index_col_string.iter().map(Into::into).collect(),
variable_name: variable_name.map(|s| s.item.into()),
value_name: value_name.map(|s| s.item.into()),
streamable,
};
let res = df
@ -254,20 +247,17 @@ fn command_lazy(
let index_col: Vec<Value> = call.get_flag("index")?.expect("required value");
let on_col: Vec<Value> = call.get_flag("on")?.expect("required value");
let (index_col_string, _index_col_span) = convert_columns_string(index_col, call.head)?;
let (on_col_string, _on_col_span) = convert_columns_string(on_col, call.head)?;
let (index_col_string, _index_col_span) = convert_columns_sm_str(index_col, call.head)?;
let (on_col_string, _on_col_span) = convert_columns_sm_str(on_col, call.head)?;
let value_name: Option<String> = call.get_flag("value-name")?;
let variable_name: Option<String> = call.get_flag("variable-name")?;
let streamable = call.has_flag("streamable")?;
let unpivot_args = UnpivotArgs {
on: on_col_string.iter().map(Into::into).collect(),
index: index_col_string.iter().map(Into::into).collect(),
let unpivot_args = UnpivotArgsDSL {
on: on_col_string.iter().cloned().map(Into::into).collect(),
index: index_col_string.iter().cloned().map(Into::into).collect(),
value_name: value_name.map(Into::into),
variable_name: variable_name.map(Into::into),
streamable,
};
let polars_df = df.to_polars().unpivot(unpivot_args);
@ -341,6 +331,6 @@ mod test {
#[test]
fn test_examples() -> Result<(), ShellError> {
test_polars_plugin_command(&UnpivotDF)
test_polars_plugin_command(&Unpivot)
}
}

View File

@ -153,7 +153,7 @@ fn command_eager(
None => other.name().to_string(),
};
let series = other.rename(&name).clone();
let series = other.rename(name.into()).clone();
let mut polars_df = df.to_polars();
polars_df

View File

@ -101,7 +101,7 @@ fn command(
})?
.into_series();
res.rename("date");
res.rename("date".into());
let df = NuDataFrame::try_from_series_vec(vec![res], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -202,7 +202,7 @@ fn command(
})?
.into_series();
res.rename("datetime");
res.rename("datetime".into());
let df = NuDataFrame::try_from_series_vec(vec![res], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)
}

View File

@ -80,16 +80,16 @@ impl PluginCommand for ExprDatePart {
result: Some(
NuDataFrame::try_from_series_vec(
vec![
Series::new("datetime", &[dt.timestamp_nanos_opt()])
Series::new("datetime".into(), &[dt.timestamp_nanos_opt()])
.cast(&DataType::Datetime(TimeUnit::Nanoseconds, None))
.expect("Error casting to datetime type"),
Series::new("datetime_year", &[2021_i64]), // i32 was coerced to i64
Series::new("datetime_month", &[12_i8]),
Series::new("datetime_day", &[30_i8]),
Series::new("datetime_hour", &[1_i8]),
Series::new("datetime_minute", &[2_i8]),
Series::new("datetime_second", &[3_i8]),
Series::new("datetime_ns", &[123456789_i64]), // i32 was coerced to i64
Series::new("datetime_year".into(), &[2021_i64]), // i32 was coerced to i64
Series::new("datetime_month".into(), &[12_i8]),
Series::new("datetime_day".into(), &[30_i8]),
Series::new("datetime_hour".into(), &[1_i8]),
Series::new("datetime_minute".into(), &[2_i8]),
Series::new("datetime_second".into(), &[3_i8]),
Series::new("datetime_ns".into(), &[123456789_i64]), // i32 was coerced to i64
],
Span::test_data(),
)

View File

@ -41,7 +41,7 @@ impl PluginCommand for GetDay {
let df = ([$dt $dt] | polars into-df);
$df | polars get-day"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[4i8, 4]), Span::test_data())
NuDataFrame::try_from_series(Series::new("0".into(), &[4i8, 4]), Span::test_data())
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -41,7 +41,10 @@ impl PluginCommand for GetHour {
let df = ([$dt $dt] | polars into-df);
$df | polars get-hour"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[16i8, 16]), Span::test_data())
NuDataFrame::try_from_series(
Series::new("0".into(), &[16i8, 16]),
Span::test_data(),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -39,7 +39,10 @@ impl PluginCommand for GetMinute {
let df = ([$dt $dt] | polars into-df);
$df | polars get-minute"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[39i8, 39]), Span::test_data())
NuDataFrame::try_from_series(
Series::new("0".into(), &[39i8, 39]),
Span::test_data(),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -41,7 +41,7 @@ impl PluginCommand for GetMonth {
let df = ([$dt $dt] | polars into-df);
$df | polars get-month"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[8i8, 8]), Span::test_data())
NuDataFrame::try_from_series(Series::new("0".into(), &[8i8, 8]), Span::test_data())
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -41,7 +41,10 @@ impl PluginCommand for GetNanosecond {
let df = ([$dt $dt] | polars into-df);
$df | polars get-nanosecond"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[0i32, 0]), Span::test_data())
NuDataFrame::try_from_series(
Series::new("0".into(), &[0i32, 0]),
Span::test_data(),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -41,7 +41,10 @@ impl PluginCommand for GetOrdinal {
let df = ([$dt $dt] | polars into-df);
$df | polars get-ordinal"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[217i16, 217]), Span::test_data())
NuDataFrame::try_from_series(
Series::new("0".into(), &[217i16, 217]),
Span::test_data(),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -41,7 +41,10 @@ impl PluginCommand for GetSecond {
let df = ([$dt $dt] | polars into-df);
$df | polars get-second"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[18i8, 18]), Span::test_data())
NuDataFrame::try_from_series(
Series::new("0".into(), &[18i8, 18]),
Span::test_data(),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -41,7 +41,10 @@ impl PluginCommand for GetWeek {
let df = ([$dt $dt] | polars into-df);
$df | polars get-week"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[32i8, 32]), Span::test_data())
NuDataFrame::try_from_series(
Series::new("0".into(), &[32i8, 32]),
Span::test_data(),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -41,7 +41,7 @@ impl PluginCommand for GetWeekDay {
let df = ([$dt $dt] | polars into-df);
$df | polars get-weekday"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[2i8, 2]), Span::test_data())
NuDataFrame::try_from_series(Series::new("0".into(), &[2i8, 2]), Span::test_data())
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -41,7 +41,10 @@ impl PluginCommand for GetYear {
let df = ([$dt $dt] | polars into-df);
$df | polars get-year"#,
result: Some(
NuDataFrame::try_from_series(Series::new("0", &[2020i32, 2020]), Span::test_data())
NuDataFrame::try_from_series(
Series::new("0".into(), &[2020i32, 2020]),
Span::test_data(),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
),

View File

@ -72,8 +72,8 @@ fn command(
let res = series.arg_max();
let chunked = match res {
Some(index) => UInt32Chunked::from_slice("arg_max", &[index as u32]),
None => UInt32Chunked::from_slice("arg_max", &[]),
Some(index) => UInt32Chunked::from_slice("arg_max".into(), &[index as u32]),
None => UInt32Chunked::from_slice("arg_max".into(), &[]),
};
let res = chunked.into_series();

View File

@ -72,8 +72,8 @@ fn command(
let res = series.arg_min();
let chunked = match res {
Some(index) => UInt32Chunked::from_slice("arg_min", &[index as u32]),
None => UInt32Chunked::from_slice("arg_min", &[]),
Some(index) => UInt32Chunked::from_slice("arg_min".into(), &[index as u32]),
None => UInt32Chunked::from_slice("arg_min".into(), &[]),
};
let res = chunked.into_series();

View File

@ -119,7 +119,7 @@ fn command(
.as_series(call.head)?
.arg_sort(sort_options)
.into_series();
res.rename("arg_sort");
res.rename("arg_sort".into());
let df = NuDataFrame::try_from_series_vec(vec![res], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -83,7 +83,7 @@ fn command(
inner: vec![],
})?
.into_series();
res.rename("arg_unique");
res.rename("arg_unique".into());
let df = NuDataFrame::try_from_series_vec(vec![res], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -195,7 +195,7 @@ fn command(
})?;
let mut res = res.into_series();
res.rename("string");
res.rename("string".into());
NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)
}

View File

@ -184,7 +184,7 @@ fn command_df(
inner: vec![],
})?;
res.rename(series.name());
res.rename(series.name().to_owned());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -185,7 +185,7 @@ fn command_df(
inner: vec![],
})?;
res.rename(series.name());
res.rename(series.name().to_owned());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine_state, call.head)

View File

@ -159,7 +159,7 @@ fn command_df(
let mut res = chunked.concat(other_chunked);
res.rename(series.name());
res.rename(series.name().to_owned());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -160,12 +160,12 @@ fn command_df(
df: NuDataFrame,
) -> Result<PipelineData, ShellError> {
let start: i64 = call.req(0)?;
let start = Series::new("", &[start]);
let start = Series::new("".into(), &[start]);
let length: Option<i64> = call.get_flag("length")?;
let length = match length {
Some(v) => Series::new("", &[v as u64]),
None => Series::new_null("", 1),
Some(v) => Series::new("".into(), &[v as u64]),
None => Series::new_null("".into(), 1),
};
let series = df.as_series(call.head)?;
@ -187,7 +187,7 @@ fn command_df(
help: None,
inner: vec![],
})?
.with_name(series.name());
.with_name(series.name().to_owned());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -137,7 +137,7 @@ fn command_df(
})?;
let mut res = casted.to_lowercase();
res.rename(series.name());
res.rename(series.name().to_owned());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -141,7 +141,7 @@ fn command_df(
})?;
let mut res = casted.to_uppercase();
res.rename(series.name());
res.rename(series.name().clone());
let df = NuDataFrame::try_from_series_vec(vec![res.into_series()], call.head)?;
df.to_pipeline_data(plugin, engine, call.head)

View File

@ -1,4 +1,5 @@
use nu_protocol::{FromValue, ShellError, Value};
use polars::prelude::PlSmallStr;
pub fn extract_strings(value: Value) -> Result<Vec<String>, ShellError> {
let span = value.span();
@ -14,3 +15,18 @@ pub fn extract_strings(value: Value) -> Result<Vec<String>, ShellError> {
}),
}
}
pub fn extract_sm_strs(value: Value) -> Result<Vec<PlSmallStr>, ShellError> {
let span = value.span();
match (
<String as FromValue>::from_value(value.clone()),
<Vec<String> as FromValue>::from_value(value),
) {
(Ok(col), Err(_)) => Ok(vec![col.into()]),
(Err(_), Ok(cols)) => Ok(cols.iter().map(PlSmallStr::from).collect()),
_ => Err(ShellError::IncompatibleParametersSingle {
msg: "Expected a string or list of strings".into(),
span,
}),
}
}

View File

@ -49,7 +49,7 @@ pub(super) fn compute_between_series(
inner: vec![],
})?;
let name = format!("sum_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
res.rename(name.into());
NuDataFrame::try_from_series(res, operation_span)
}
Operator::Math(Math::Minus) => {
@ -61,7 +61,7 @@ pub(super) fn compute_between_series(
inner: vec![],
})?;
let name = format!("sub_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
res.rename(name.into());
NuDataFrame::try_from_series(res, operation_span)
}
Operator::Math(Math::Multiply) => {
@ -73,7 +73,7 @@ pub(super) fn compute_between_series(
inner: vec![],
})?;
let name = format!("mul_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
res.rename(name.into());
NuDataFrame::try_from_series(res, operation_span)
}
Operator::Math(Math::Divide) => {
@ -81,7 +81,7 @@ pub(super) fn compute_between_series(
match res {
Ok(mut res) => {
let name = format!("div_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
res.rename(name.into());
NuDataFrame::try_from_series(res, operation_span)
}
Err(e) => Err(ShellError::GenericError {
@ -132,7 +132,7 @@ pub(super) fn compute_between_series(
(Ok(l), Ok(r)) => {
let mut res = l.bitand(r).into_series();
let name = format!("and_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
res.rename(name.into());
NuDataFrame::try_from_series(res, operation_span)
}
_ => Err(ShellError::GenericError {
@ -161,7 +161,7 @@ pub(super) fn compute_between_series(
(Ok(l), Ok(r)) => {
let mut res = l.bitor(r).into_series();
let name = format!("or_{}_{}", lhs.name(), rhs.name());
res.rename(&name);
res.rename(name.into());
NuDataFrame::try_from_series(res, operation_span)
}
_ => Err(ShellError::GenericError {
@ -211,7 +211,7 @@ where
})?
.into_series();
res.rename(name);
res.rename(name.into());
Ok(res)
}
@ -815,7 +815,7 @@ mod test {
#[test]
fn test_compute_between_series_comparisons() {
let series = Series::new("c", &[1, 2]);
let series = Series::new("c".into(), &[1, 2]);
let df = NuDataFrame::try_from_series_vec(vec![series], Span::test_data())
.expect("should be able to create a simple dataframe");
@ -848,7 +848,7 @@ mod test {
let result = result
.as_series(Span::test_data())
.expect("should be convert to a series");
assert_eq!(result, Series::new("neq_c_c", &[false, false]));
assert_eq!(result, Series::new("neq_c_c".into(), &[false, false]));
let op = Spanned {
item: Operator::Comparison(Comparison::Equal),
@ -859,7 +859,7 @@ mod test {
let result = result
.as_series(Span::test_data())
.expect("should be convert to a series");
assert_eq!(result, Series::new("eq_c_c", &[true, true]));
assert_eq!(result, Series::new("eq_c_c".into(), &[true, true]));
let op = Spanned {
item: Operator::Comparison(Comparison::LessThan),
@ -870,7 +870,7 @@ mod test {
let result = result
.as_series(Span::test_data())
.expect("should be convert to a series");
assert_eq!(result, Series::new("lt_c_c", &[false, false]));
assert_eq!(result, Series::new("lt_c_c".into(), &[false, false]));
let op = Spanned {
item: Operator::Comparison(Comparison::LessThanOrEqual),
@ -881,7 +881,7 @@ mod test {
let result = result
.as_series(Span::test_data())
.expect("should be convert to a series");
assert_eq!(result, Series::new("lte_c_c", &[true, true]));
assert_eq!(result, Series::new("lte_c_c".into(), &[true, true]));
let op = Spanned {
item: Operator::Comparison(Comparison::GreaterThan),
@ -892,7 +892,7 @@ mod test {
let result = result
.as_series(Span::test_data())
.expect("should be convert to a series");
assert_eq!(result, Series::new("gt_c_c", &[false, false]));
assert_eq!(result, Series::new("gt_c_c".into(), &[false, false]));
let op = Spanned {
item: Operator::Comparison(Comparison::GreaterThanOrEqual),
@ -903,6 +903,6 @@ mod test {
let result = result
.as_series(Span::test_data())
.expect("should be convert to a series");
assert_eq!(result, Series::new("gte_c_c", &[true, true]));
assert_eq!(result, Series::new("gte_c_c".into(), &[true, true]));
}
}

View File

@ -7,14 +7,14 @@ use indexmap::map::{Entry, IndexMap};
use polars::chunked_array::builder::AnonymousOwnedListBuilder;
use polars::chunked_array::object::builder::ObjectChunkedBuilder;
use polars::chunked_array::ChunkedArray;
use polars::datatypes::AnyValue;
use polars::datatypes::{AnyValue, PlSmallStr};
use polars::export::arrow::Either;
use polars::prelude::{
DataFrame, DataType, DatetimeChunked, Float32Type, Float64Type, Int16Type, Int32Type,
Int64Type, Int8Type, IntoSeries, ListBooleanChunkedBuilder, ListBuilderTrait,
ChunkAnyValue, DataFrame, DataType, DatetimeChunked, Float32Type, Float64Type, Int16Type,
Int32Type, Int64Type, Int8Type, IntoSeries, ListBooleanChunkedBuilder, ListBuilderTrait,
ListPrimitiveChunkedBuilder, ListStringChunkedBuilder, ListType, NamedFrom, NewChunkedArray,
ObjectType, PolarsError, Schema, Series, StructChunked, TemporalMethods, TimeUnit, UInt16Type,
UInt32Type, UInt64Type, UInt8Type,
ObjectType, PolarsError, Schema, SchemaExt, Series, StructChunked, TemporalMethods, TimeUnit,
UInt16Type, UInt32Type, UInt64Type, UInt8Type,
};
use nu_protocol::{Record, ShellError, Span, Value};
@ -67,27 +67,27 @@ macro_rules! value_to_primitive {
#[derive(Debug)]
pub struct Column {
name: String,
name: PlSmallStr,
values: Vec<Value>,
}
impl Column {
pub fn new(name: impl Into<String>, values: Vec<Value>) -> Self {
pub fn new(name: impl Into<PlSmallStr>, values: Vec<Value>) -> Self {
Self {
name: name.into(),
values,
}
}
pub fn new_empty(name: String) -> Self {
pub fn new_empty(name: PlSmallStr) -> Self {
Self {
name,
values: Vec::new(),
}
}
pub fn name(&self) -> &str {
self.name.as_str()
pub fn name(&self) -> &PlSmallStr {
&self.name
}
}
@ -121,7 +121,7 @@ pub struct TypedColumn {
}
impl TypedColumn {
fn new_empty(name: String) -> Self {
fn new_empty(name: PlSmallStr) -> Self {
Self {
column: Column::new_empty(name),
column_type: None,
@ -143,7 +143,7 @@ impl DerefMut for TypedColumn {
}
}
pub type ColumnMap = IndexMap<String, TypedColumn>;
pub type ColumnMap = IndexMap<PlSmallStr, TypedColumn>;
pub fn create_column(
series: &Series,
@ -153,7 +153,7 @@ pub fn create_column(
) -> Result<Column, ShellError> {
let size = to_row - from_row;
let values = series_to_values(series, Some(from_row), Some(size), span)?;
Ok(Column::new(series.name(), values))
Ok(Column::new(series.name().clone(), values))
}
// Adds a separator to the vector of values using the column names from the
@ -168,7 +168,7 @@ pub fn add_separator(values: &mut Vec<Value>, df: &DataFrame, has_index: bool, s
for name in df.get_column_names() {
// there should only be one index field
record.push(name, Value::string("...", span))
record.push(name.as_str(), Value::string("...", span))
}
values.push(Value::record(record, span));
@ -181,7 +181,7 @@ pub fn insert_record(
maybe_schema: &Option<NuSchema>,
) -> Result<(), ShellError> {
for (col, value) in record {
insert_value(value, col, column_values, maybe_schema)?;
insert_value(value, col.into(), column_values, maybe_schema)?;
}
Ok(())
@ -189,7 +189,7 @@ pub fn insert_record(
pub fn insert_value(
value: Value,
key: String,
key: PlSmallStr,
column_values: &mut ColumnMap,
maybe_schema: &Option<NuSchema>,
) -> Result<(), ShellError> {
@ -201,7 +201,7 @@ pub fn insert_value(
// If we have a schema, use that for determining how things should be added to each column
if let Some(schema) = maybe_schema {
if let Some(field) = schema.schema.get_field(&key) {
col_val.column_type = Some(field.data_type().clone());
col_val.column_type = Some(field.dtype().clone());
col_val.values.push(value);
Ok(())
} else {
@ -219,7 +219,7 @@ pub fn insert_value(
else if col_val.values.is_empty() {
if let Some(schema) = maybe_schema {
if let Some(field) = schema.schema.get_field(&key) {
col_val.column_type = Some(field.data_type().clone());
col_val.column_type = Some(field.dtype().clone());
}
}
@ -284,7 +284,7 @@ fn value_to_data_type(value: &Value) -> DataType {
}
}
fn typed_column_to_series(name: &str, column: TypedColumn) -> Result<Series, ShellError> {
fn typed_column_to_series(name: PlSmallStr, column: TypedColumn) -> Result<Series, ShellError> {
if let Some(column_type) = &column.column_type {
match column_type {
DataType::Float32 => {
@ -426,13 +426,13 @@ fn typed_column_to_series(name: &str, column: TypedColumn) -> Result<Series, She
Ok(Series::new(name, series_values?))
}
DataType::List(list_type) => {
match input_type_list_to_series(name, list_type.as_ref(), &column.values) {
match input_type_list_to_series(&name, list_type.as_ref(), &column.values) {
Ok(series) => Ok(series),
Err(_) => {
// An error case will occur when there are lists of mixed types.
// If this happens, fallback to object list
input_type_list_to_series(
name,
&name,
&DataType::Object("unknown", None),
&column.values,
)
@ -489,7 +489,7 @@ fn typed_column_to_series(name: &str, column: TypedColumn) -> Result<Series, She
DataType::Struct(fields) => {
let schema = Some(NuSchema::new(Schema::from_iter(fields.clone())));
// let mut structs: Vec<Series> = Vec::new();
let mut structs: HashMap<String, Series> = HashMap::new();
let mut structs: HashMap<PlSmallStr, Series> = HashMap::new();
for v in column.values.iter() {
let mut column_values: ColumnMap = IndexMap::new();
@ -517,7 +517,7 @@ fn typed_column_to_series(name: &str, column: TypedColumn) -> Result<Series, She
inner: vec![],
})?;
} else {
structs.insert(name.to_string(), series.to_owned());
structs.insert(name.clone(), series.to_owned());
}
}
}
@ -525,14 +525,13 @@ fn typed_column_to_series(name: &str, column: TypedColumn) -> Result<Series, She
let structs: Vec<Series> = structs.into_values().collect();
let chunked =
StructChunked::new(column.name(), structs.as_slice()).map_err(|e| {
ShellError::GenericError {
StructChunked::from_series(column.name().to_owned(), structs.as_slice())
.map_err(|e| ShellError::GenericError {
error: format!("Error creating struct: {e}"),
msg: "".into(),
span: None,
help: None,
inner: vec![],
}
})?;
Ok(chunked.into_series())
}
@ -561,7 +560,7 @@ fn typed_column_to_series(name: &str, column: TypedColumn) -> Result<Series, She
pub fn from_parsed_columns(column_values: ColumnMap) -> Result<NuDataFrame, ShellError> {
let mut df_series: Vec<Series> = Vec::new();
for (name, column) in column_values {
let series = typed_column_to_series(&name, column)?;
let series = typed_column_to_series(name, column)?;
df_series.push(series);
}
@ -576,7 +575,7 @@ pub fn from_parsed_columns(column_values: ColumnMap) -> Result<NuDataFrame, Shel
})
}
fn value_to_series(name: &str, values: &[Value]) -> Result<Series, ShellError> {
fn value_to_series(name: PlSmallStr, values: &[Value]) -> Result<Series, ShellError> {
let mut builder = ObjectChunkedBuilder::<DataFrameValue>::new(name, values.len());
for v in values {
@ -588,7 +587,7 @@ fn value_to_series(name: &str, values: &[Value]) -> Result<Series, ShellError> {
}
fn input_type_list_to_series(
name: &str,
name: &PlSmallStr,
data_type: &DataType,
values: &[Value],
) -> Result<Series, ShellError> {
@ -605,7 +604,7 @@ fn input_type_list_to_series(
macro_rules! primitive_list_series {
($list_type:ty, $vec_type:tt) => {{
let mut builder = ListPrimitiveChunkedBuilder::<$list_type>::new(
name,
name.clone(),
values.len(),
VALUES_CAPACITY,
data_type.clone(),
@ -628,7 +627,8 @@ fn input_type_list_to_series(
match *data_type {
// list of boolean values
DataType::Boolean => {
let mut builder = ListBooleanChunkedBuilder::new(name, values.len(), VALUES_CAPACITY);
let mut builder =
ListBooleanChunkedBuilder::new(name.clone(), values.len(), VALUES_CAPACITY);
for v in values {
let value_list = v
.as_list()?
@ -653,7 +653,8 @@ fn input_type_list_to_series(
DataType::Float32 => primitive_list_series!(Float32Type, f32),
DataType::Float64 => primitive_list_series!(Float64Type, f64),
DataType::String => {
let mut builder = ListStringChunkedBuilder::new(name, values.len(), VALUES_CAPACITY);
let mut builder =
ListStringChunkedBuilder::new(name.clone(), values.len(), VALUES_CAPACITY);
for v in values {
let value_list = v
.as_list()?
@ -668,7 +669,7 @@ fn input_type_list_to_series(
}
DataType::Date => {
let mut builder = AnonymousOwnedListBuilder::new(
name,
name.clone(),
values.len(),
Some(DataType::Datetime(TimeUnit::Nanoseconds, None)),
);
@ -682,7 +683,7 @@ fn input_type_list_to_series(
None
}
});
let dt_chunked = ChunkedArray::<Int64Type>::from_iter_options(&list_name, it)
let dt_chunked = ChunkedArray::<Int64Type>::from_iter_options(list_name.into(), it)
.into_datetime(TimeUnit::Nanoseconds, None);
builder
@ -702,7 +703,7 @@ fn input_type_list_to_series(
Ok(input_type_list_to_series(name, sub_list_type, values)?)
}
// treat everything else as an object
_ => Ok(value_to_series(name, values)?),
_ => Ok(value_to_series(name.clone(), values)?),
}
}
@ -1142,7 +1143,7 @@ fn series_to_values(
.collect::<Result<Vec<Value>, ShellError>>()?;
Ok(values)
}
DataType::Struct(polar_fields) => {
DataType::Struct(_) => {
let casted = series.struct_().map_err(|e| ShellError::GenericError {
error: "Error casting column to struct".into(),
msg: "".to_string(),
@ -1150,26 +1151,29 @@ fn series_to_values(
help: Some(e.to_string()),
inner: Vec::new(),
})?;
let it = casted.into_iter();
let values: Result<Vec<Value>, ShellError> =
if let (Some(size), Some(from_row)) = (maybe_size, maybe_from_row) {
Either::Left(it.skip(from_row).take(size))
} else {
Either::Right(it)
}
.map(|any_values| {
let record = polar_fields
.iter()
.zip(any_values)
.map(|(field, val)| {
any_value_to_value(val, span).map(|val| (field.name.to_string(), val))
})
.collect::<Result<_, _>>()?;
Ok(Value::record(record, span))
})
.collect();
values
let range = if let (Some(size), Some(from_row)) = (maybe_size, maybe_from_row) {
from_row..(from_row + size)
} else {
0..casted.len()
};
let mut values = Vec::with_capacity(casted.len());
for i in range {
let val = casted
.get_any_value(i)
.map_err(|e| ShellError::GenericError {
error: format!("Could not get struct value for index {i} - {e}"),
msg: "".into(),
span: None,
help: None,
inner: vec![],
})?;
values.push(any_value_to_value(&val, span)?)
}
Ok(values)
}
DataType::Time => {
let casted =
@ -1300,7 +1304,7 @@ fn nanos_from_timeunit(a: i64, time_unit: TimeUnit) -> i64 {
fn datetime_from_epoch_nanos(
nanos: i64,
timezone: &Option<String>,
timezone: &Option<PlSmallStr>,
span: Span,
) -> Result<DateTime<FixedOffset>, ShellError> {
let tz: Tz = if let Some(polars_tz) = timezone {
@ -1355,6 +1359,7 @@ where
mod tests {
use indexmap::indexmap;
use nu_protocol::record;
use polars::datatypes::CompatLevel;
use polars::export::arrow::array::{BooleanArray, PrimitiveArray};
use polars::prelude::Field;
use polars_io::prelude::StructArray;
@ -1374,7 +1379,7 @@ mod tests {
),
];
let column = Column {
name: "foo".to_string(),
name: "foo".into(),
values: values.clone(),
};
let typed_column = TypedColumn {
@ -1382,7 +1387,7 @@ mod tests {
column_type: Some(DataType::List(Box::new(DataType::String))),
};
let column_map = indexmap!("foo".to_string() => typed_column);
let column_map = indexmap!("foo".into() => typed_column);
let parsed_df = from_parsed_columns(column_map)?;
let parsed_columns = parsed_df.columns(Span::test_data())?;
assert_eq!(parsed_columns.len(), 1);
@ -1521,7 +1526,7 @@ mod tests {
Value::date(test_time, span)
);
let test_list_series = Series::new("int series", &[1, 2, 3]);
let test_list_series = Series::new("int series".into(), &[1, 2, 3]);
let comparison_list_series = Value::list(
vec![
Value::int(1, span),
@ -1541,8 +1546,8 @@ mod tests {
let field_name_0 = "num_field";
let field_name_1 = "bool_field";
let fields = vec![
Field::new(field_name_0, DataType::Int32),
Field::new(field_name_1, DataType::Boolean),
Field::new(field_name_0.into(), DataType::Int32),
Field::new(field_name_1.into(), DataType::Boolean),
];
let test_owned_struct = AnyValue::StructOwned(Box::new((values, fields.clone())));
let comparison_owned_record = Value::test_record(record!(
@ -1557,7 +1562,7 @@ mod tests {
let test_int_arr = PrimitiveArray::from([Some(1_i32)]);
let test_bool_arr = BooleanArray::from([Some(true)]);
let test_struct_arr = StructArray::new(
DataType::Struct(fields.clone()).to_arrow(true),
DataType::Struct(fields.clone()).to_arrow(CompatLevel::newest()),
vec![Box::new(test_int_arr), Box::new(test_bool_arr)],
None,
);
@ -1586,9 +1591,12 @@ mod tests {
column_type: Some(DataType::Float32),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1.1f32), Some(2.0), None]));
assert_eq!(
result,
Series::new("name".into(), [Some(1.1f32), Some(2.0), None])
);
Ok(())
}
@ -1606,9 +1614,12 @@ mod tests {
column_type: Some(DataType::Float64),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1.1f64), Some(2.0), None]));
assert_eq!(
result,
Series::new("name".into(), [Some(1.1f64), Some(2.0), None])
);
Ok(())
}
@ -1619,9 +1630,9 @@ mod tests {
column_type: Some(DataType::UInt8),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1u8), None]));
assert_eq!(result, Series::new("name".into(), [Some(1u8), None]));
Ok(())
}
@ -1632,9 +1643,9 @@ mod tests {
column_type: Some(DataType::UInt16),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1u16), None]));
assert_eq!(result, Series::new("name".into(), [Some(1u16), None]));
Ok(())
}
@ -1645,9 +1656,9 @@ mod tests {
column_type: Some(DataType::UInt32),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1u32), None]));
assert_eq!(result, Series::new("name".into(), [Some(1u32), None]));
Ok(())
}
@ -1658,9 +1669,9 @@ mod tests {
column_type: Some(DataType::UInt64),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1u64), None]));
assert_eq!(result, Series::new("name".into(), [Some(1u64), None]));
Ok(())
}
@ -1671,9 +1682,9 @@ mod tests {
column_type: Some(DataType::Int8),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1i8), None]));
assert_eq!(result, Series::new("name".into(), [Some(1i8), None]));
Ok(())
}
@ -1684,9 +1695,9 @@ mod tests {
column_type: Some(DataType::Int16),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1i16), None]));
assert_eq!(result, Series::new("name".into(), [Some(1i16), None]));
Ok(())
}
@ -1697,9 +1708,9 @@ mod tests {
column_type: Some(DataType::Int32),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1i32), None]));
assert_eq!(result, Series::new("name".into(), [Some(1i32), None]));
Ok(())
}
@ -1710,9 +1721,9 @@ mod tests {
column_type: Some(DataType::Int64),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(1i64), None]));
assert_eq!(result, Series::new("name".into(), [Some(1i64), None]));
Ok(())
}
@ -1730,9 +1741,12 @@ mod tests {
column_type: Some(DataType::Boolean),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(result, Series::new("name", [Some(true), Some(false), None]));
assert_eq!(
result,
Series::new("name".into(), [Some(true), Some(false), None])
);
Ok(())
}
@ -1746,11 +1760,11 @@ mod tests {
column_type: Some(DataType::String),
};
let result = typed_column_to_series("foo", column)?;
let result = typed_column_to_series("foo".into(), column)?;
assert_eq!(
result,
Series::new("name", [Some("barbaz".to_string()), None])
Series::new("name".into(), [Some("barbaz".to_string()), None])
);
Ok(())
}

View File

@ -181,7 +181,7 @@ impl NuDataFrame {
)?,
_ => {
let key = "0".to_string();
conversion::insert_value(value, key, &mut column_values, &maybe_schema)?
conversion::insert_value(value, key.into(), &mut column_values, &maybe_schema)?
}
}
}
@ -209,7 +209,7 @@ impl NuDataFrame {
let mut column_values: ColumnMap = IndexMap::new();
for column in columns {
let name = column.name().to_string();
let name = column.name().clone();
for value in column {
conversion::insert_value(value, name.clone(), &mut column_values, &maybe_schema)?;
}
@ -493,10 +493,9 @@ fn add_missing_columns(
})
.collect();
// todo - fix
let missing_exprs: Vec<Expr> = missing
.iter()
.map(|(name, dtype)| lit(Null {}).cast((*dtype).to_owned()).alias(name))
.map(|(name, dtype)| lit(Null {}).cast((*dtype).to_owned()).alias(*name))
.collect();
let df = if !missing.is_empty() {

View File

@ -126,7 +126,7 @@ impl NuDataFrame {
.iter()
.chain(other.df.get_columns())
.map(|s| {
let name = if columns.contains(&s.name()) {
let name = if columns.contains(&s.name().as_str()) {
format!("{}_{}", s.name(), "x")
} else {
columns.push(s.name());
@ -134,7 +134,7 @@ impl NuDataFrame {
};
let mut series = s.clone();
series.rename(&name);
series.rename(name.into());
series
})
.collect::<Vec<Series>>();

View File

@ -146,7 +146,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Result<Value, ShellError> {
Expr::Alias(expr, alias) => Ok(Value::record(
record! {
"expr" => expr_to_value(expr.as_ref(), span)?,
"alias" => Value::string(alias.as_ref(), span),
"alias" => Value::string(alias.as_str(), span),
},
span,
)),
@ -271,7 +271,7 @@ pub fn expr_to_value(expr: &Expr, span: Span) -> Result<Value, ShellError> {
)),
Expr::Cast {
expr,
data_type,
dtype: data_type,
options,
} => {
let cast_option_str = match options {

View File

@ -83,9 +83,8 @@ impl NuLazyFrame {
}
pub fn schema(&mut self) -> Result<NuSchema, ShellError> {
let internal_schema =
Arc::make_mut(&mut self.lazy)
.schema()
let internal_schema = Arc::make_mut(&mut self.lazy)
.collect_schema()
.map_err(|e| ShellError::GenericError {
error: "Error getting schema from lazy frame".into(),
msg: e.to_string(),

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use nu_protocol::{ShellError, Span, Value};
use polars::{
datatypes::UnknownKind,
prelude::{DataType, Field, Schema, SchemaRef, TimeUnit},
prelude::{DataType, Field, PlSmallStr, Schema, SchemaExt, SchemaRef, TimeUnit},
};
#[derive(Debug, Clone)]
@ -49,7 +49,7 @@ fn fields_to_value(fields: impl Iterator<Item = Field>, span: Span) -> Value {
let record = fields
.map(|field| {
let col = field.name().to_string();
let val = dtype_to_value(field.data_type(), span);
let val = dtype_to_value(field.dtype(), span);
(col, val)
})
.collect();
@ -78,11 +78,11 @@ fn value_to_fields(value: &Value, span: Span) -> Result<Vec<Field>, ShellError>
Value::Record { .. } => {
let fields = value_to_fields(val, span)?;
let dtype = DataType::Struct(fields);
Ok(Field::new(col, dtype))
Ok(Field::new(col.into(), dtype))
}
_ => {
let dtype = str_to_dtype(&val.coerce_string()?, span)?;
Ok(Field::new(col, dtype))
Ok(Field::new(col.into(), dtype))
}
})
.collect::<Result<Vec<Field>, ShellError>>()?;
@ -150,7 +150,10 @@ pub fn str_to_dtype(dtype: &str, span: Span) -> Result<DataType, ShellError> {
} else {
Some(next.to_string())
};
Ok(DataType::Datetime(time_unit, timezone))
Ok(DataType::Datetime(
time_unit,
timezone.map(PlSmallStr::from),
))
}
_ if dtype.starts_with("duration") => {
let inner = dtype.trim_start_matches("duration<").trim_end_matches('>');
@ -215,13 +218,13 @@ mod test {
let schema = value_to_schema(&value, Span::unknown()).unwrap();
let expected = Schema::from_iter(vec![
Field::new("name", DataType::String),
Field::new("age", DataType::Int32),
Field::new("name".into(), DataType::String),
Field::new("age".into(), DataType::Int32),
Field::new(
"address",
"address".into(),
DataType::Struct(vec![
Field::new("street", DataType::String),
Field::new("city", DataType::String),
Field::new("street".into(), DataType::String),
Field::new("city".into(), DataType::String),
]),
),
]);

View File

@ -1,4 +1,5 @@
use nu_protocol::{ShellError, Span, Spanned, Value};
use polars::prelude::PlSmallStr;
// Default value used when selecting rows from dataframe
pub const DEFAULT_ROWS: usize = 5;
@ -88,3 +89,46 @@ pub(crate) fn convert_columns_string(
Ok((res, col_span))
}
// Converts a Vec<Value> of string values into a Vec<PlSmallStr>, returning the
// names together with a Span covering the whole run of column arguments so
// errors can point at the full selection.
// todo - fix
#[allow(dead_code)]
pub(crate) fn convert_columns_sm_str(
    columns: Vec<Value>,
    span: Span,
) -> Result<(Vec<PlSmallStr>, Span), ShellError> {
    // Seed the merged span from the first column; an empty list is an error.
    let mut col_span = columns
        .first()
        .ok_or_else(|| ShellError::GenericError {
            error: "Empty column list".into(),
            msg: "Empty list found for command".into(),
            span: Some(span),
            help: None,
            inner: vec![],
        })?
        .span();

    let mut names: Vec<PlSmallStr> = Vec::with_capacity(columns.len());
    for value in columns {
        let value_span = value.span();
        // Only string values are valid column names; anything else aborts.
        let Value::String { val, .. } = value else {
            return Err(ShellError::GenericError {
                error: "Incorrect column format".into(),
                msg: "Only string as column name".into(),
                span: Some(value_span),
                help: None,
                inner: vec![],
            });
        };
        col_span = col_span.merge(value_span);
        names.push(val.into());
    }

    Ok((names, col_span))
}