// nushell/crates/nu-cli/src/commands/from_sqlite.rs

use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
use rusqlite::{types::ValueRef, Connection, Row, NO_PARAMS};
use std::io::Write;
use std::path::Path;
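
/// `from-sqlite` command: takes binary data from the pipeline, parses it as a
/// SQLite database, and emits one row per table found in it.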
pub struct FromSQLite;

impl WholeStreamCommand for FromSQLite {
    fn name(&self) -> &str {
        "from-sqlite"
    }

    fn signature(&self) -> Signature {
        Signature::build("from-sqlite")
    }

    fn usage(&self) -> &str {
        "Parse binary data as sqlite .db and create table."
    }

    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        from_sqlite(args, registry)
    }
}
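
/// `from-db` command: the same implementation as `from-sqlite`, exposed under
/// the `from-db` name.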
pub struct FromDB;

impl WholeStreamCommand for FromDB {
    fn name(&self) -> &str {
        "from-db"
    }

    fn signature(&self) -> Signature {
        Signature::build("from-db")
    }

    fn usage(&self) -> &str {
        "Parse binary data as db and create table."
    }

    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        from_sqlite(args, registry)
    }
}
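
/// Opens the SQLite database at `path`, queries `sqlite_master` for every
/// table, and returns a Nu table whose rows hold each table's name and values.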
pub fn convert_sqlite_file_to_nu_value(
    path: &Path,
    tag: impl Into<Tag> + Clone,
) -> Result<Value, rusqlite::Error> {
    let conn = Connection::open(path)?;

    let mut meta_out = Vec::new();
    let mut meta_stmt = conn.prepare("select name from sqlite_master where type='table'")?;
    let mut meta_rows = meta_stmt.query(NO_PARAMS)?;

    while let Some(meta_row) = meta_rows.next()? {
        let table_name: String = meta_row.get(0)?;
        let mut meta_dict = TaggedDictBuilder::new(tag.clone());
        let mut out = Vec::new();
        let mut table_stmt = conn.prepare(&format!("select * from [{}]", table_name))?;
        let mut table_rows = table_stmt.query(NO_PARAMS)?;

        while let Some(table_row) = table_rows.next()? {
            out.push(convert_sqlite_row_to_nu_value(table_row, tag.clone())?)
        }

        meta_dict.insert_value(
            "table_name".to_string(),
            UntaggedValue::Primitive(Primitive::String(table_name)).into_value(tag.clone()),
        );
        meta_dict.insert_value(
            "table_values",
            UntaggedValue::Table(out).into_value(tag.clone()),
        );
        meta_out.push(meta_dict.into_value());
    }

    let tag = tag.into();
    Ok(UntaggedValue::Table(meta_out).into_value(tag))
}
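
/// Converts a single SQLite row into a Nu row (a dictionary keyed by column name).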
fn convert_sqlite_row_to_nu_value(
    row: &Row,
    tag: impl Into<Tag> + Clone,
) -> Result<Value, rusqlite::Error> {
    let mut collected = TaggedDictBuilder::new(tag.clone());
    for (i, c) in row.columns().iter().enumerate() {
        collected.insert_value(
            c.name().to_string(),
            convert_sqlite_value_to_nu_value(row.get_raw(i), tag.clone()),
        );
    }
    Ok(collected.into_value())
}
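
/// Maps a raw SQLite value onto the closest Nu primitive:
/// NULL becomes an empty string, INTEGER and REAL become numbers,
/// TEXT becomes a string, and BLOB becomes binary data.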
fn convert_sqlite_value_to_nu_value(value: ValueRef, tag: impl Into<Tag> + Clone) -> Value {
    match value {
        ValueRef::Null => {
            UntaggedValue::Primitive(Primitive::String(String::from(""))).into_value(tag)
        }
        ValueRef::Integer(i) => UntaggedValue::int(i).into_value(tag),
        ValueRef::Real(f) => UntaggedValue::decimal(f).into_value(tag),
        ValueRef::Text(s) => {
            // SQLite text is not guaranteed to be valid UTF-8, so convert it lossily.
            UntaggedValue::Primitive(Primitive::String(String::from_utf8_lossy(s).to_string()))
                .into_value(tag)
        }
        ValueRef::Blob(u) => UntaggedValue::binary(u.to_owned()).into_value(tag),
    }
}
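
/// Writes the raw bytes to a named temporary file so rusqlite can open them as
/// a database file, then converts the contents into a Nu value.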
pub fn from_sqlite_bytes_to_value(
    mut bytes: Vec<u8>,
    tag: impl Into<Tag> + Clone,
) -> Result<Value, std::io::Error> {
    // FIXME: should probably write a sqlite virtual filesystem
    // that will allow us to use bytes as a file to avoid this
    // write out, but this will require C code. Might be
    // best done as a PR to rusqlite.
    let mut tempfile = tempfile::NamedTempFile::new()?;
    tempfile.write_all(bytes.as_mut_slice())?;
    match convert_sqlite_file_to_nu_value(tempfile.path(), tag) {
        Ok(value) => Ok(value),
        Err(e) => Err(std::io::Error::new(std::io::ErrorKind::Other, e)),
    }
}
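
/// Shared implementation for `from-sqlite` and `from-db`: collects the binary
/// input from the pipeline, parses it as SQLite, and streams the resulting
/// rows back out, reporting a labeled error for non-binary or unparsable input.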
fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
    let args = args.evaluate_once(registry)?;
    let tag = args.name_tag();
    let input = args.input;

    let stream = async_stream! {
        let values: Vec<Value> = input.values.collect().await;

        for value in values {
            let value_tag = &value.tag;
            match value.value {
                UntaggedValue::Primitive(Primitive::Binary(vb)) =>
                    match from_sqlite_bytes_to_value(vb, tag.clone()) {
                        Ok(x) => match x {
                            Value { value: UntaggedValue::Table(list), .. } => {
                                for l in list {
                                    yield ReturnSuccess::value(l);
                                }
                            }
                            _ => yield ReturnSuccess::value(x),
                        }
                        Err(err) => {
                            println!("{:?}", err);

                            yield Err(ShellError::labeled_error_with_secondary(
                                "Could not parse as SQLite",
                                "input cannot be parsed as SQLite",
                                &tag,
                                "value originates from here",
                                value_tag,
                            ))
                        }
                    }
                _ => yield Err(ShellError::labeled_error_with_secondary(
                    "Expected binary data from pipeline",
                    "requires binary data input",
                    &tag,
                    "value originates from here",
                    value_tag,
                )),
            }
        }
    };

    Ok(stream.to_output_stream())
}