Add string stream and binary stream, add text decoding (#570)

* WIP

* Add binary/string streams and text decoding

* Make string collection fallible

* Oops, forgot pretty hex

* Oops, forgot pretty hex

* clippy
This commit is contained in:
JT
2021-12-24 18:22:11 +11:00
committed by GitHub
parent 7f0921a14b
commit 3522bead97
50 changed files with 1633 additions and 119 deletions

View File

@ -11,6 +11,7 @@ build = "build.rs"
nu-engine = { path = "../nu-engine" }
nu-json = { path = "../nu-json" }
nu-path = { path = "../nu-path" }
nu-pretty-hex = { path = "../nu-pretty-hex" }
nu-protocol = { path = "../nu-protocol" }
nu-table = { path = "../nu-table" }
nu-term-grid = { path = "../nu-term-grid" }
@ -55,7 +56,6 @@ trash = { version = "2.0.2", optional = true }
unicode-segmentation = "1.8.0"
uuid = { version = "0.8.2", features = ["v4"] }
htmlescape = "0.3.1"
pretty-hex = "0.2.1"
zip = { version="0.5.9", optional=true }
lazy_static = "1.4.0"
strip-ansi-escapes = "0.1.1"
@ -66,6 +66,7 @@ digest = "0.10.0"
md5 = { package = "md-5", version = "0.10.0" }
sha2 = "0.10.0"
base64 = "0.13.0"
encoding_rs = "0.8.30"
num = { version = "0.4.0", optional = true }
[target.'cfg(unix)'.dependencies]

View File

@ -34,7 +34,7 @@ impl Command for Echo {
let n = to_be_echoed.len();
match n.cmp(&1usize) {
// More than one value is converted in a stream of values
std::cmp::Ordering::Greater => PipelineData::Stream(
std::cmp::Ordering::Greater => PipelineData::ListStream(
ValueStream::from_stream(to_be_echoed.into_iter(), engine_state.ctrlc.clone()),
None,
),

View File

@ -111,6 +111,7 @@ pub fn create_default_context() -> EngineState {
bind_command! {
BuildString,
Char,
Decode,
Format,
Parse,
Size,

View File

@ -72,7 +72,7 @@ fn getcol(
.map(move |x| Value::String { val: x, span })
.into_pipeline_data(engine_state.ctrlc.clone()))
}
PipelineData::Stream(stream, ..) => {
PipelineData::ListStream(stream, ..) => {
let v: Vec<_> = stream.into_iter().collect();
let input_cols = get_input_cols(v);
@ -81,7 +81,7 @@ fn getcol(
.map(move |x| Value::String { val: x, span })
.into_pipeline_data(engine_state.ctrlc.clone()))
}
PipelineData::Value(_v, ..) => {
PipelineData::Value(..) | PipelineData::StringStream(..) | PipelineData::ByteStream(..) => {
let cols = vec![];
let vals = vec![];
Ok(Value::Record { cols, vals, span }.into_pipeline_data())

View File

@ -86,7 +86,7 @@ fn dropcol(
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
}
PipelineData::Stream(stream, ..) => {
PipelineData::ListStream(stream, ..) => {
let mut output = vec![];
let v: Vec<_> = stream.into_iter().collect();
@ -123,6 +123,7 @@ fn dropcol(
Ok(Value::Record { cols, vals, span }.into_pipeline_data())
}
x => Ok(x),
}
}

View File

@ -76,7 +76,7 @@ impl Command for Each {
match input {
PipelineData::Value(Value::Range { .. }, ..)
| PipelineData::Value(Value::List { .. }, ..)
| PipelineData::Stream { .. } => Ok(input
| PipelineData::ListStream { .. } => Ok(input
.into_iter()
.enumerate()
.map(move |(idx, x)| {
@ -109,6 +109,79 @@ impl Command for Each {
}
})
.into_pipeline_data(ctrlc)),
PipelineData::ByteStream(stream, ..) => Ok(stream
.into_iter()
.enumerate()
.map(move |(idx, x)| {
let x = Value::Binary { val: x, span };
if let Some(var) = block.signature.get_positional(0) {
if let Some(var_id) = &var.var_id {
if numbered {
stack.add_var(
*var_id,
Value::Record {
cols: vec!["index".into(), "item".into()],
vals: vec![
Value::Int {
val: idx as i64,
span,
},
x,
],
span,
},
);
} else {
stack.add_var(*var_id, x);
}
}
}
match eval_block(&engine_state, &mut stack, &block, PipelineData::new(span)) {
Ok(v) => v.into_value(span),
Err(error) => Value::Error { error },
}
})
.into_pipeline_data(ctrlc)),
PipelineData::StringStream(stream, ..) => Ok(stream
.into_iter()
.enumerate()
.map(move |(idx, x)| {
let x = match x {
Ok(x) => Value::String { val: x, span },
Err(err) => return Value::Error { error: err },
};
if let Some(var) = block.signature.get_positional(0) {
if let Some(var_id) = &var.var_id {
if numbered {
stack.add_var(
*var_id,
Value::Record {
cols: vec!["index".into(), "item".into()],
vals: vec![
Value::Int {
val: idx as i64,
span,
},
x,
],
span,
},
);
} else {
stack.add_var(*var_id, x);
}
}
}
match eval_block(&engine_state, &mut stack, &block, PipelineData::new(span)) {
Ok(v) => v.into_value(span),
Err(error) => Value::Error { error },
}
})
.into_pipeline_data(ctrlc)),
PipelineData::Value(Value::Record { cols, vals, .. }, ..) => {
let mut output_cols = vec![];
let mut output_vals = vec![];

View File

@ -27,10 +27,11 @@ impl Command for Lines {
fn run(
&self,
engine_state: &EngineState,
_stack: &mut Stack,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
let head = call.head;
let skip_empty = call.has_flag("skip-emtpy");
match input {
#[allow(clippy::needless_collect)]
@ -53,7 +54,7 @@ impl Command for Lines {
Ok(iter.into_pipeline_data(engine_state.ctrlc.clone()))
}
PipelineData::Stream(stream, ..) => {
PipelineData::ListStream(stream, ..) => {
let iter = stream
.into_iter()
.filter_map(move |value| {
@ -81,10 +82,55 @@ impl Command for Lines {
Ok(iter.into_pipeline_data(engine_state.ctrlc.clone()))
}
PipelineData::StringStream(stream, span, ..) => {
let iter = stream
.into_iter()
.map(move |value| match value {
Ok(value) => value
.split(SPLIT_CHAR)
.filter_map(|s| {
if !s.is_empty() {
Some(Value::String {
val: s.into(),
span,
})
} else {
None
}
})
.collect::<Vec<Value>>(),
Err(err) => vec![Value::Error { error: err }],
})
.flatten();
Ok(iter.into_pipeline_data(engine_state.ctrlc.clone()))
}
PipelineData::Value(val, ..) => Err(ShellError::UnsupportedInput(
format!("Not supported input: {}", val.as_string()?),
call.head,
)),
PipelineData::ByteStream(..) => {
let config = stack.get_config()?;
//FIXME: Make sure this can fail in the future to let the user
//know to use a different encoding
let s = input.collect_string("", &config)?;
let lines = s
.split(SPLIT_CHAR)
.map(|s| s.to_string())
.collect::<Vec<String>>();
let iter = lines.into_iter().filter_map(move |s| {
if skip_empty && s.is_empty() {
None
} else {
Some(Value::string(s, head))
}
});
Ok(iter.into_pipeline_data(engine_state.ctrlc.clone()))
}
}
}
}

View File

@ -139,7 +139,7 @@ impl Command for ParEach {
.into_iter()
.flatten()
.into_pipeline_data(ctrlc)),
PipelineData::Stream(stream, ..) => Ok(stream
PipelineData::ListStream(stream, ..) => Ok(stream
.enumerate()
.par_bridge()
.map(move |(idx, x)| {
@ -179,6 +179,91 @@ impl Command for ParEach {
.into_iter()
.flatten()
.into_pipeline_data(ctrlc)),
PipelineData::StringStream(stream, ..) => Ok(stream
.enumerate()
.par_bridge()
.map(move |(idx, x)| {
let x = match x {
Ok(x) => Value::String { val: x, span },
Err(err) => return Value::Error { error: err }.into_pipeline_data(),
};
let block = engine_state.get_block(block_id);
let mut stack = stack.clone();
if let Some(var) = block.signature.get_positional(0) {
if let Some(var_id) = &var.var_id {
if numbered {
stack.add_var(
*var_id,
Value::Record {
cols: vec!["index".into(), "item".into()],
vals: vec![
Value::Int {
val: idx as i64,
span,
},
x,
],
span,
},
);
} else {
stack.add_var(*var_id, x);
}
}
}
match eval_block(&engine_state, &mut stack, block, PipelineData::new(span)) {
Ok(v) => v,
Err(error) => Value::Error { error }.into_pipeline_data(),
}
})
.collect::<Vec<_>>()
.into_iter()
.flatten()
.into_pipeline_data(ctrlc)),
PipelineData::ByteStream(stream, ..) => Ok(stream
.enumerate()
.par_bridge()
.map(move |(idx, x)| {
let x = Value::Binary { val: x, span };
let block = engine_state.get_block(block_id);
let mut stack = stack.clone();
if let Some(var) = block.signature.get_positional(0) {
if let Some(var_id) = &var.var_id {
if numbered {
stack.add_var(
*var_id,
Value::Record {
cols: vec!["index".into(), "item".into()],
vals: vec![
Value::Int {
val: idx as i64,
span,
},
x,
],
span,
},
);
} else {
stack.add_var(*var_id, x);
}
}
}
match eval_block(&engine_state, &mut stack, block, PipelineData::new(span)) {
Ok(v) => v,
Err(error) => Value::Error { error }.into_pipeline_data(),
}
})
.collect::<Vec<_>>()
.into_iter()
.flatten()
.into_pipeline_data(ctrlc)),
PipelineData::Value(Value::Record { cols, vals, .. }, ..) => {
let mut output_cols = vec![];
let mut output_vals = vec![];

View File

@ -82,7 +82,7 @@ fn reject(
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
}
PipelineData::Stream(stream, ..) => {
PipelineData::ListStream(stream, ..) => {
let mut output = vec![];
let v: Vec<_> = stream.into_iter().collect();
@ -119,6 +119,7 @@ fn reject(
Ok(Value::Record { cols, vals, span }.into_pipeline_data())
}
x => Ok(x),
}
}

View File

@ -95,7 +95,7 @@ fn select(
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
}
PipelineData::Stream(stream, ..) => Ok(stream
PipelineData::ListStream(stream, ..) => Ok(stream
.map(move |x| {
let mut cols = vec![];
let mut vals = vec![];
@ -130,6 +130,7 @@ fn select(
Ok(Value::Record { cols, vals, span }.into_pipeline_data())
}
_ => Ok(PipelineData::new(span)),
}
}

View File

@ -43,13 +43,23 @@ impl Command for Wrap {
span,
})
.into_pipeline_data(engine_state.ctrlc.clone())),
PipelineData::Stream(stream, ..) => Ok(stream
PipelineData::ListStream(stream, ..) => Ok(stream
.map(move |x| Value::Record {
cols: vec![name.clone()],
vals: vec![x],
span,
})
.into_pipeline_data(engine_state.ctrlc.clone())),
PipelineData::StringStream(stream, ..) => Ok(Value::String {
val: stream.into_string("")?,
span,
}
.into_pipeline_data()),
PipelineData::ByteStream(stream, ..) => Ok(Value::Binary {
val: stream.into_vec(),
span,
}
.into_pipeline_data()),
PipelineData::Value(input, ..) => Ok(Value::Record {
cols: vec![name],
vals: vec![input],

View File

@ -52,7 +52,7 @@ pub fn from_delimited_data(
name: Span,
config: &Config,
) -> Result<PipelineData, ShellError> {
let concat_string = input.collect_string("", config);
let concat_string = input.collect_string("", config)?;
Ok(
from_delimited_string_to_value(concat_string, noheaders, sep, name)

View File

@ -183,7 +183,7 @@ fn from_eml(
head: Span,
config: &Config,
) -> Result<PipelineData, ShellError> {
let value = input.collect_string("", config);
let value = input.collect_string("", config)?;
let body_preview = preview_body
.map(|b| b.item as usize)

View File

@ -93,7 +93,7 @@ END:VCALENDAR' | from ics",
}
fn from_ics(input: PipelineData, head: Span, config: &Config) -> Result<PipelineData, ShellError> {
let input_string = input.collect_string("", config);
let input_string = input.collect_string("", config)?;
let input_bytes = input_string.as_bytes();
let buf_reader = BufReader::new(input_bytes);
let parser = ical::IcalParser::new(buf_reader);

View File

@ -88,7 +88,7 @@ pub fn from_ini_string_to_value(s: String, span: Span) -> Result<Value, ShellErr
}
fn from_ini(input: PipelineData, head: Span, config: &Config) -> Result<PipelineData, ShellError> {
let concat_string = input.collect_string("", config);
let concat_string = input.collect_string("", config)?;
match from_ini_string_to_value(concat_string, head) {
Ok(x) => Ok(x.into_pipeline_data()),

View File

@ -76,7 +76,7 @@ impl Command for FromJson {
) -> Result<nu_protocol::PipelineData, ShellError> {
let span = call.head;
let config = stack.get_config().unwrap_or_default();
let mut string_input = input.collect_string("", &config);
let mut string_input = input.collect_string("", &config)?;
string_input.push('\n');
// TODO: turn this into a structured underline of the nu_json error

View File

@ -275,7 +275,7 @@ fn from_ssv(
let minimum_spaces: Option<Spanned<usize>> =
call.get_flag(engine_state, stack, "minimum-spaces")?;
let concat_string = input.collect_string("", &config);
let concat_string = input.collect_string("", &config)?;
let split_at = match minimum_spaces {
Some(number) => number.item,
None => DEFAULT_MINIMUM_SPACES,

View File

@ -74,7 +74,7 @@ b = [1, 2]' | from toml",
) -> Result<nu_protocol::PipelineData, ShellError> {
let span = call.head;
let config = stack.get_config().unwrap_or_default();
let mut string_input = input.collect_string("", &config);
let mut string_input = input.collect_string("", &config)?;
string_input.push('\n');
Ok(convert_string_to_value(string_input, span)?.into_pipeline_data())
}

View File

@ -54,7 +54,7 @@ impl Command for FromUrl {
}
fn from_url(input: PipelineData, head: Span, config: &Config) -> Result<PipelineData, ShellError> {
let concat_string = input.collect_string("", config);
let concat_string = input.collect_string("", config)?;
let result = serde_urlencoded::from_str::<Vec<(String, String)>>(&concat_string);

View File

@ -124,7 +124,7 @@ END:VCARD' | from vcf",
}
fn from_vcf(input: PipelineData, head: Span, config: &Config) -> Result<PipelineData, ShellError> {
let input_string = input.collect_string("", config);
let input_string = input.collect_string("", config)?;
let input_bytes = input_string.as_bytes();
let cursor = std::io::Cursor::new(input_bytes);
let parser = ical::VcardParser::new(cursor);

View File

@ -179,7 +179,7 @@ pub fn from_xml_string_to_value(s: String, span: Span) -> Result<Value, roxmltre
}
fn from_xml(input: PipelineData, head: Span, config: &Config) -> Result<PipelineData, ShellError> {
let concat_string = input.collect_string("", config);
let concat_string = input.collect_string("", config)?;
match from_xml_string_to_value(concat_string, head) {
Ok(x) => Ok(x.into_pipeline_data()),

View File

@ -206,7 +206,7 @@ pub fn from_yaml_string_to_value(s: String, span: Span) -> Result<Value, ShellEr
}
fn from_yaml(input: PipelineData, head: Span, config: &Config) -> Result<PipelineData, ShellError> {
let concat_string = input.collect_string("", config);
let concat_string = input.collect_string("", config)?;
match from_yaml_string_to_value(concat_string, head) {
Ok(x) => Ok(x.into_pipeline_data()),

View File

@ -444,7 +444,7 @@ fn html_value(value: Value, config: &Config) -> String {
let mut output_string = String::new();
match value {
Value::Binary { val, .. } => {
let output = pretty_hex::pretty_hex(&val);
let output = nu_pretty_hex::pretty_hex(&val);
output_string.push_str("<pre>");
output_string.push_str(&output);
output_string.push_str("</pre>");

View File

@ -62,7 +62,7 @@ pub fn calculate(
mf: impl Fn(&[Value], &Span) -> Result<Value, ShellError>,
) -> Result<Value, ShellError> {
match values {
PipelineData::Stream(s, ..) => helper_for_tables(&s.collect::<Vec<Value>>(), name, mf),
PipelineData::ListStream(s, ..) => helper_for_tables(&s.collect::<Vec<Value>>(), name, mf),
PipelineData::Value(Value::List { ref vals, .. }, ..) => match &vals[..] {
[Value::Record { .. }, _end @ ..] => helper_for_tables(vals, name, mf),
_ => mf(vals, &name),
@ -88,5 +88,9 @@ pub fn calculate(
mf(&new_vals?, &name)
}
PipelineData::Value(val, ..) => mf(&[val], &name),
_ => Err(ShellError::UnsupportedInput(
"Input data is not supported by this command.".to_string(),
name,
)),
}
}

View File

@ -71,13 +71,17 @@ the output of 'path parse' and 'path split' subcommands."#
PipelineData::Value(val, md) => {
Ok(PipelineData::Value(handle_value(val, &args, head), md))
}
PipelineData::Stream(stream, md) => Ok(PipelineData::Stream(
PipelineData::ListStream(stream, md) => Ok(PipelineData::ListStream(
ValueStream::from_stream(
stream.map(move |val| handle_value(val, &args, head)),
engine_state.ctrlc.clone(),
),
md,
)),
_ => Err(ShellError::UnsupportedInput(
"Input data is not supported by this command.".to_string(),
head,
)),
}
}

View File

@ -79,7 +79,7 @@ fn dice(
}
});
Ok(PipelineData::Stream(
Ok(PipelineData::ListStream(
ValueStream::from_stream(iter, engine_state.ctrlc.clone()),
None,
))

View File

@ -0,0 +1,107 @@
use encoding_rs::Encoding;
use nu_engine::CallExt;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Spanned, SyntaxShape,
Value,
};
/// The `decode` command: decodes raw bytes (a byte stream or a
/// `Value::Binary`) into a string using a caller-named text encoding.
#[derive(Clone)]
pub struct Decode;
impl Command for Decode {
    fn name(&self) -> &str {
        "decode"
    }

    fn usage(&self) -> &str {
        "Decode bytes as a string."
    }

    fn signature(&self) -> nu_protocol::Signature {
        Signature::build("decode")
            .required("encoding", SyntaxShape::String, "the text encoding to use")
            .category(Category::Strings)
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Decode the output of an external command",
            example: "cat myfile.q | decode utf-8",
            result: None,
        }]
    }

    /// Decodes the pipeline input with the encoding named by the first
    /// positional argument.
    ///
    /// Accepts either a `ByteStream` (e.g. external-command output), which is
    /// drained fully before decoding, or an in-memory `Value::Binary`. Any
    /// other input yields `ShellError::UnsupportedInput`.
    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let head = call.head;
        let encoding: Spanned<String> = call.req(engine_state, stack, 0)?;

        match input {
            PipelineData::ByteStream(stream, ..) => {
                // Collect the whole stream: text decoding cannot be done
                // chunk-by-chunk without risking splitting a multi-byte
                // sequence across chunk boundaries.
                let bytes: Vec<u8> = stream.flatten().collect();
                decode_bytes(&bytes, &encoding, head)
            }
            PipelineData::Value(Value::Binary { val: bytes, .. }, ..) => {
                decode_bytes(&bytes, &encoding, head)
            }
            _ => Err(ShellError::UnsupportedInput(
                "non-binary input".into(),
                head,
            )),
        }
    }
}

/// Resolves the user-supplied encoding label via `Encoding::for_label` and
/// decodes `bytes` into a `Value::String` spanned at `head`.
///
/// An unknown label produces a `SpannedLabeledError` pointing at the
/// encoding argument. Decoding itself is lossy (invalid sequences become
/// replacement characters), so it cannot fail.
fn decode_bytes(
    bytes: &[u8],
    encoding: &Spanned<String>,
    head: nu_protocol::Span,
) -> Result<PipelineData, ShellError> {
    let encoding = Encoding::for_label(encoding.item.as_bytes()).ok_or_else(|| {
        ShellError::SpannedLabeledError(
            format!(
                r#"{} is not a valid encoding, refer to https://docs.rs/encoding_rs/latest/encoding_rs/#statics for a valid list of encodings"#,
                encoding.item
            ),
            "invalid encoding".into(),
            encoding.span,
        )
    })?;
    // `decode` returns (text, encoding_used, had_errors); only the text is
    // needed here.
    let (decoded, ..) = encoding.decode(bytes);
    Ok(Value::String {
        val: decoded.to_string(),
        span: head,
    }
    .into_pipeline_data())
}
#[cfg(test)]
mod test {
    use super::*;

    // Runs the command's declared examples through the crate's shared
    // example harness. NOTE(review): the single example's `result` is
    // `None`, so presumably only parsing/signature is exercised, not the
    // decoded output — confirm against `test_examples`' behavior.
    #[test]
    fn test_examples() {
        crate::test_examples(Decode)
    }
}

View File

@ -151,7 +151,7 @@ fn format(
}
}
Ok(PipelineData::Stream(
Ok(PipelineData::ListStream(
ValueStream::from_stream(list.into_iter(), None),
None,
))

View File

@ -1,5 +1,6 @@
mod build_string;
mod char_;
mod decode;
mod format;
mod parse;
mod size;
@ -8,6 +9,7 @@ mod str_;
pub use build_string::BuildString;
pub use char_::Char;
pub use decode::*;
pub use format::*;
pub use parse::*;
pub use size::Size;

View File

@ -126,7 +126,7 @@ fn operate(
}
}
Ok(PipelineData::Stream(
Ok(PipelineData::ListStream(
ValueStream::from_stream(parsed.into_iter(), ctrlc),
None,
))

View File

@ -1,4 +1,3 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::io::{BufRead, BufReader, Write};
@ -10,14 +9,14 @@ use std::sync::mpsc;
use nu_engine::env_to_strings;
use nu_protocol::engine::{EngineState, Stack};
use nu_protocol::{ast::Call, engine::Command, ShellError, Signature, SyntaxShape, Value};
use nu_protocol::{Category, Config, IntoInterruptiblePipelineData, PipelineData, Span, Spanned};
use nu_protocol::{ByteStream, Category, Config, PipelineData, Spanned};
use itertools::Itertools;
use nu_engine::CallExt;
use regex::Regex;
const OUTPUT_BUFFER_SIZE: usize = 8192;
const OUTPUT_BUFFER_SIZE: usize = 1024;
#[derive(Clone)]
pub struct External;
@ -137,6 +136,7 @@ impl<'call> ExternalCommand<'call> {
config: Config,
) -> Result<PipelineData, ShellError> {
let mut process = self.create_command();
let head = self.name.span;
let ctrlc = engine_state.ctrlc.clone();
@ -223,11 +223,7 @@ impl<'call> ExternalCommand<'call> {
// from bytes to String. If no replacements are required, then the
// borrowed value is a proper UTF-8 string. The Owned option represents
// a string where the values had to be replaced, thus marking it as bytes
let data = match String::from_utf8_lossy(bytes) {
Cow::Borrowed(s) => Data::String(s.into()),
Cow::Owned(_) => Data::Bytes(bytes.to_vec()),
};
let bytes = bytes.to_vec();
let length = bytes.len();
buf_read.consume(length);
@ -237,7 +233,7 @@ impl<'call> ExternalCommand<'call> {
}
}
match tx.send(data) {
match tx.send(bytes) {
Ok(_) => continue,
Err(_) => break,
}
@ -249,11 +245,16 @@ impl<'call> ExternalCommand<'call> {
Ok(_) => Ok(()),
}
});
// The ValueStream is consumed by the next expression in the pipeline
let value =
ChannelReceiver::new(rx, self.name.span).into_pipeline_data(output_ctrlc);
let receiver = ChannelReceiver::new(rx);
Ok(value)
Ok(PipelineData::ByteStream(
ByteStream {
stream: Box::new(receiver),
ctrlc: output_ctrlc,
},
head,
None,
))
}
}
}
@ -345,42 +346,24 @@ fn trim_enclosing_quotes(input: &str) -> String {
}
}
// The piped data from stdout from the external command can be either String
// or binary. We use this enum to pass the data from the spawned process
#[derive(Debug)]
enum Data {
String(String),
Bytes(Vec<u8>),
}
// Receiver used for the ValueStream
// It implements iterator so it can be used as a ValueStream
struct ChannelReceiver {
rx: mpsc::Receiver<Data>,
span: Span,
rx: mpsc::Receiver<Vec<u8>>,
}
impl ChannelReceiver {
pub fn new(rx: mpsc::Receiver<Data>, span: Span) -> Self {
Self { rx, span }
pub fn new(rx: mpsc::Receiver<Vec<u8>>) -> Self {
Self { rx }
}
}
impl Iterator for ChannelReceiver {
type Item = Value;
type Item = Vec<u8>;
fn next(&mut self) -> Option<Self::Item> {
match self.rx.recv() {
Ok(v) => match v {
Data::String(s) => Some(Value::String {
val: s,
span: self.span,
}),
Data::Bytes(b) => Some(Value::Binary {
val: b,
span: self.span,
}),
},
Ok(v) => Some(v),
Err(_) => None,
}
}

View File

@ -86,7 +86,7 @@ prints out the list properly."#
Ok(PipelineData::new(call.head))
}
}
PipelineData::Stream(stream, ..) => {
PipelineData::ListStream(stream, ..) => {
// dbg!("value::stream");
let data = convert_to_list(stream, &config, call.head);
if let Some(items) = data {

View File

@ -4,8 +4,8 @@ use nu_engine::{env_to_string, CallExt};
use nu_protocol::ast::{Call, PathMember};
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
Category, Config, DataSource, IntoInterruptiblePipelineData, IntoPipelineData, PipelineData,
PipelineMetadata, ShellError, Signature, Span, SyntaxShape, Value, ValueStream,
Category, Config, DataSource, IntoPipelineData, PipelineData, PipelineMetadata, ShellError,
Signature, Span, StringStream, SyntaxShape, Value, ValueStream,
};
use nu_table::{StyledString, TextStyle, Theme};
use std::sync::atomic::{AtomicBool, Ordering};
@ -47,6 +47,7 @@ impl Command for Table {
call: &Call,
input: PipelineData,
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
let head = call.head;
let ctrlc = engine_state.ctrlc.clone();
let config = stack.get_config().unwrap_or_default();
let color_hm = get_color_config(&config);
@ -60,6 +61,20 @@ impl Command for Table {
};
match input {
PipelineData::ByteStream(stream, ..) => Ok(PipelineData::StringStream(
StringStream::from_stream(
stream.map(move |x| {
Ok(if x.iter().all(|x| x.is_ascii()) {
format!("{}", String::from_utf8_lossy(&x))
} else {
format!("{}\n", nu_pretty_hex::pretty_hex(&x))
})
}),
ctrlc,
),
head,
None,
)),
PipelineData::Value(Value::List { vals, .. }, ..) => {
let table = convert_to_table(row_offset, &vals, ctrlc, &config, call.head)?;
@ -75,7 +90,7 @@ impl Command for Table {
Ok(PipelineData::new(call.head))
}
}
PipelineData::Stream(stream, metadata) => {
PipelineData::ListStream(stream, metadata) => {
let stream = match metadata {
Some(PipelineMetadata {
data_source: DataSource::Ls,
@ -161,14 +176,20 @@ impl Command for Table {
let head = call.head;
Ok(PagingTableCreator {
row_offset,
config,
ctrlc: ctrlc.clone(),
Ok(PipelineData::StringStream(
StringStream::from_stream(
PagingTableCreator {
row_offset,
config,
ctrlc: ctrlc.clone(),
head,
stream,
},
ctrlc,
),
head,
stream,
}
.into_pipeline_data(ctrlc))
None,
))
}
PipelineData::Value(Value::Record { cols, vals, .. }, ..) => {
let mut output = vec![];
@ -363,7 +384,7 @@ struct PagingTableCreator {
}
impl Iterator for PagingTableCreator {
type Item = Value;
type Item = Result<String, ShellError>;
fn next(&mut self) -> Option<Self::Item> {
let mut batch = vec![];
@ -418,12 +439,9 @@ impl Iterator for PagingTableCreator {
Ok(Some(table)) => {
let result = nu_table::draw_table(&table, term_width, &color_hm, &self.config);
Some(Value::String {
val: result,
span: self.head,
})
Some(Ok(result))
}
Err(err) => Some(Value::Error { error: err }),
Err(err) => Some(Err(err)),
_ => None,
}
}