Move fetch to extra and clean up some code (#664)

* Move fetch to extra

* Move byte stream code to a function instead of copying it twice

* Fix formatting issues

* Make fetch a default command

* Fix formatting
Ștefan 2022-01-06 00:06:16 +01:00 committed by GitHub
parent d0c280f6cc
commit 47544ad219
5 changed files with 70 additions and 93 deletions
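
The second bullet above ("Move byte stream code to a function instead of copying it twice") is the heart of the cleanup: the two identical blocks that wrapped the response body in a `BufReader` and boxed it into a `ByteStream` are replaced by a single `response_to_buffer` helper, visible in the hunk that adds it below. The following standalone sketch illustrates the same pattern with simplified stand-in types; the real `ByteStream`, `BufferedReader`, and `PipelineData` live in Nushell's crates and carry more fields than shown here.

```rust
use std::io::{BufRead, BufReader, Read};

/// Simplified stand-in for the BufferedReader in the diff below: it turns any
/// reader into an iterator of byte chunks by draining the BufReader's buffer.
struct BufferedReader<R: Read> {
    input: BufReader<R>,
}

impl<R: Read> Iterator for BufferedReader<R> {
    type Item = Vec<u8>;

    fn next(&mut self) -> Option<Self::Item> {
        let buffer = self.input.fill_buf().ok()?;
        if buffer.is_empty() {
            return None;
        }
        let chunk = buffer.to_vec();
        let len = chunk.len();
        self.input.consume(len);
        Some(chunk)
    }
}

/// The extracted helper: instead of repeating the BufReader-plus-stream
/// construction in both match arms, wrap the response body exactly once.
fn response_to_buffer<R: Read + 'static>(response: R) -> Box<dyn Iterator<Item = Vec<u8>>> {
    Box::new(BufferedReader {
        input: BufReader::new(response),
    })
}

fn main() {
    // Any reader works here; the real command passes the HTTP response body.
    let fake_response = std::io::Cursor::new(b"hello world".to_vec());
    let total: usize = response_to_buffer(fake_response)
        .map(|chunk| chunk.len())
        .sum();
    assert_eq!(total, 11);
}
```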

View File

@@ -59,7 +59,6 @@ pretty_assertions = "1.0.0"

 [features]
 plugin = ["nu-plugin", "nu-parser/plugin", "nu-command/plugin", "nu-protocol/plugin", "nu-engine/plugin"]
-fetch-support = ["nu-command/fetch"]
 default = [
   "plugin",
   "inc",

View File

@@ -68,7 +68,7 @@ sha2 = "0.10.0"
 base64 = "0.13.0"
 encoding_rs = "0.8.30"
 num = { version = "0.4.0", optional = true }
-reqwest = {version = "0.11", features = ["blocking"], optional = true }
+reqwest = {version = "0.11", features = ["blocking"] }
 mime = "0.3.16"

 [target.'cfg(unix)'.dependencies]
@@ -88,7 +88,6 @@ features = [
 trash-support = ["trash"]
 plugin = ["nu-parser/plugin"]
 dataframe = ["polars", "num"]
-fetch = ["reqwest"]

 [build-dependencies]
 shadow-rs = "0.8.1"
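
With `optional = true` and the `fetch = ["reqwest"]` feature gone, reqwest (with its blocking feature, as configured in the hunk above) is compiled unconditionally. For orientation, a minimal sketch of that blocking API used on its own; the URL is only a placeholder:

```rust
fn main() -> Result<(), reqwest::Error> {
    // reqwest's blocking client, the API the fetch command builds on.
    // Requires reqwest with the "blocking" cargo feature, as in the hunk above.
    let response = reqwest::blocking::get("https://example.com")?;
    println!("status: {}", response.status());
    Ok(())
}
```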

View File

@@ -271,6 +271,7 @@ pub fn create_default_context(cwd: impl AsRef<Path>) -> EngineState {

         // Network
         bind_command! {
+            Fetch,
             Url,
             UrlHost,
             UrlPath,
@@ -304,9 +305,6 @@ pub fn create_default_context(cwd: impl AsRef<Path>) -> EngineState {

         #[cfg(feature = "plugin")]
         bind_command!(Register);

-        #[cfg(feature = "fetch")]
-        bind_command!(Fetch);
-
         // This is a WIP proof of concept
         // bind_command!(ListGitBranches, Git, GitCheckout, Source);
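
These two hunks are what "Make fetch a default command" amounts to: the `#[cfg(feature = "fetch")]` guard is dropped and `Fetch` moves into the always-built Network block. A tiny standalone illustration of that difference, using a toy registry; the `Command` trait, `Registry`, and this `Fetch` are invented for the example, not Nushell's types:

```rust
// Toy registry showing feature-gated vs. unconditional command registration.
trait Command {
    fn name(&self) -> &'static str;
}

struct Fetch;
impl Command for Fetch {
    fn name(&self) -> &'static str {
        "fetch"
    }
}

#[derive(Default)]
struct Registry {
    commands: Vec<Box<dyn Command>>,
}

fn main() {
    let mut registry = Registry::default();

    // Before this commit: only compiled in when built with `--features fetch`.
    // #[cfg(feature = "fetch")]
    // registry.commands.push(Box::new(Fetch));

    // After this commit: registered unconditionally, like the other network commands.
    registry.commands.push(Box::new(Fetch));

    assert!(registry.commands.iter().any(|c| c.name() == "fetch"));
}
```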

View File

@@ -7,6 +7,7 @@ use nu_protocol::ByteStream;
 use nu_protocol::{
     Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Value,
 };
+use reqwest::blocking::Response;

 use std::io::{BufRead, BufReader, Read};
@@ -162,21 +163,10 @@ fn helper(
     }

     match request.send() {
-        Ok(resp) => {
-            // let temp = std::fs::File::create("temp_dwl.txt")?;
-            // let mut b = BufWriter::new(temp);
-            // let _bytes = resp.copy_to(&mut b);
-            // let temp1 = std::fs::File::open("temp_dwl.txt")?;
-            // let a = BufReader::new(temp1);
-            // TODO I guess we should check if all bytes were written/read...
-
-            match resp.headers().get("content-type") {
+        Ok(resp) => match resp.headers().get("content-type") {
             Some(content_type) => {
                 let content_type = content_type.to_str().map_err(|e| {
-                    ShellError::LabeledError(
-                        e.to_string(),
-                        "MIME type were invalid".to_string(),
-                    )
+                    ShellError::LabeledError(e.to_string(), "MIME type were invalid".to_string())
                 })?;
                 let content_type = mime::Mime::from_str(content_type).map_err(|_| {
                     ShellError::LabeledError(
@@ -206,18 +196,7 @@ fn helper(
                     _ => Some(content_type.subtype().to_string()),
                 };

-                let buffered_input = BufReader::new(resp);
-                let output = PipelineData::ByteStream(
-                    ByteStream {
-                        stream: Box::new(BufferedReader {
-                            input: buffered_input,
-                        }),
-                        ctrlc: engine_state.ctrlc.clone(),
-                    },
-                    span,
-                    None,
-                );
-
+                let output = response_to_buffer(resp, engine_state, span);

                 if raw {
                     return Ok(output);
@@ -237,23 +216,8 @@ fn helper(
                         Ok(output)
                     }
                 }
-                None => {
-                    let buffered_input = BufReader::new(resp);
-                    let output = PipelineData::ByteStream(
-                        ByteStream {
-                            stream: Box::new(BufferedReader {
-                                input: buffered_input,
-                            }),
-                            ctrlc: engine_state.ctrlc.clone(),
-                        },
-                        span,
-                        None,
-                    );
-
-                    Ok(output)
-                }
-            }
-        }
+            None => Ok(response_to_buffer(resp, engine_state, span)),
+        },
         Err(e) if e.is_timeout() => Err(ShellError::NetworkFailure(
             format!("Request to {} has timed out", requested_url),
             span,
@@ -327,6 +291,25 @@ impl<R: Read> Iterator for BufferedReader<R> {
     }
 }

+fn response_to_buffer(
+    response: Response,
+    engine_state: &EngineState,
+    span: Span,
+) -> nu_protocol::PipelineData {
+    let buffered_input = BufReader::new(response);
+
+    PipelineData::ByteStream(
+        ByteStream {
+            stream: Box::new(BufferedReader {
+                input: buffered_input,
+            }),
+            ctrlc: engine_state.ctrlc.clone(),
+        },
+        span,
+        None,
+    )
+}
+
 // Only panics if the user agent is invalid but we define it statically so either
 // it always or never fails
 #[allow(clippy::unwrap_used)]
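
As context for the content-type handling visible above: the command relies on the mime crate (pinned in the Cargo.toml hunk earlier) to decide how the response body should be parsed, keying off the MIME subtype. A small sketch of that dependency in isolation, with the error handling simplified; it only needs `mime = "0.3"` to run:

```rust
use std::str::FromStr;

fn main() {
    // Parse a Content-Type header value and inspect its parts, the way the
    // fetch command picks a converter from the subtype (e.g. "json").
    let content_type = mime::Mime::from_str("application/json").expect("valid static string");

    assert_eq!(content_type.type_(), mime::APPLICATION);
    assert_eq!(content_type.subtype().as_str(), "json");
}
```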

View File

@@ -1,7 +1,5 @@
-#[cfg(feature = "fetch")]
 mod fetch;
 mod url;

 pub use self::url::*;
-#[cfg(feature = "fetch")]
 pub use fetch::SubCommand as Fetch;