Fix clippy lint and disable broken lint (#3865)

JT 2021-07-30 08:11:47 +12:00 committed by GitHub
parent 9696e4d315
commit e602647d4d
28 changed files with 52 additions and 52 deletions
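
Most of the hunks below make the same kind of fix: a call site passes `&x` where the callee already takes a reference, so clippy reports a redundant borrow and the extra `&` is dropped. The CI change additionally passes `-A clippy::needless_collect` to allow (silence) that lint, the "broken lint" from the title. A minimal sketch of the borrow pattern, using illustrative names that are not from the Nushell sources:

// Hedged sketch, not Nushell code: borrowing a value that is already a
// reference yields a `&&str` that the compiler deref-coerces right back,
// and clippy flags the redundant `&`.
fn greet(name: &str) {
    println!("hello, {}", name);
}

fn main() {
    let name: &str = "nu";
    greet(&name); // clippy: needless borrow
    greet(name);  // the shape of the fix applied throughout this commit
}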

View File

@@ -49,7 +49,7 @@ steps:
   - bash: RUSTFLAGS="-D warnings" cargo test --all
     condition: eq(variables['style'], 'unflagged')
     displayName: Run tests
-  - bash: RUSTFLAGS="-D warnings" cargo clippy --all -- -D clippy::unwrap_used
+  - bash: RUSTFLAGS="-D warnings" cargo clippy --all -- -D clippy::unwrap_used -A clippy::needless_collect
     condition: eq(variables['style'], 'unflagged')
     displayName: Check clippy lints
   - bash: cd samples/wasm && npm install wasm-pack && node ./node_modules/wasm-pack/run.js build

View File

@@ -121,9 +121,9 @@ impl App {
 use logger::{configure, debug_filters, logger, trace_filters};
 logger(|builder| {
-configure(&self, builder)?;
-trace_filters(&self, builder)?;
-debug_filters(&self, builder)?;
+configure(self, builder)?;
+trace_filters(self, builder)?;
+debug_filters(self, builder)?;
 Ok(())
 })?;
@@ -298,7 +298,7 @@ impl App {
 }
 pub fn parse(&self, args: &str) -> Result<(), ShellError> {
-self.parser.parse(&args).map(|options| {
+self.parser.parse(args).map(|options| {
 self.options.swap(&options);
 })
 }

View File

@@ -111,7 +111,7 @@ impl OptionsParser for NuParser {
 .flatten();
 if let Some(value) = value {
-options.put(&k, value);
+options.put(k, value);
 }
 });
 }

View File

@@ -63,7 +63,7 @@ pub fn letcmd(args: CommandArgs) -> Result<ActionStream, ShellError> {
 .expect("Internal error: type checker should require args");
 let var_name = positional[0].var_name()?;
-let rhs_raw = evaluate_baseline_expr(&positional[2], &ctx)?;
+let rhs_raw = evaluate_baseline_expr(&positional[2], ctx)?;
 let tag: Tag = positional[2].span.into();
 let rhs: CapturedBlock = FromValue::from_value(&rhs_raw)?;
@@ -98,7 +98,7 @@ pub fn letcmd(args: CommandArgs) -> Result<ActionStream, ShellError> {
 };
 ctx.scope.enter_scope();
-let value = evaluate_baseline_expr(expr, &ctx);
+let value = evaluate_baseline_expr(expr, ctx);
 ctx.scope.exit_scope();
 let value = value?;

View File

@@ -51,7 +51,7 @@ pub fn source(args: CommandArgs) -> Result<ActionStream, ShellError> {
 let contents = std::fs::read_to_string(&expand_path(Cow::Borrowed(Path::new(&filename.item))));
 match contents {
 Ok(contents) => {
-let result = script::run_script_standalone(contents, true, &ctx, false);
+let result = script::run_script_standalone(contents, true, ctx, false);
 if let Err(err) = result {
 ctx.error(err);

View File

@@ -175,7 +175,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
 UntaggedValue::DataFrame(df) => {
 let df = df.as_ref();
-let res = perform_dataframe_aggregation(&df, op, &operation.tag)?;
+let res = perform_dataframe_aggregation(df, op, &operation.tag)?;
 Ok(OutputStream::one(NuDataFrame::dataframe_to_value(res, tag)))
 }

View File

@@ -73,7 +73,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
 let casted = series.bool().map_err(|e| {
 parse_polars_error(
 &e,
-&&series_span,
+&series_span,
 Some("Perhaps you want to use a series with booleans as mask"),
 )
 })?;
@@ -82,7 +82,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
 let res = df
 .as_ref()
-.filter(&casted)
+.filter(casted)
 .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;
 Ok(OutputStream::one(NuDataFrame::dataframe_to_value(res, tag)))

View File

@@ -180,11 +180,11 @@ fn check_column_datatypes<T: AsRef<str>>(
 for (l, r) in l_cols.iter().zip(r_cols.iter()) {
 let l_series = df_l
 .column(l.as_ref())
-.map_err(|e| parse_polars_error::<&str>(&e, &l_col_span, None))?;
+.map_err(|e| parse_polars_error::<&str>(&e, l_col_span, None))?;
 let r_series = df_r
 .column(r.as_ref())
-.map_err(|e| parse_polars_error::<&str>(&e, &r_col_span, None))?;
+.map_err(|e| parse_polars_error::<&str>(&e, r_col_span, None))?;
 if l_series.dtype() != r_series.dtype() {
 return Err(ShellError::labeled_error_with_secondary(

View File

@@ -139,11 +139,11 @@ fn check_column_datatypes<T: AsRef<str>>(
 for w in cols.windows(2) {
 let l_series = df
 .column(w[0].as_ref())
-.map_err(|e| parse_polars_error::<&str>(&e, &col_span, None))?;
+.map_err(|e| parse_polars_error::<&str>(&e, col_span, None))?;
 let r_series = df
 .column(w[1].as_ref())
-.map_err(|e| parse_polars_error::<&str>(&e, &col_span, None))?;
+.map_err(|e| parse_polars_error::<&str>(&e, col_span, None))?;
 if l_series.dtype() != r_series.dtype() {
 return Err(ShellError::labeled_error_with_secondary(

View File

@@ -85,7 +85,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
 )
 })?;
-let mut res = chunked.concat(&other_chunked);
+let mut res = chunked.concat(other_chunked);
 res.rename(series.name());

View File

@@ -99,7 +99,7 @@ fn command(args: CommandArgs) -> Result<OutputStream, ShellError> {
 let rhs = evaluate_baseline_expr(&expression.right, &args.context)?;
-filter_dataframe(args, &col_name, &col_name_span, &rhs, &expression.op)
+filter_dataframe(args, col_name, col_name_span, &rhs, &expression.op)
 }
 macro_rules! comparison_arm {

View File

@@ -86,7 +86,7 @@ pub fn set_env(args: CommandArgs) -> Result<ActionStream, ShellError> {
 ctx.scope.enter_scope();
 ctx.scope.add_vars(&captured.entries);
-let value = evaluate_baseline_expr(&expr, &ctx);
+let value = evaluate_baseline_expr(&expr, ctx);
 ctx.scope.exit_scope();

View File

@@ -85,10 +85,10 @@ pub fn load_env(args: CommandArgs) -> Result<ActionStream, ShellError> {
 let ctx = &args.context;
 if let Some(values) = args.opt::<Vec<Value>>(0)? {
-load_env_from_table(values, &ctx)?;
+load_env_from_table(values, ctx)?;
 }
-load_env_from_table(args.input, &ctx)?;
+load_env_from_table(args.input, ctx)?;
 Ok(ActionStream::empty())
 }

View File

@@ -109,7 +109,7 @@ fn with_env(args: CommandArgs) -> Result<ActionStream, ShellError> {
 context.scope.add_env(env);
 context.scope.add_vars(&block.captured.entries);
-let result = run_block(&block.block, &context, args.input, external_redirection);
+let result = run_block(&block.block, context, args.input, external_redirection);
 context.scope.exit_scope();
 result.map(|x| x.into_action_stream())

View File

@@ -67,7 +67,7 @@ fn collect(args: CommandArgs) -> Result<OutputStream, ShellError> {
 let result = run_block(
 &block.block,
-&context,
+context,
 InputStream::empty(),
 external_redirection,
 );

View File

@@ -52,7 +52,7 @@ fn merge(args: CommandArgs) -> Result<ActionStream, ShellError> {
 context.scope.add_vars(&block.captured.entries);
 let result = run_block(
 &block.block,
-&context,
+context,
 InputStream::empty(),
 ExternalRedirection::Stdout,
 );

View File

@@ -37,7 +37,6 @@ impl WholeStreamCommand for Reverse {
 }
 }
-#[allow(clippy::needless_collect)]
 fn reverse(args: CommandArgs) -> Result<ActionStream, ShellError> {
 let input = args.input.collect::<Vec<_>>();
 Ok((input.into_iter().rev().map(ReturnSuccess::value)).into_action_stream())

View File

@@ -69,10 +69,10 @@ fn from_json(args: CommandArgs) -> Result<OutputStream, ShellError> {
 let concat_string = args.input.collect_string(name_tag.clone())?;
-let string_clone: Vec<_> = concat_string.item.lines().map(|x| x.to_string()).collect();
 if objects {
-Ok(string_clone
+#[allow(clippy::needless_collect)]
+let lines: Vec<_> = concat_string.item.lines().map(|x| x.to_string()).collect();
+Ok(lines
 .into_iter()
 .filter_map(move |json_str| {
 if json_str.is_empty() {
View File

@@ -218,7 +218,7 @@ fn which(args: CommandArgs) -> Result<OutputStream, ShellError> {
 let mut output = vec![];
 for app in which_args.applications {
-let values = which_single(app, which_args.all, &args.scope());
+let values = which_single(app, which_args.all, args.scope());
 output.extend(values);
 }

View File

@@ -10,6 +10,7 @@ pub struct Command;
 #[derive(Deserialize)]
 struct Arguments {
+#[allow(unused)]
 pub rest: Vec<Value>,
 }

View File

@@ -66,7 +66,7 @@ fn pathvar_correctly_reads_path_from_config() {
 let expected = "/Users/andresrobalino/.volta/bin-/Users/mosqueteros/bin";
 let actual = sandbox.pipeline(r#" pathvar | first 2 | str collect '-' "#);
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -78,7 +78,7 @@ fn pathvar_correctly_reads_env_var_from_env() {
 let expected = "bacon-spam";
 let actual = sandbox.pipeline(r#" pathvar -v BREAKFAST | str collect '-' "#);
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -103,7 +103,7 @@ fn pathvar_adds_to_path() {
 let expected = "spam";
 let actual = sandbox.pipeline(r#" pathvar add spam; pathvar | first "#);
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -145,7 +145,7 @@ fn pathvar_appends_to_path() {
 let expected = "spam";
 let actual = sandbox.pipeline(r#" pathvar append spam; pathvar | last "#);
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -187,7 +187,7 @@ fn pathvar_removes_from_path() {
 let expected = "/Users/mosquito/proboscis";
 let actual = sandbox.pipeline(r#" pathvar remove 1; pathvar | first "#);
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -241,7 +241,7 @@ fn pathvar_resets_path_from_config() {
 "#,
 );
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -273,7 +273,7 @@ fn pathvar_resets_env_var_from_config() {
 "#,
 );
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -304,7 +304,7 @@ fn pathvar_saves_path_to_config() {
 "#,
 );
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -333,7 +333,7 @@ fn pathvar_saves_env_var_to_config() {
 "#,
 );
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }
@@ -360,7 +360,7 @@ fn pathvar_saves_new_env_var_to_config() {
 "#,
 );
-assert_that!(actual, says().stdout(&expected));
+assert_that!(actual, says().stdout(expected));
 })
 }

View File

@@ -20,12 +20,12 @@ fn build_path(head: &str, members: &Path, entry: &str) -> String {
 fn collect_entries(value_fs: &ValueShell, head: &str, path: &Path) -> Vec<String> {
 value_fs
-.members_under(&path)
+.members_under(path)
 .iter()
 .flat_map(|entry| {
 entry
 .row_entries()
-.map(|(entry_name, _)| build_path(&head, &path, entry_name))
+.map(|(entry_name, _)| build_path(head, path, entry_name))
 })
 .collect()
 }
@@ -62,7 +62,7 @@ where
 .or_else(|| {
 path.parent().map(|parent| {
 fs.find(parent)
-.map(|fs| collect_entries(fs, &head, &parent))
+.map(|fs| collect_entries(fs, &head, parent))
 .unwrap_or_default()
 })
 })
@@ -72,7 +72,7 @@ where
 })
 .flatten()
 .filter_map(|candidate| {
-if matcher.matches(&partial, &candidate) {
+if matcher.matches(partial, &candidate) {
 Some(Suggestion::new(&candidate, &candidate))
 } else {
 None
@@ -107,7 +107,7 @@ mod tests {
 }
 fn source(&self) -> &nu_engine::EvaluationContext {
-&self.0
+self.0
 }
 fn scope(&self) -> &dyn nu_parser::ParserScope {

View File

@@ -266,7 +266,7 @@ impl<'a> From<&'a Expression> for Variable<'a> {
 "$true" => Self::True,
 "$false" => Self::False,
 "$nothing" => Self::Nothing,
-_ => Self::Other(&name),
+_ => Self::Other(name),
 },
 _ => unreachable!(),
 }

View File

@@ -42,7 +42,7 @@ impl ValueShell {
 let mut value_system = ValueStructure::new();
 if value_system.walk_decorate(&self.value).is_ok() {
-value_system.exists(&path).then(|| self)
+value_system.exists(path).then(|| self)
 } else {
 None
 }

View File

@@ -104,7 +104,7 @@ impl WholeStreamCommand for Arc<Block> {
 let external_redirection = args.call_info.args.external_redirection;
 let ctx = &args.context;
-let evaluated = call_info.evaluate(&ctx)?;
+let evaluated = call_info.evaluate(ctx)?;
 let input = args.input;
 ctx.scope.enter_scope();
@@ -198,7 +198,7 @@ impl WholeStreamCommand for Arc<Block> {
 }
 }
 }
-let result = run_block(&block, &ctx, input, external_redirection);
+let result = run_block(&block, ctx, input, external_redirection);
 ctx.scope.exit_scope();
 result
 }

View File

@@ -186,7 +186,7 @@ pub fn compute_between_series(
 let mut res = l.bitand(r).into_series();
 let name = format!("and_{}_{}", lhs.name(), rhs.name());
 res.rename(name.as_ref());
-Ok(NuDataFrame::series_to_untagged(res, &operation_span))
+Ok(NuDataFrame::series_to_untagged(res, operation_span))
 }
 _ => Ok(UntaggedValue::Error(ShellError::labeled_error(
 "Casting error",
@@ -211,7 +211,7 @@ pub fn compute_between_series(
 let mut res = l.bitor(r).into_series();
 let name = format!("or_{}_{}", lhs.name(), rhs.name());
 res.rename(name.as_ref());
-Ok(NuDataFrame::series_to_untagged(res, &operation_span))
+Ok(NuDataFrame::series_to_untagged(res, operation_span))
 }
 _ => Ok(UntaggedValue::Error(ShellError::labeled_error(
 "Casting error",

View File

@@ -136,7 +136,7 @@ impl PartialEq for NuDataFrame {
 _ => self_series.clone(),
 };
-if !self_series.series_equal(&other_series) {
+if !self_series.series_equal(other_series) {
 return false;
 }
 }

View File

@@ -206,7 +206,7 @@ impl Start {
 );
 if let Some(app_name) = &self.application {
-exec_cmd(&app_name, &args, self.tag.clone())
+exec_cmd(app_name, &args, self.tag.clone())
 } else {
 for cmd in &["xdg-open", "gnome-open", "kde-open", "wslview"] {
 if exec_cmd(cmd, &args, self.tag.clone()).is_err() {