Mirror of https://github.com/nushell/nushell.git · synced 2024-11-26 02:13:47 +01:00
Remove unnecessary #[allow(...)] annotations (#6870)
* Remove unnecessary `#[allow]` annotations

Reduce the number of lint exceptions that are not necessary with the
current state of the code (or a more recent toolchain).

* Remove dead code from `FileStructure` in nu-command

* Replace `allow(unused)` with the relevant feature switch

* Deal with `needless_collect` with annotations

* Change hack for `needless_collect` in `from json`

This change obviates the need for `allow(needless_collect)`. It removes a
pessimistic allocation for empty strings, but increases the allocation size
to `Value`. Probably not really worth it.

* Revert "Deal with `needless_collect` with annotations"

This reverts commit 05aca98445.

The previous state seems to be better from a performance perspective, as a
`Vec<String>` is lighter weight than a `Vec<Value>`.
This commit is contained in:
parent 79fd7d54b2
commit 6a7a60429f
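
For context on the `needless_collect` tradeoff discussed in the message above, here is a minimal, self-contained sketch of the two shapes being compared: buffering the raw lines as a `Vec<String>` and converting lazily, versus converting up front and buffering the results. The `Value` enum and `convert` function are simplified stand-ins, not the actual nushell types.

use std::mem::size_of;

// Stand-in for a structured value type like nu_protocol::Value.
#[derive(Debug)]
enum Value {
    String(String),
    Nothing,
}

// Hypothetical per-line conversion; `from json --objects` parses JSON here.
fn convert(line: &str) -> Value {
    if line.trim().is_empty() {
        Value::Nothing
    } else {
        Value::String(line.to_string())
    }
}

fn main() {
    let input = "{\"a\": 1}\n\n{\"b\": 2}\n";

    // Shape 1: buffer the raw lines (`Vec<String>`) and convert lazily as the
    // consumer drains the iterator. This is the pattern that carried
    // `#[allow(clippy::needless_collect)]`, because the Vec is only turned
    // back into an iterator.
    #[allow(clippy::needless_collect)]
    let lines: Vec<String> = input.lines().map(|l| l.to_string()).collect();
    for value in lines.into_iter().map(|l| convert(&l)) {
        println!("lazy:  {value:?}");
    }

    // Shape 2: convert while iterating and buffer the results instead.
    // No lint to silence, but the buffer now holds the larger value type.
    let converted: Vec<Value> = input.lines().map(convert).collect();
    for value in converted {
        println!("eager: {value:?}");
    }

    // The "lighter weight" point from the revert note, made concrete.
    println!(
        "String: {} bytes, Value: {} bytes",
        size_of::<String>(),
        size_of::<Value>()
    );
}

Both shapes print the same converted values; the size printout is just to show why the revert note calls the string buffer lighter weight per element.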
@@ -55,7 +55,6 @@ impl Command for ColumnsDF {
     }
 }
 
-#[allow(clippy::needless_collect)]
 fn command(
     _engine_state: &EngineState,
     _stack: &mut Stack,
@@ -56,7 +56,6 @@ impl Command for DataTypes {
     }
 }
 
-#[allow(clippy::needless_collect)]
 fn command(
     _engine_state: &EngineState,
     _stack: &mut Stack,
@@ -25,7 +25,6 @@ const GLOB_PARAMS: nu_glob::MatchOptions = nu_glob::MatchOptions {
 #[derive(Clone)]
 pub struct Cp;
 
-#[allow(unused_must_use)]
 impl Command for Cp {
     fn name(&self) -> &str {
         "cp"
@@ -20,7 +20,6 @@ const GLOB_PARAMS: nu_glob::MatchOptions = nu_glob::MatchOptions {
 #[derive(Clone)]
 pub struct Mv;
 
-#[allow(unused_must_use)]
 impl Command for Mv {
     fn name(&self) -> &str {
         "mv"
@@ -14,20 +14,11 @@ pub struct FileStructure {
     pub resources: Vec<Resource>,
 }
 
-#[allow(dead_code)]
 impl FileStructure {
     pub fn new() -> FileStructure {
         FileStructure { resources: vec![] }
     }
 
-    pub fn contains_more_than_one_file(&self) -> bool {
-        self.resources.len() > 1
-    }
-
-    pub fn contains_files(&self) -> bool {
-        !self.resources.is_empty()
-    }
-
     pub fn paths_applying_with<F>(
         &mut self,
         to: F,
@@ -38,7 +38,6 @@ impl Command for GroupBy {
         group_by(engine_state, stack, call, input)
     }
 
-    #[allow(clippy::unwrap_used)]
     fn examples(&self) -> Vec<Example> {
         vec![
             Example {
@@ -35,7 +35,6 @@ impl Command for SplitBy {
         split_by(engine_state, stack, call, input)
     }
 
-    #[allow(clippy::unwrap_used)]
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "split items by column named \"lang\"",
@@ -84,21 +84,20 @@ impl Command for FromJson {
 
         // TODO: turn this into a structured underline of the nu_json error
         if call.has_flag("objects") {
-            #[allow(clippy::needless_collect)]
-            let lines: Vec<String> = string_input.lines().map(|x| x.to_string()).collect();
-            Ok(lines
-                .into_iter()
+            let converted_lines: Vec<Value> = string_input
+                .lines()
                 .filter_map(move |x| {
                     if x.trim() == "" {
                         None
                     } else {
-                        match convert_string_to_value(x, span) {
+                        match convert_string_to_value(x.to_string(), span) {
                             Ok(v) => Some(v),
                             Err(error) => Some(Value::Error { error }),
                         }
                     }
                 })
-                .into_pipeline_data(engine_state.ctrlc.clone()))
+                .collect();
+            Ok(converted_lines.into_pipeline_data(engine_state.ctrlc.clone()))
         } else {
             Ok(convert_string_to_value(string_input, span)?.into_pipeline_data())
         }
@@ -270,15 +270,10 @@ fn get_output_string(
 
         output_string.push_str("\n|");
 
-        #[allow(clippy::needless_range_loop)]
-        for i in 0..headers.len() {
+        for &col_width in column_widths.iter().take(headers.len()) {
             if pretty {
                 output_string.push(' ');
-                output_string.push_str(&get_padded_string(
-                    String::from("-"),
-                    column_widths[i],
-                    '-',
-                ));
+                output_string.push_str(&get_padded_string(String::from("-"), col_width, '-'));
                 output_string.push(' ');
             } else {
                 output_string.push('-');
@@ -1,7 +1,6 @@
 use nu_protocol::{ShellError, Span, Value};
 use std::cmp::Ordering;
 
-#[allow(dead_code)]
 pub enum Reduce {
     Summation,
     Product,
@@ -774,7 +774,6 @@ fn convert_to_table(
 }
 
 #[allow(clippy::too_many_arguments)]
-#[allow(clippy::into_iter_on_ref)]
 fn convert_to_table2<'a>(
     row_offset: usize,
     input: impl Iterator<Item = &'a Value> + ExactSizeIterator + Clone,
@@ -1,5 +1,5 @@
 use nu_test_support::nu;
-#[allow(unused)]
+#[cfg(feature = "database")]
 use nu_test_support::pipeline;
 
 #[test]
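
The test-support hunk above replaces a blanket `#[allow(unused)]` on an import with a `#[cfg(feature = "database")]` gate, so builds without the feature never compile the import in the first place. A small sketch of the pattern, using a hypothetical local `helpers` module as a stand-in for `nu_test_support`:

// Stand-in helper module; in the real tests this is `nu_test_support`.
#[cfg(feature = "database")]
mod helpers {
    /// Collapse a multi-line test pipeline into a single command string.
    pub fn pipeline(commands: &str) -> String {
        commands.split_whitespace().collect::<Vec<_>>().join(" ")
    }
}

// Before: the import existed in every build and was silenced with
// `#[allow(unused)]` in configurations that never used it.
//
// After: the import is only compiled alongside the tests that need it,
// so there is nothing left to allow.
#[cfg(feature = "database")]
use helpers::pipeline;

#[cfg(feature = "database")]
#[test]
fn joins_a_multiline_pipeline() {
    let joined = pipeline("open foo.db |\n    query db 'select 1'");
    assert_eq!(joined, "open foo.db | query db 'select 1'");
}

Gating the import on the same feature as its only users keeps every build configuration warning-free without any lint exception.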
@@ -863,7 +863,6 @@ fn chars_eq(a: char, b: char, case_sensitive: bool) -> bool {
 }
 
 /// Configuration options to modify the behaviour of `Pattern::matches_with(..)`.
-#[allow(missing_copy_implementations)]
 #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
 pub struct MatchOptions {
     /// Whether or not patterns should be matched in a case-sensitive manner.
@@ -1604,7 +1604,6 @@ pub fn parse_string_interpolation(
 
     let mut b = start;
 
-    #[allow(clippy::needless_range_loop)]
     while b != end {
         if contents[b - start] == b'('
             && (if double_quote && (b - start) > 0 {
@@ -606,7 +606,6 @@ fn parse_commandline_args(
 
 struct NushellCliArgs {
     redirect_stdin: Option<Spanned<String>>,
-    #[allow(dead_code)]
     login_shell: Option<Spanned<String>>,
     interactive_shell: Option<Spanned<String>>,
     commands: Option<Spanned<String>>,