Fix typos by codespell ()

# Description

Found via `codespell -S target -L
crate,ser,numer,falsy,ro,te,nd,bu,ndoes,statics,ons,fo,rouge,pard`
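
For context, a minimal annotated sketch of that same invocation (flag meanings per codespell's standard options; the `-L` list is just identifiers and abbreviations that would otherwise be reported as false positives):

```sh
# Same run as above, with the flags spelled out.
# -S skips paths matching the pattern (here the Cargo build output in ./target).
# -L lists words codespell should ignore, e.g. `crate`, `ser`, `statics`,
#    which are legitimate identifiers rather than typos.
codespell \
  -S target \
  -L crate,ser,numer,falsy,ro,te,nd,bu,ndoes,statics,ons,fo,rouge,pard
```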

# User-Facing Changes

None.

# Tests + Formatting

None and done.

# After Submitting

None.
Kian-Meng Ang
2022-12-26 15:31:26 +08:00
committed by GitHub
parent 2415381682
commit 79000aa5e0
39 changed files with 61 additions and 61 deletions
crates
  nu-cli
  nu-command
    src
      bytes
      charting
      dataframe
        series
        values
          nu_expression
      filesystem
      filters
      platform
      strings
      viewers
    tests
      format_conversions
  nu-engine
  nu-explore
    src
      commands
      nu_common
      pager
      registry
  nu-parser
  nu-path
  nu-protocol
  nu-table
  nu-term-grid
  nu-test-support
  nu-utils
    src
      sample_config
  nu_plugin_python

@@ -109,7 +109,7 @@ fn dotnu_completions() {
 // Create a new engine
 let (_, _, engine, stack) = new_engine();
-// Instatiate a new completer
+// Instantiate a new completer
 let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
 // Test source completion
@@ -169,7 +169,7 @@ fn file_completions() {
 // Create a new engine
 let (dir, dir_str, engine, stack) = new_engine();
-// Instatiate a new completer
+// Instantiate a new completer
 let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
 // Test completions for the current folder
@@ -454,7 +454,7 @@ fn flag_completions() {
 // Create a new engine
 let (_, _, engine, stack) = new_engine();
-// Instatiate a new completer
+// Instantiate a new completer
 let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
 // Test completions for the 'ls' flags
 let suggestions = completer.complete("ls -", 4);
@@ -487,7 +487,7 @@ fn folder_with_directorycompletions() {
 // Create a new engine
 let (dir, dir_str, engine, stack) = new_engine();
-// Instatiate a new completer
+// Instantiate a new completer
 let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
 // Test completions for the current folder
@@ -515,7 +515,7 @@ fn variables_completions() {
 let record = "let actor = { name: 'Tom Hardy', age: 44 }";
 assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
-// Instatiate a new completer
+// Instantiate a new completer
 let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
 // Test completions for $nu
@@ -672,7 +672,7 @@ fn run_external_completion(block: &str, input: &str) -> Vec<Suggestion> {
 config.external_completer = Some(latest_block_id);
 engine_state.set_config(&config);
-// Instatiate a new completer
+// Instantiate a new completer
 let mut completer = NuCompleter::new(std::sync::Arc::new(engine_state), stack);
 completer.complete(input, input.len())

@@ -40,7 +40,7 @@ fn parse_range(range: Value, head: Span) -> Result<(isize, isize, Span), ShellEr
 let end = match end {
 Value::Int { val, .. } => val.to_string(),
 Value::String { val, .. } => val,
-// Explictly propagate errors instead of dropping them.
+// Explicitly propagate errors instead of dropping them.
 Value::Error { error } => return Err(error),
 other => {
 return Err(ShellError::UnsupportedInput(
@@ -55,7 +55,7 @@ fn parse_range(range: Value, head: Span) -> Result<(isize, isize, Span), ShellEr
 let start = match start {
 Value::Int { val, .. } => val.to_string(),
 Value::String { val, .. } => val,
-// Explictly propagate errors instead of dropping them.
+// Explicitly propagate errors instead of dropping them.
 Value::Error { error } => return Err(error),
 other => {
 return Err(ShellError::UnsupportedInput(
@@ -83,7 +83,7 @@ fn parse_range(range: Value, head: Span) -> Result<(isize, isize, Span), ShellEr
 }
 }
 }
-// Explictly propagate errors instead of dropping them.
+// Explicitly propagate errors instead of dropping them.
 Value::Error { error } => return Err(error),
 other => {
 return Err(ShellError::UnsupportedInput(

@@ -52,7 +52,7 @@ impl Command for BytesBuild {
 let val = eval_expression(engine_state, stack, expr)?;
 match val {
 Value::Binary { mut val, .. } => output.append(&mut val),
-// Explictly propagate errors instead of dropping them.
+// Explicitly propagate errors instead of dropping them.
 Value::Error { error } => return Err(error),
 other => {
 return Err(ShellError::TypeMismatch(

@@ -54,7 +54,7 @@ impl Command for BytesCollect {
 output_binary.append(&mut work_sep)
 }
 }
-// Explictly propagate errors instead of dropping them.
+// Explicitly propagate errors instead of dropping them.
 Value::Error { error } => return Err(error),
 other => {
 return Err(ShellError::OnlySupportsThisInputType(

@@ -193,7 +193,7 @@ fn remove_impl(input: &[u8], arg: &Arguments, span: Span) -> Value {
 right += 1;
 }
 }
-// append the remaing thing to result, this can happened when
+// append the remaining thing to result, this can happened when
 // we have something to remove and remove_all is False.
 let mut remain = input[left..].to_vec();
 result.append(&mut remain);

@@ -78,7 +78,7 @@ impl HashableValue {
 Value::String { val, span } => Ok(HashableValue::String { val, span }),
 Value::Binary { val, span } => Ok(HashableValue::Binary { val, span }),
-// Explictly propagate errors instead of dropping them.
+// Explicitly propagate errors instead of dropping them.
 Value::Error { error } => Err(error),
 _ => Err(ShellError::UnsupportedInput(
 "input value is not hashable".into(),

@@ -86,7 +86,7 @@ fn command(
 let series = df.as_series(call.head)?;
 let chunked = series.utf8().map_err(|e| {
 ShellError::GenericError(
-"Error convertion to string".into(),
+"Error conversion to string".into(),
 e.to_string(),
 Some(call.head),
 None,

@@ -86,7 +86,7 @@ fn command(
 let series = df.as_series(call.head)?;
 let chunked = series.utf8().map_err(|e| {
 ShellError::GenericError(
-"Error convertion to string".into(),
+"Error conversion to string".into(),
 e.to_string(),
 Some(call.head),
 None,

@@ -126,7 +126,7 @@ impl NuExpression {
 expr_to_value(self.as_ref(), span)
 }
-// Convenient function to extrac multiple Expr that could be inside a nushell Value
+// Convenient function to extract multiple Expr that could be inside a nushell Value
 pub fn extract_exprs(value: Value) -> Result<Vec<Expr>, ShellError> {
 ExtractedExpr::extract_exprs(value).map(ExtractedExpr::into_exprs)
 }

@@ -8,7 +8,7 @@ use nu_protocol::{
 };
 use std::path::Path;
-// when the file under the fold executeable
+// when the file under the fold executable
 #[cfg(unix)]
 mod permission_mods {
 pub type Mode = u32;

@@ -25,7 +25,7 @@ struct Finding {
 congruence: Vec<Congruence>,
 }
-/// Returns an interator over directory's children matching the abbreviation.
+/// Returns an iterator over directory's children matching the abbreviation.
 fn get_matching_children<'a, P>(
 path: &'a P,
 abbr: &'a Abbr,

@@ -514,7 +514,7 @@ fn to_nu_ansi_term_style(style: &LsColors_Style) -> Style {
 LsColors_Color::White => Color::White,
 // Below items are a rough translations to 256 colors as
-// nu-ansi-term do not have bright varients
+// nu-ansi-term do not have bright variants
 LsColors_Color::BrightBlack => Color::Fixed(8),
 LsColors_Color::BrightRed => Color::Fixed(9),
 LsColors_Color::BrightGreen => Color::Fixed(10),

@@ -5,7 +5,7 @@ use nu_protocol::{
 Signature, Span, Type, Value,
 };
 use once_cell::sync::Lazy;
-// regex can be replaced with fancy-regex once it suppports `split()`
+// regex can be replaced with fancy-regex once it supports `split()`
 // https://github.com/fancy-regex/fancy-regex/issues/104
 use regex::Regex;

@@ -154,7 +154,7 @@ static CODE_LIST: Lazy<Vec<AnsiCode>> = Lazy::new(|| { vec![
 AnsiCode{ short_name: Some("defr"), long_name: "default_reverse", code: Color::Default.reverse().prefix().to_string()},
 AnsiCode{ short_name: Some("bg_def"), long_name: "bg_default", code: Style::new().on(Color::Default).prefix().to_string()},
-// Xterm 256 colors with conflicting names names preceeded by x
+// Xterm 256 colors with conflicting names names preceded by x
 AnsiCode { short_name: Some("xblack"), long_name: "xterm_black", code: Color::Fixed(0).prefix().to_string()},
 AnsiCode { short_name: Some("maroon"), long_name: "xterm_maroon", code: Color::Fixed(1).prefix().to_string()},
 AnsiCode { short_name: Some("xgreen"), long_name: "xterm_green", code: Color::Fixed(2).prefix().to_string()},

@@ -192,7 +192,7 @@ fn action(
 Value::string(gradient_string, *span)
 }
 (None, Some(fg_end), None, Some(bg_end)) => {
-// missin fg_start and bg_start, so assume black
+// missing fg_start and bg_start, so assume black
 let fg_start = Rgb::new(0, 0, 0);
 let bg_start = Rgb::new(0, 0, 0);
 let fg_gradient = Gradient::new(fg_start, fg_end);

@@ -124,18 +124,18 @@ impl Command for SubCommand {
 result: Some(Value::test_string("dogs_$2_cats")),
 },
 Example {
-description: "Find and replace the first occurence using string replacement *not* regular expressions",
+description: "Find and replace the first occurrence using string replacement *not* regular expressions",
 example: r#"'c:\some\cool\path' | str replace 'c:\some\cool' '~' -s"#,
 result: Some(Value::test_string("~\\path")),
 },
 Example {
-description: "Find and replace all occurences using string replacement *not* regular expressions",
+description: "Find and replace all occurrences using string replacement *not* regular expressions",
 example: r#"'abc abc abc' | str replace -a 'b' 'z' -s"#,
 result: Some(Value::test_string("azc azc azc")),
 },
 Example {
 description: "Find and replace with fancy-regex",
-example: r#"'a sucessful b' | str replace '\b([sS])uc(?:cs|s?)e(ed(?:ed|ing|s?)|ss(?:es|ful(?:ly)?|i(?:ons?|ve(?:ly)?)|ors?)?)\b' '${1}ucce$2'"#,
+example: r#"'a successful b' | str replace '\b([sS])uc(?:cs|s?)e(ed(?:ed|ing|s?)|ss(?:es|ful(?:ly)?|i(?:ons?|ve(?:ly)?)|ors?)?)\b' '${1}ucce$2'"#,
 result: Some(Value::test_string("a successful b")),
 },
 Example {

@@ -272,7 +272,7 @@ fn trim(s: &str, char_: Option<char>, closure_flags: &ClosureFlags) -> String {
 let re_str = format!("{}{{2,}}", reg);
 // create the regex
 let re = Regex::new(&re_str).expect("Error creating regular expression");
-// replace all mutliple occurances with single occurences represented by r
+// replace all multiple occurrences with single occurrences represented by r
 let new_str = re.replace_all(&return_string, r.to_string());
 // update the return string so the next loop has the latest changes
 return_string = new_str.to_string();

@@ -86,12 +86,12 @@ impl Command for Table {
 .named(
 "flatten-separator",
 SyntaxShape::String,
-"sets a seperator when 'flatten' used",
+"sets a separator when 'flatten' used",
 None,
 )
 .switch(
 "collapse",
-"expand the table structure in colapse mode.\nBe aware collapse mode currently doesn't support width controll",
+"expand the table structure in colapse mode.\nBe aware collapse mode currently doesn't support width control",
 Some('c'),
 )
 .category(Category::Viewers)
@@ -497,7 +497,7 @@ fn build_expanded_table(
 match table {
 Some((mut table, with_header, with_index)) => {
-// controll width via removing table columns.
+// control width via removing table columns.
 table.truncate(remaining_width, &theme);
 is_expanded = true;

@@ -62,7 +62,7 @@ fn nested_tables_to_toml() {
 #[test]
 fn table_to_toml_fails() {
-// Tables cant be represented in toml
+// Tables can't be represented in toml
 let actual = nu!(
 cwd: "tests/fixtures/formats", pipeline(
 r#"

@@ -183,7 +183,7 @@ pub fn redirect_env(engine_state: &EngineState, caller_stack: &mut Stack, callee
 }
 }
-/// Eval extarnal expression
+/// Eval external expression
 ///
 /// It returns PipelineData with a boolean flag, indicate that if the external runs to failed.
 #[allow(clippy::too_many_arguments)]

@@ -73,7 +73,7 @@ impl ViewCommand for TableCmd {
 Shortcode::new("Esc", "", "Exits cursor mode. Exits the just explored dataset."),
 Shortcode::new("i", "view", "Enters cursor mode to inspect individual cells"),
 Shortcode::new("t", "view", "Transpose table, so that columns become rows and vice versa"),
-Shortcode::new("e", "view", "Open expand view (equvalent of :expand)"),
+Shortcode::new("e", "view", "Open expand view (equivalent of :expand)"),
 Shortcode::new("Enter", "cursor", "In cursor mode, explore the data of the selected cell"),
 ];

@@ -77,8 +77,8 @@ fn eval_source2(
 return Err(ShellError::IOError(err.to_string()));
 }
-// eval_block outputs all expressions expept the last to STDOUT;
-// we don't wont that.
+// eval_block outputs all expressions except the last to STDOUT;
+// we don't won't that.
 //
 // So we LITERALLY ignore all expressions except the LAST.
 if block.len() > 1 {

@@ -171,7 +171,7 @@ fn build_expanded_table(
 match table {
 Some((mut table, with_header, with_index)) => {
-// controll width via removing table columns.
+// control width via removing table columns.
 let theme = load_theme_from_config(config);
 table.truncate(remaining_width, &theme);

@@ -732,7 +732,7 @@ fn handle_event<V: View>(
 }
 }
-// was not handled so we must check our default controlls
+// was not handled so we must check our default controls
 handle_general_key_events2(&key, search, command, view, info);
 None

@@ -46,9 +46,9 @@ impl CommandRegistry {
 );
 }
-pub fn create_aliase(&mut self, aliase: &str, command: &str) {
+pub fn create_aliase(&mut self, aliases: &str, command: &str) {
 self.aliases.insert(
-Cow::Owned(aliase.to_owned()),
+Cow::Owned(aliases.to_owned()),
 Cow::Owned(command.to_owned()),
 );
 }

@@ -698,7 +698,7 @@ pub fn parse_alias(
 (
 garbage_pipeline(spans),
 Some(ParseError::InternalError(
-"Alias statement unparseable".into(),
+"Alias statement unparsable".into(),
 span(spans),
 )),
 )
@@ -2486,7 +2486,7 @@ pub fn parse_overlay_use(
 return (
 pipeline,
 Some(ParseError::CantAddOverlayHelp(
-format!("Cannot add overlay as '{}' because it already exsits under the name '{}'", new_name.item, overlay_name),
+format!("Cannot add overlay as '{}' because it already exists under the name '{}'", new_name.item, overlay_name),
 new_name.span,
 )),
 );
@@ -2883,7 +2883,7 @@ pub fn parse_let_or_const(
 (
 garbage_pipeline(spans),
 Some(ParseError::UnknownState(
-"internal error: let or const statement unparseable".into(),
+"internal error: let or const statement unparsable".into(),
 span(spans),
 )),
 )
@@ -3005,7 +3005,7 @@ pub fn parse_mut(
 (
 garbage_pipeline(spans),
 Some(ParseError::UnknownState(
-"internal error: mut statement unparseable".into(),
+"internal error: mut statement unparsable".into(),
 span(spans),
 )),
 )
@@ -3175,7 +3175,7 @@ pub fn parse_source(
 (
 garbage_pipeline(spans),
 Some(ParseError::UnknownState(
-"internal error: source statement unparseable".into(),
+"internal error: source statement unparsable".into(),
 span(spans),
 )),
 )

@@ -21,7 +21,7 @@ fn handle_dots_push(string: &mut String, count: u8) {
 string.pop(); // remove last '/'
 }
-/// Expands any occurence of more than two dots into a sequence of ../ (or ..\ on windows), e.g.,
+/// Expands any occurrence of more than two dots into a sequence of ../ (or ..\ on windows), e.g.,
 /// "..." into "../..", "...." into "../../../", etc.
 pub fn expand_ndots(path: impl AsRef<Path>) -> PathBuf {
 // Check if path is valid UTF-8 and if not, return it as it is to avoid breaking it via string

@@ -167,10 +167,10 @@ pub enum TableIndexMode {
 pub enum TrimStrategy {
 /// Wrapping strategy.
 ///
-/// It it's simmilar to original nu_table, strategy.
+/// It it's similar to original nu_table, strategy.
 Wrap {
 /// A flag which indicates whether is it necessary to try
-/// to keep word bounderies.
+/// to keep word boundaries.
 try_to_keep_words: bool,
 },
 /// Truncating strategy, where we just cut the string.

@@ -177,7 +177,7 @@ impl EngineState {
 .iter_mut()
 .find(|(name, _)| name == &delta_name)
 {
-// Upating existing overlay
+// Updating existing overlay
 for item in delta_overlay.decls.into_iter() {
 existing_overlay.decls.insert(item.0, item.1);
 }

@@ -527,7 +527,7 @@ impl PipelineData {
 let ctrlc = exit_code_stream.ctrlc.clone();
 let exit_code: Vec<Value> = exit_code_stream.into_iter().collect();
 if let Some(Value::Int { val: code, .. }) = exit_code.last() {
-// if exit_code is not 0, it indicates error occured, return back Err.
+// if exit_code is not 0, it indicates error occurred, return back Err.
 if *code != 0 {
 failed_to_run = true;
 }
@@ -564,7 +564,7 @@ impl PipelineData {
 /// Consume and print self data immediately.
 ///
 /// `no_newline` controls if we need to attach newline character to output.
-/// `to_stderr` controls if data is output to stderr, when the value is false, the data is ouput to stdout.
+/// `to_stderr` controls if data is output to stderr, when the value is false, the data is output to stdout.
 pub fn print(
 self,
 engine_state: &EngineState,

@@ -123,7 +123,7 @@ pub struct Signature {
 pub category: Category,
 }
-/// Fromat argumet type for user readable output.
+/// Format argument type for user readable output.
 ///
 /// In general:
 /// if argument type is a simple type(like string), we'll wrapped with `<>`, the result will be `<string>`
@@ -147,7 +147,7 @@ fn fmt_type(arg_type: &Type, optional: bool) -> String {
 //
 // <string> | <string>, <int?> => string
 //
-// More detail explaination:
+// More detail explanation:
 // the first one is the input from previous command, aka, pipeline input
 // then followed by `|`, then positional arguments type
 // then optional arguments type, which ends with `?`

@@ -699,7 +699,7 @@ impl Value {
 let cols = cols.clone();
 let span = *span;
-// Make reverse iterate to avoid duplicate column leads to first value, actuall last value is expected.
+// Make reverse iterate to avoid duplicate column leads to first value, actually last value is expected.
 if let Some(found) = cols.iter().zip(vals.iter()).rev().find(|x| {
 if insensitive {
 x.0.to_lowercase() == column_name.to_lowercase()
@@ -1613,7 +1613,7 @@ impl PartialOrd for Value {
 ..
 } => {
 // reorder cols and vals to make more logically compare.
-// more genral, if two record have same col and values,
+// more general, if two record have same col and values,
 // the order of cols shouldn't affect the equal property.
 let (lhs_cols_ordered, lhs_vals_ordered) =
 reorder_record_inner(lhs_cols, lhs_vals);

@@ -179,7 +179,7 @@ impl Iterator for RawStream {
 }
 }
-/// A potentially infinite stream of values, optinally with a mean to send a Ctrl-C signal to stop
+/// A potentially infinite stream of values, optionally with a mean to send a Ctrl-C signal to stop
 /// the stream from continuing.
 ///
 /// In practice, a "stream" here means anything which can be iterated and produce Values as it iterates.

@@ -10,7 +10,7 @@ use tabled::{
 /// NuTable has a recursive table representation of nu_protocol::Value.
 ///
-/// It doesn't support alignement and a proper width control.
+/// It doesn't support alignment and a proper width control.
 pub struct NuTable {
 inner: tabled::Table,
 }

@@ -34,7 +34,7 @@ impl Table {
 ///
 /// If `headers.is_empty` then no headers will be rendered.
 pub fn new(data: Vec<Vec<TCell<CellInfo<'static>, TextStyle>>>, size: (usize, usize)) -> Table {
-// it's not guaranted that data will have all rows with the same number of columns.
+// it's not guaranteed that data will have all rows with the same number of columns.
 // but VecRecords::with_hint require this constrain.
 //
 // so we do a check to make it certainly true

@@ -352,7 +352,7 @@ impl Grid {
 let theoretical_max_num_lines = self.theoretical_max_num_lines(maximum_width);
 if theoretical_max_num_lines == 1 {
-// This if—statement is neccesary for the function to work correctly
+// This if—statement is necessary for the function to work correctly
 // for small inputs.
 return Some(Dimensions {
 num_lines: 1,

@@ -15,7 +15,7 @@
 ///
 /// ```no_run
 /// # // NOTE: The `nu!` macro needs the `nu` binary to exist. The test are
-/// # // therefore only compiled but not run (thats what the `no_run` at
+/// # // therefore only compiled but not run (that's what the `no_run` at
 /// # // the beginning of this code block is for).
 /// #
 /// use nu_test_support::nu;

@@ -33,7 +33,7 @@ module completions {
 --write-fetch-head # Write fetched refs in FETCH_HEAD (default)
 --no-write-fetch-head # Do not write FETCH_HEAD
 --force(-f) # Always update the local branch
---keep(-k) # Keep dowloaded pack
+--keep(-k) # Keep downloaded pack
 --multiple # Allow several arguments to be specified
 --auto-maintenance # Run 'git maintenance run --auto' at the end (default)
 --no-auto-maintenance # Don't run 'git maintenance' at the end
@@ -424,7 +424,7 @@ let-env config = {
 }
 menus: [
 # Configuration for default nushell menus
-# Note the lack of souce parameter
+# Note the lack of source parameter
 {
 name: completion_menu
 only_buffer_difference: false

@@ -8,7 +8,7 @@
 # To register the plugin use:
 # register <path-to-py-file>
 #
-# Be carefull with the spans. Miette will crash if a span is outside the
+# Be careful with the spans. Miette will crash if a span is outside the
 # size of the contents vector. For this example we are using 0 and 1, which will
 # point to the beginning of the contents vector. We strongly suggest using the span
 # found in the plugin call head