Fix typos and capitalization of "Unicode" (#3234)

* Capitalize "Unicode"

* Fix several typos

* Fix mixed whitespace in nu-parser's tests

Waldir Pimenta 2021-04-03 20:14:07 +01:00 committed by GitHub
parent e278ca61d1
commit 4bc9d9fd3b
20 changed files with 72 additions and 71 deletions


@@ -12,7 +12,7 @@ impl matchers::Matcher for Matcher {
mod tests {
use super::*;
// TODO: check some unicode matches if this becomes relevant
// TODO: check some Unicode matches if this becomes relevant
// FIXME: could work exhaustively through ['-', '--'. ''] in a loop for each test
#[test]


@@ -285,7 +285,7 @@ fn get_shape_of_expr(expr: &SpannedExpression) -> Option<SyntaxShape> {
nu_protocol::hir::Literal::Bare(_) => Some(SyntaxShape::String),
}
}
//Synthetic are expressions that are generated by the parser and not inputed by the user
//Synthetic are expressions that are generated by the parser and not inputted by the user
//ExternalWord is anything sent to external commands (?)
Expression::ExternalWord => Some(SyntaxShape::String),
Expression::Synthetic(_) => Some(SyntaxShape::String),
@@ -387,7 +387,7 @@ impl VarSyntaxShapeDeductor {
}
fn infer_shape(&mut self, block: &Block, scope: &Scope) -> Result<(), ShellError> {
trace!("Infering vars in shape");
trace!("Inferring vars in shape");
for group in &block.block {
for pipeline in &group.pipelines {
self.infer_pipeline(pipeline, scope)?;
@@ -397,7 +397,7 @@ impl VarSyntaxShapeDeductor {
}
pub fn infer_pipeline(&mut self, pipeline: &Pipeline, scope: &Scope) -> Result<(), ShellError> {
trace!("Infering vars in pipeline");
trace!("Inferring vars in pipeline");
for (cmd_pipeline_idx, classified) in pipeline.list.iter().enumerate() {
match &classified {
ClassifiedCommand::Internal(internal) => {
@@ -429,7 +429,7 @@ impl VarSyntaxShapeDeductor {
}
}
if let Some(named) = &internal.args.named {
trace!("Infering vars in named exprs");
trace!("Inferring vars in named exprs");
for (_name, val) in named.iter() {
if let NamedValue::Value(_, named_expr) = val {
self.infer_shapes_in_expr(
@@ -443,7 +443,7 @@ impl VarSyntaxShapeDeductor {
}
ClassifiedCommand::Expr(spanned_expr) => {
trace!(
"Infering shapes in ClassifiedCommand::Expr: {:?}",
"Inferring shapes in ClassifiedCommand::Expr: {:?}",
spanned_expr
);
self.infer_shapes_in_expr((cmd_pipeline_idx, pipeline), spanned_expr, scope)?;
@@ -459,7 +459,7 @@ impl VarSyntaxShapeDeductor {
positionals: &[SpannedExpression],
signature: &Signature,
) -> Result<(), ShellError> {
trace!("Infering vars in positionals");
trace!("Inferring vars in positionals");
//TODO currently correct inference for optional positionals is not implemented.
// See https://github.com/nushell/nushell/pull/2486 for a discussion about this
// For now we assume every variable in an optional positional is used as this optional
@@ -500,7 +500,7 @@ impl VarSyntaxShapeDeductor {
named: &NamedArguments,
signature: &Signature,
) -> Result<(), ShellError> {
trace!("Infering vars in named");
trace!("Inferring vars in named");
for (name, val) in named.iter() {
if let NamedValue::Value(span, spanned_expr) = &val {
if let Expression::Variable(var_name, _) = &spanned_expr.expr {
@@ -534,15 +534,15 @@ impl VarSyntaxShapeDeductor {
) -> Result<(), ShellError> {
match &spanned_expr.expr {
Expression::Binary(_) => {
trace!("Infering vars in bin expr");
trace!("Inferring vars in bin expr");
self.infer_shapes_in_binary_expr((pipeline_idx, pipeline), spanned_expr, scope)?;
}
Expression::Block(b) => {
trace!("Infering vars in block");
trace!("Inferring vars in block");
self.infer_shape(&b, scope)?;
}
Expression::Path(path) => {
trace!("Infering vars in path");
trace!("Inferring vars in path");
match &path.head.expr {
//PathMember can't be var yet (?)
//TODO Iterate over path parts and find var when implemented
@@ -560,7 +560,7 @@ impl VarSyntaxShapeDeductor {
}
}
Expression::Range(range) => {
trace!("Infering vars in range");
trace!("Inferring vars in range");
if let Some(range_left) = &range.left {
if let Expression::Variable(var_name, _) = &range_left.expr {
self.checked_insert(
@@ -585,13 +585,13 @@ impl VarSyntaxShapeDeductor {
}
}
Expression::List(inner_exprs) => {
trace!("Infering vars in list");
trace!("Inferring vars in list");
for expr in inner_exprs {
self.infer_shapes_in_expr((pipeline_idx, pipeline), expr, scope)?;
}
}
Expression::Invocation(invoc) => {
trace!("Infering vars in invocation: {:?}", invoc);
trace!("Inferring vars in invocation: {:?}", invoc);
self.infer_shape(invoc, scope)?;
}
Expression::Table(header, _rows) => {
@@ -738,7 +738,7 @@ impl VarSyntaxShapeDeductor {
(pipeline_idx, pipeline): (usize, &Pipeline),
scope: &Scope,
) -> Result<(), ShellError> {
trace!("Infering shapes between var {:?} and expr {:?}", var, expr);
trace!("Inferring shapes between var {:?} and expr {:?}", var, expr);
let bin = spanned_to_binary(bin_spanned);
if let Expression::Literal(Literal::Operator(op)) = bin.op.expr {
match &op {


@@ -17,7 +17,7 @@ impl WholeStreamCommand for Autoenv {
// "Mark a .nu-env file in a directory as trusted. Needs to be re-run after each change to the file or its filepath."
r#"Create a file called .nu-env in any directory and run 'autoenv trust' to let nushell load it when entering the directory.
The .nu-env file has the same format as your $HOME/nu/config.toml file. By loading a .nu-env file the following applies:
- environment variables (section \"[env]\") are loaded from the .nu-env file. Those env variables are only existend in this directory (and children directories)
- environment variables (section \"[env]\") are loaded from the .nu-env file. Those env variables only exist in this directory (and children directories)
- the \"startup\" commands are run when entering the directory
- the \"on_exit\" commands are run when leaving the directory
"#

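The help text above describes the `.nu-env` layout only in prose. As a rough sketch, assuming it follows the same TOML syntax as `config.toml` (the section and keys below come from the description; the values are made up):

```toml
# Hypothetical .nu-env file, based on the help text above
[env]
PROJECT_MODE = "demo"   # only exists in this directory and its children

startup = ["echo 'entering the project directory'"]
on_exit = ["echo 'leaving the project directory'"]
```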

@@ -26,8 +26,8 @@ impl WholeStreamCommand for Char {
SyntaxShape::Any,
"the name of the character to output",
)
.rest(SyntaxShape::String, "multiple unicode bytes")
.switch("unicode", "unicode string i.e. 1f378", Some('u'))
.rest(SyntaxShape::String, "multiple Unicode bytes")
.switch("unicode", "Unicode string i.e. 1f378", Some('u'))
}
fn usage(&self) -> &str {
@@ -51,12 +51,12 @@ impl WholeStreamCommand for Char {
]),
},
Example {
description: "Output unicode character",
description: "Output Unicode character",
example: r#"char -u 1f378"#,
result: Some(vec![Value::from("\u{1f378}")]),
},
Example {
description: "Output multi-byte unicode character",
description: "Output multi-byte Unicode character",
example: r#"char -u 1F468 200D 1F466 200D 1F466"#,
result: Some(vec![Value::from(
"\u{1F468}\u{200D}\u{1F466}\u{200D}\u{1F466}",
@@ -77,7 +77,7 @@ impl WholeStreamCommand for Char {
if unicode {
if !rest.is_empty() {
// Setup a new buffer to put all the unicode bytes in
// Setup a new buffer to put all the Unicode bytes in
let mut multi_byte = String::new();
// Get the first byte
let decoded_char = string_to_unicode_char(&name.item, &name.tag);
@@ -104,8 +104,8 @@ impl WholeStreamCommand for Char {
)))
} else {
Err(ShellError::labeled_error(
"error decoding unicode character",
"error decoding unicode character",
"error decoding Unicode character",
"error decoding Unicode character",
name.tag(),
))
}
@@ -136,8 +136,8 @@ fn string_to_unicode_char(s: &str, t: &Tag) -> Result<char, ShellError> {
Ok(ch)
} else {
Err(ShellError::labeled_error(
"error decoding unicode character",
"error decoding unicode character",
"error decoding Unicode character",
"error decoding Unicode character",
t,
))
}
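The `string_to_unicode_char` helper touched above is essentially a hex-to-`char` conversion with error reporting. A minimal stand-alone sketch of that idea (a hypothetical helper, not the nushell implementation, which wraps the failure in a `ShellError`):

```rust
// Parse a hex code point such as "1f378" into a Rust char.
// Fails for non-hex input and for invalid code points (e.g. lone surrogates).
fn hex_to_char(s: &str) -> Option<char> {
    u32::from_str_radix(s, 16).ok().and_then(char::from_u32)
}

fn main() {
    assert_eq!(hex_to_char("1f378"), Some('\u{1f378}')); // cocktail glass emoji
    assert_eq!(hex_to_char("d800"), None); // lone surrogate: not a valid char
    assert_eq!(hex_to_char("zzz"), None);  // not hexadecimal at all
}
```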


@@ -41,7 +41,7 @@ impl WholeStreamCommand for Size {
.into()]),
},
Example {
description: "Counts unicode characters correctly in a string",
description: "Counts Unicode characters correctly in a string",
example: r#"echo "Amélie Amelie" | size"#,
result: Some(vec![UntaggedValue::row(indexmap! {
"lines".to_string() => UntaggedValue::int(0).into(),


@@ -68,7 +68,7 @@ impl WholeStreamCommand for SubCommand {
result: Some(vec![UntaggedValue::string("123").into_untagged_value()]),
},
Example {
description: "Use lpad to pad unicode",
description: "Use lpad to pad Unicode",
example: "echo '▉' | str lpad -l 10 -c '▉'",
result: Some(vec![
UntaggedValue::string("▉▉▉▉▉▉▉▉▉▉").into_untagged_value()


@@ -68,7 +68,7 @@ impl WholeStreamCommand for SubCommand {
result: Some(vec![UntaggedValue::string("123").into_untagged_value()]),
},
Example {
description: "Use rpad to pad unicode",
description: "Use rpad to pad Unicode",
example: "echo '▉' | str rpad -l 10 -c '▉'",
result: Some(vec![
UntaggedValue::string("▉▉▉▉▉▉▉▉▉▉").into_untagged_value()


@@ -212,7 +212,7 @@ async fn table(
{
// This is called when the pager finishes, to indicate to the
// while loop below to finish, in case of long running InputStream consumer
// that doesnt finish by the time the user quits out of the pager
// that doesn't finish by the time the user quits out of the pager
pager.lock().await.add_exit_callback(move || {
finished_within_callback.store(true, Ordering::Relaxed);
});


@@ -14,7 +14,7 @@ pub struct LocalConfigDiff {
/// Every config seen while going down the filesystem (e.G. from `/foo/bar` to `/foo/bar`) is
/// returned as a config to unload
/// If both paths are unrelated to each other, (e.G. windows paths as: `C:/foo` and `D:/bar`)
/// this function first walks `from` completly down the filesystem and then it walks up until `to`.
/// this function first walks `from` completely down the filesystem and then it walks up until `to`.
///
/// Both paths are required to be absolute.
impl LocalConfigDiff {
@@ -35,7 +35,7 @@ impl LocalConfigDiff {
///Walks from the first parameter down the filesystem to the second parameter. Marking all
///configs found in directories on the way as to remove.
///If to is None, this method walks from the first paramter down to the beginning of the
///If to is None, this method walks from the first parameter down to the beginning of the
///filesystem
///Returns tuple of (configs to remove, errors from io).
fn walk_down(
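The doc comments above describe the walk only in prose. As a loose, self-contained illustration of the "configs to unload" half (a hypothetical helper, not nushell's actual `walk_down`): the directories passed while going down from `from` toward the part of the tree shared with `to` are the ones whose local configs, if any, get unloaded.

```rust
use std::path::{Path, PathBuf};

/// Hypothetical sketch: collect the directories whose local configs would be
/// unloaded when moving from `from` to `to`, i.e. every ancestor of `from`
/// that is not also an ancestor of `to`. Both paths are assumed absolute.
fn dirs_to_unload(from: &Path, to: &Path) -> Vec<PathBuf> {
    from.ancestors()
        .filter(|dir| !to.starts_with(dir))
        .map(Path::to_path_buf)
        .collect()
}

fn main() {
    // Moving from /foo/bar/baz to /foo/quux unloads /foo/bar/baz and /foo/bar;
    // /foo and / are shared with the target, so their configs stay loaded.
    let unload = dirs_to_unload(Path::new("/foo/bar/baz"), Path::new("/foo/quux"));
    assert_eq!(
        unload,
        vec![PathBuf::from("/foo/bar/baz"), PathBuf::from("/foo/bar")]
    );
}
```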


@@ -11,6 +11,6 @@ The following topics shall give the reader a top level understanding how various
Environment variables (or short envs) are stored in the `Scope` of the `EvaluationContext`. That means that environment variables are scoped by default and we don't use `std::env` to store envs (but make exceptions where convenient).
Nushell handles environment variables and their lifetime the following:
- At startup all existing environment variables are read and put into `Scope`. (Nushell reads existing environment variables platform independent by asking the `Host`. They will most likly come from `std::env::*`)
- At startup all existing environment variables are read and put into `Scope`. (Nushell reads existing environment variables platform independent by asking the `Host`. They will most likely come from `std::env::*`)
- Envs can also be loaded from config files. Each loaded config produces a new `ScopeFrame` with the envs of the loaded config.
- Nu-Script files and internal commands read and write env variables from / to the `Scope`. External scripts and binaries can't interact with the `Scope`. Therefore all env variables are read from the `Scope` and put into the external binaries environment-variables-memory area.
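A loose illustration of the scoping described above (hypothetical types, not nushell's real `Scope`/`ScopeFrame`): each loaded config pushes a frame, and lookups walk the frames from the innermost outwards, which is what makes environment variables scoped.

```rust
use std::collections::HashMap;

struct ScopeFrame {
    env: HashMap<String, String>,
}

struct Scope {
    frames: Vec<ScopeFrame>, // last frame = innermost scope
}

impl Scope {
    fn get_env(&self, name: &str) -> Option<&str> {
        self.frames
            .iter()
            .rev()
            .find_map(|frame| frame.env.get(name).map(String::as_str))
    }
}

fn main() {
    let global = ScopeFrame {
        env: HashMap::from([("PATH".to_string(), "/usr/bin".to_string())]),
    };
    let local_config = ScopeFrame {
        env: HashMap::from([("PATH".to_string(), "/project/bin".to_string())]),
    };
    let scope = Scope { frames: vec![global, local_config] };
    assert_eq!(scope.get_env("PATH"), Some("/project/bin")); // innermost frame wins
}
```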


@@ -119,9 +119,9 @@ impl EvaluationContext {
/// If an error occurs while loading the config:
/// The config is not loaded
/// The error is returned
/// After successfull loading of the config the startup scripts are run
/// After successful loading of the config the startup scripts are run
/// as normal scripts (Errors are printed out, ...)
/// After executing the startup scripts, true is returned to indicate successfull loading
/// After executing the startup scripts, true is returned to indicate successful loading
/// of the config
//
// The rational here is that, we should not partially load any config


@@ -51,7 +51,7 @@ pub enum ErrorCode {
/// Invalid number.
InvalidNumber,
/// Invalid unicode code point.
/// Invalid Unicode code point.
InvalidUnicodeCodePoint,
/// Object key is not a string.
@@ -63,7 +63,7 @@ pub enum ErrorCode {
/// JSON has non-whitespace trailing characters after the value.
TrailingCharacters,
/// Unexpected end of hex excape.
/// Unexpected end of hex escape.
UnexpectedEndOfHexEscape,
/// Found a punctuator character when expecting a quoteless string.
@@ -87,7 +87,7 @@ impl fmt::Debug for ErrorCode {
ErrorCode::ExpectedSomeValue => "expected value".fmt(f),
ErrorCode::InvalidEscape => "invalid escape".fmt(f),
ErrorCode::InvalidNumber => "invalid number".fmt(f),
ErrorCode::InvalidUnicodeCodePoint => "invalid unicode code point".fmt(f),
ErrorCode::InvalidUnicodeCodePoint => "invalid Unicode code point".fmt(f),
ErrorCode::KeyMustBeAString => "key must be a string".fmt(f),
ErrorCode::LoneLeadingSurrogateInHexEscape => {
"lone leading surrogate in hex escape".fmt(f)
@@ -105,7 +105,7 @@ impl fmt::Debug for ErrorCode {
/// value into JSON.
#[derive(Debug)]
pub enum Error {
/// The JSON value had some syntatic error.
/// The JSON value had some syntactic error.
Syntax(ErrorCode, usize, usize),
/// Some IO error occurred when serializing or deserializing a value.


@@ -132,7 +132,7 @@ def e [] {echo hi}
}
#[test]
fn def_comment_with_sinqle_quote() {
fn def_comment_with_single_quote() {
let input = r#"def f [] {
# shouldn't return error
echo hi
@@ -152,7 +152,7 @@ def e [] {echo hi}
}
#[test]
fn def_comment_with_bracks() {
fn def_comment_with_bracket() {
let input = r#"def f [] {
# should not [return error
echo hi
@@ -162,7 +162,7 @@ def e [] {echo hi}
}
#[test]
fn def_comment_with_curly() {
fn def_comment_with_curly_brace() {
let input = r#"def f [] {
# should not return {error
echo hi
@@ -334,7 +334,7 @@ echo 42
#[test]
fn no_discarded_white_space_start_of_comment() {
let code = r#"
#No white_space at firt line ==> No white_space discarded
#No white_space at first line ==> No white_space discarded
# Starting space is not discarded
echo 42
"#;
@@ -351,14 +351,14 @@ echo 42
result.block[0].pipelines[0].commands[0].comments,
Some(vec![
LiteComment::new(
"No white_space at firt line ==> No white_space discarded"
"No white_space at first line ==> No white_space discarded"
.to_string()
.spanned(Span::new(2, 58))
.spanned(Span::new(2, 59))
),
LiteComment::new(
" Starting space is not discarded"
.to_string()
.spanned(Span::new(60, 94))
.spanned(Span::new(61, 95))
),
])
);


@@ -62,8 +62,8 @@ pub fn parse_simple_column_path(
output.push(Member::Bare(trimmed.clone().spanned(part_span)));
}
current_part.clear();
// Note: I believe this is safe because of the delimiter we're using, but if we get fancy with
// unicode we'll need to change this
// Note: I believe this is safe because of the delimiter we're using,
// but if we get fancy with Unicode we'll need to change this.
start_index = idx + '.'.len_utf8();
continue;
}
@@ -143,8 +143,8 @@ pub fn parse_full_column_path(
);
}
current_part.clear();
// Note: I believe this is safe because of the delimiter we're using, but if we get fancy with
// unicode we'll need to change this
// Note: I believe this is safe because of the delimiter we're using,
// but if we get fancy with Unicode we'll need to change this.
start_index = idx + '.'.len_utf8();
continue;
}


@@ -311,11 +311,11 @@ fn parse_signature_item_end(tokens: &[Token]) -> (Option<String>, usize, Option<
);
////Separating flags/parameters is optional.
////If this should change, the below code would raise a warning whenever 2 parameters/flags are
////not delmited by <,> or <eol>
////not delimited by <,> or <eol>
//if there is next item, but it's not comma, then it must be Optional(#Comment) + <eof>
//let parsed_delimiter = parsed_comma || parsed_eol;
//if !parsed_delimiter && i < tokens.len() {
// //If not parsed , or eol but more tokens are comming
// //If not parsed , or eol but more tokens are coming
// err = err.or(Some(ParseError::mismatch(
// "Newline or ','",
// (token[i-1].to_string() + token[i].to_string()).spanned(token[i-1].span.until(token[i].span))


@@ -167,8 +167,8 @@ fn parse(raw_column_path: &Spanned<String>) -> (SpannedExpression, Option<ParseE
output.push(Member::Bare(trimmed.clone().spanned(part_span)));
}
current_part.clear();
// Note: I believe this is safe because of the delimiter we're using, but if we get fancy with
// unicode we'll need to change this
// Note: I believe this is safe because of the delimiter we're using,
// but if we get fancy with Unicode we'll need to change this.
start_index = idx + '.'.len_utf8();
continue;
}


@@ -62,7 +62,7 @@ pub fn split_sublines(input: &str) -> Vec<Vec<Subline>> {
width: {
// We've tried UnicodeWidthStr::width(x), UnicodeSegmentation::graphemes(x, true).count()
// and x.chars().count() with all types of combinations. Currently, it appears that
// getting the max of char count and unicode width seems to produce the best layout.
// getting the max of char count and Unicode width seems to produce the best layout.
// However, it's not perfect.
let c = x.chars().count();
let u = UnicodeWidthStr::width(x);
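A toy illustration of why the maximum of the two measures is taken (this assumes the `unicode-width` crate that the snippet above already uses; the helper name is made up):

```rust
use unicode_width::UnicodeWidthStr;

// Emoji are one char but two terminal columns wide, so the width wins;
// combining sequences are several chars but one column wide, so the char count wins.
fn cell_width(x: &str) -> usize {
    x.chars().count().max(UnicodeWidthStr::width(x))
}

fn main() {
    assert_eq!(cell_width("\u{1f349}"), 2); // watermelon emoji: 1 char, width 2
    assert_eq!(cell_width("e\u{0301}"), 2); // 'e' + combining acute: 2 chars, width 1
}
```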


@@ -50,7 +50,7 @@ It is possible to comment them by appending `# Comment text`!
Example
```shell
def cmd [
parameter # Paramter Comment
parameter # Parameter comment
--flag: int # Flag comment
...rest: path # Rest comment
] { ... }


@@ -1,5 +1,6 @@
# textview config
The configuration for textview, which is used to autoview text files, uses [bat](https://docs.rs/bat/0.15.4/bat/struct.PrettyPrinter.html). The textview configurtion will **not** use any existing `bat` configuration you may have.
The configuration for textview, which is used to autoview text files, uses [bat](https://docs.rs/bat/0.15.4/bat/struct.PrettyPrinter.html). The textview configuration will **not** use any existing `bat` configuration you may have.
## Configuration Points and Defaults
@@ -23,7 +24,7 @@ The configuration for textview, which is used to autoview text files, uses [bat]
| highlight_range | Specify a range of lines that should be highlighted (default: none). This can be called multiple times to highlight more than one range of lines. | no |
| theme | Specify the highlighting theme (default: OneHalfDark) | yes |
## Example textview confguration for `config.toml`
## Example textview configuration for `config.toml`
```toml
[textview]
@@ -59,4 +60,4 @@ theme = "TwoDark"
## Help
For a more detailed description of the configuration points that textview uses, please visit the `bat` repo at https://github.com/sharkdp/bat
For a more detailed description of the configuration points that textview uses, please visit the `bat` repo at <https://github.com/sharkdp/bat>.


@@ -40,7 +40,7 @@ function print_good_response {
# int_value["item"] = int_item
# return int_value
# functino get_length {
# function get_length {
# param($string_val)
# $string_len = $string_val[`"item`"][`"Primitive`"][`"String`"].Length
# }