nushell/crates/nu-parser/src/parse/def.rs
Yehuda Katz d07789677f
Clean up lexer ()
* Document the lexer and lightly improve its names

The bulk of this pull request adds a substantial amount of new inline
documentation for the lexer. Along the way, I made a few minor changes
to the names in the lexer, most of which were internal.

The main change that affects other files is renaming `group` to `block`,
since the function is actually parsing a block (a list of groups).

* Further clean up the lexer

- Consolidate the logic of the various token builders into a single type
- Improve and clean up the event-driven BlockParser
- Clean up comment parsing. Comments now contain their original leading
  whitespace as well as trailing whitespace, and know how to move some
  leading whitespace back into the body based on how the lexer decides
  to dedent the comments. This preserves the original whitespace
  information while still making it straightforward to eliminate leading
  whitespace in help comments.

* Update meta.rs

* WIP

* fix clippy

* remove unwraps

* remove unwraps

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Jonathan Turner <jonathan.d.turner@gmail.com>
2021-02-04 20:20:21 +13:00

104 lines
3.0 KiB
Rust

use crate::{
lex::tokens::LiteCommand,
parse::{classify_block, util::trim_quotes},
};
use indexmap::IndexMap;
use nu_errors::ParseError;
use nu_protocol::hir::Block;
use nu_source::{HasSpan, SpannedItem};
//use crate::errors::{ParseError, ParseResult};
use crate::lex::lexer::{block, lex};
use crate::ParserScope;
use self::param_flag_list::parse_signature;
mod param_flag_list;
/// Parse the body of a `def` command and register the resulting block in
/// `scope`. Returns `Some(ParseError)` on failure, `None` on success.
///
/// At this point, we've already handled the prototype and put it into scope,
/// so our main goal here is to parse the block now that the names and
/// prototypes of adjacent commands are also available.
pub(crate) fn parse_definition(call: &LiteCommand, scope: &dyn ParserScope) -> Option<ParseError> {
    // Expected shape: `def <name> <signature> <body>` — exactly four parts.
    // (The previous version also re-checked `len() != 4` *inside* the
    // `len() == 4` branch, which was unreachable dead code.)
    if call.parts.len() != 4 {
        return Some(ParseError::internal_error(
            "need a block".to_string().spanned(call.span()),
        ));
    }
    if call.parts[0].item != "def" {
        return Some(ParseError::mismatch("definition", call.parts[0].clone()));
    }

    let name = trim_quotes(&call.parts[1].item);
    let (mut signature, err) = parse_signature(&name, &call.parts[2]);
    // Attach the command's leading comments as the signature's usage/help text.
    signature.usage = call.comments_joined();
    if err.is_some() {
        return err;
    }

    // The body must be a literal block: first char `{`, last char `}`.
    let mut chars = call.parts[3].chars();
    match (chars.next(), chars.next_back()) {
        (Some('{'), Some('}')) => {
            // `chars` now yields only the interior of the braces; the +1
            // offset keeps spans pointing at the original source (past `{`).
            let string: String = chars.collect();
            scope.enter_scope();
            let (tokens, err) = lex(&string, call.parts[3].span.start() + 1);
            if err.is_some() {
                // Bug fix: the early returns previously skipped
                // `exit_scope()`, leaking the scope entered above.
                scope.exit_scope();
                return err;
            }
            let (lite_block, err) = block(tokens);
            if err.is_some() {
                scope.exit_scope();
                return err;
            }
            let (mut block, err) = classify_block(&lite_block, scope);
            scope.exit_scope();

            block.params = signature;
            block.params.name = name;
            scope.add_definition(block);
            err
        }
        _ => Some(ParseError::mismatch("body", call.parts[3].clone())),
    }
}
/// Register a prototype (signature + empty body) for a `def` command so that
/// adjacent commands can refer to it before its body is classified by
/// [`parse_definition`]. Returns `Some(ParseError)` on failure.
pub(crate) fn parse_definition_prototype(
    call: &LiteCommand,
    scope: &dyn ParserScope,
) -> Option<ParseError> {
    // Validate shape: `def <name> <signature> <body>`.
    if call.parts.len() != 4 {
        return Some(ParseError::mismatch("definition", call.parts[0].clone()));
    }
    if call.parts[0].item != "def" {
        return Some(ParseError::mismatch("definition", call.parts[0].clone()));
    }

    let name = trim_quotes(&call.parts[1].item);
    // Parse only the signature here; the body is handled later.
    // (The previous `let mut err = None; if err.is_none() { err = error; }`
    // dance was equivalent to returning the error directly.)
    let (signature, err) = parse_signature(&name, &call.parts[2]);

    // Register a placeholder definition with an empty body.
    scope.add_definition(Block::new(signature, vec![], IndexMap::new(), call.span()));
    err
}