Getting closer to multiline scripts (#2738)

* Begin allowing comments and multiline scripts.

* clippy

* Finish moving to groups. Test pass
Jonathan Turner
2020-11-10 16:52:42 +13:00
committed by GitHub
parent 3924e9d50a
commit e66bf70589
17 changed files with 262 additions and 160 deletions
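
"Moving to groups" in the commit message refers to a new level in the parsed representation: a Block no longer holds pipelines directly; it holds groups, each group holds pipelines, and each pipeline holds classified commands. Below is a minimal sketch of that nesting, inferred from the field accesses visible in the hunks that follow (`block.block`, `group.pipelines`, `pipeline.list`); these are stand-in types, not the real `nu_protocol::hir` definitions, which also carry spans and richer command data.

```rust
// Simplified sketch of the hierarchy this commit introduces; stand-in types,
// not the real nu_protocol::hir definitions (those also carry spans,
// arguments, and error variants).
struct Block {
    block: Vec<Group>,            // a script is a sequence of groups
}

struct Group {
    pipelines: Vec<Pipeline>,     // a group is a sequence of pipelines
}

struct Pipeline {
    list: Vec<ClassifiedCommand>, // a pipeline is a sequence of commands
}

enum ClassifiedCommand {
    Internal(String),             // stand-in for InternalCommand
}

fn main() {
    // A one-command script ends up wrapped twice: one pipeline inside one group.
    let script = Block {
        block: vec![Group {
            pipelines: vec![Pipeline {
                list: vec![ClassifiedCommand::Internal("ls".into())],
            }],
        }],
    };
    assert_eq!(script.block[0].pipelines[0].list.len(), 1);
}
```

Most of the hunks below (the CLI entry points, `if_command`, `where_command`, and several `SubCommand` implementations) are mechanical updates that walk this extra level.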

@@ -290,8 +290,8 @@ pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Bo
Ok(context)
}
pub async fn run_vec_of_pipelines(
pipelines: Vec<String>,
pub async fn run_script_file(
file_contents: String,
redirect_stdin: bool,
) -> Result<(), Box<dyn Error>> {
let mut syncer = EnvironmentSyncer::new();
@@ -313,9 +313,7 @@ pub async fn run_vec_of_pipelines(
let _ = run_startup_commands(&mut context, &config).await;
for pipeline in pipelines {
run_pipeline_standalone(pipeline, redirect_stdin, &mut context, true).await?;
}
run_script_standalone(file_contents, redirect_stdin, &mut context, true).await?;
Ok(())
}
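
The old entry point, run_vec_of_pipelines, took an already-split Vec<String>; its replacement run_script_file takes the whole file contents as one string and leaves the splitting into groups and pipelines to the parser, which is the direction the commit title points at (comments and multiline scripts). A hedged sketch of how a host binary might call it, assuming these functions belong to the nu-cli crate (see the re-export hunk further down) and that a plain `futures` executor is enough to drive it; the script path and executor choice are illustrative, not taken from this commit:

```rust
// Hypothetical host-side usage of the new entry point; assumes the
// nu_cli::run_script_file re-export from this commit and the `futures`
// crate for a blocking executor.
use std::error::Error;

fn main() -> Result<(), Box<dyn Error>> {
    // Hand the entire file to the shell; it is parsed into groups/pipelines.
    let file_contents = std::fs::read_to_string("script.nu")?;
    // redirect_stdin = false: nothing is being piped into the script here.
    futures::executor::block_on(nu_cli::run_script_file(file_contents, false))
}
```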
@@ -475,7 +473,7 @@ pub async fn cli(mut context: EvaluationContext) -> Result<(), Box<dyn Error>> {
}
let line = match convert_rustyline_result_to_string(readline) {
LineResult::Success(s) => process_line(&s, &mut context, false, true).await,
LineResult::Success(s) => process_script(&s, &mut context, false, true).await,
x => x,
};
@@ -602,8 +600,7 @@ async fn run_startup_commands(
} => {
for pipeline in pipelines {
if let Ok(pipeline_string) = pipeline.as_string() {
let _ =
run_pipeline_standalone(pipeline_string, false, context, false).await;
let _ = run_script_standalone(pipeline_string, false, context, false).await;
}
}
}
@@ -618,13 +615,13 @@ async fn run_startup_commands(
Ok(())
}
pub async fn run_pipeline_standalone(
pipeline: String,
pub async fn run_script_standalone(
script_text: String,
redirect_stdin: bool,
context: &mut EvaluationContext,
exit_on_error: bool,
) -> Result<(), Box<dyn Error>> {
let line = process_line(&pipeline, context, redirect_stdin, false).await;
let line = process_script(&script_text, context, redirect_stdin, false).await;
match line {
LineResult::Success(line) => {
@@ -892,16 +889,16 @@ pub async fn parse_and_eval(line: &str, ctx: &mut EvaluationContext) -> Result<S
}
/// Process the line by parsing the text to turn it into commands, classify those commands so that we understand what is being called in the pipeline, and then run this pipeline
pub async fn process_line(
line: &str,
pub async fn process_script(
script_text: &str,
ctx: &mut EvaluationContext,
redirect_stdin: bool,
cli_mode: bool,
) -> LineResult {
if line.trim() == "" {
LineResult::Success(line.to_string())
if script_text.trim() == "" {
LineResult::Success(script_text.to_string())
} else {
let line = chomp_newline(line);
let line = chomp_newline(script_text);
ctx.raw_input = line.to_string();
let (result, err) = nu_parser::lite_parse(&line, 0);
@@ -930,11 +927,12 @@ pub async fn process_line(
// ...then change to this directory
if cli_mode
&& classified_block.block.block.len() == 1
&& classified_block.block.block[0].list.len() == 1
&& classified_block.block.block[0].pipelines.len() == 1
&& classified_block.block.block[0].pipelines[0].list.len() == 1
{
if let ClassifiedCommand::Internal(InternalCommand {
ref name, ref args, ..
}) = classified_block.block.block[0].list[0]
}) = classified_block.block.block[0].pipelines[0].list[0]
{
let internal_name = name;
let name = args
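
The check above (see the "...then change to this directory" comment) only fires in cli mode when the parsed input is exactly one group containing exactly one pipeline containing exactly one internal command, which is what lets a bare directory name act as an implicit cd. A small sketch of that shape test on stand-in types; the real code goes on to inspect the command's name and arguments:

```rust
// Mirrors the shape check in the hunk above, using stand-in types.
struct Block { block: Vec<Group> }
struct Group { pipelines: Vec<Pipeline> }
struct Pipeline { list: Vec<Command> }
enum Command {
    Internal(String),
    #[allow(dead_code)]
    Other,
}

fn single_internal_command(block: &Block) -> Option<&str> {
    if block.block.len() == 1
        && block.block[0].pipelines.len() == 1
        && block.block[0].pipelines[0].list.len() == 1
    {
        if let Command::Internal(name) = &block.block[0].pipelines[0].list[0] {
            return Some(name.as_str());
        }
    }
    None
}

fn main() {
    let block = Block {
        block: vec![Group {
            pipelines: vec![Pipeline {
                list: vec![Command::Internal("cd".into())],
            }],
        }],
    };
    assert_eq!(single_internal_command(&block), Some("cd"));
}
```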

@@ -5,7 +5,7 @@ use crate::prelude::*;
use heim::cpu::time;
use nu_errors::ShellError;
use nu_protocol::{
hir::{Block, ClassifiedCommand, Commands, InternalCommand},
hir::{Block, ClassifiedCommand, Group, InternalCommand, Pipeline},
Dictionary, Scope, Signature, SyntaxShape, UntaggedValue, Value,
};
use rand::{
@@ -175,15 +175,19 @@ where
fn add_implicit_autoview(mut block: Block) -> Block {
if block.block.is_empty() {
block.push({
let mut commands = Commands::new(block.span);
commands.push(ClassifiedCommand::Internal(InternalCommand::new(
"autoview".to_string(),
block.span,
block.span,
)));
commands
});
let group = Group::new(
vec![{
let mut commands = Pipeline::new(block.span);
commands.push(ClassifiedCommand::Internal(InternalCommand::new(
"autoview".to_string(),
block.span,
block.span,
)));
commands
}],
block.span,
);
block.push(group);
}
block
}
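
Under the new model even the implicit autoview has to be wrapped twice: a one-command Pipeline inside a one-pipeline Group. A hedged sketch of the same construction pulled out into a helper; the helper name is hypothetical, it only reuses the constructors exercised in the hunk above (Pipeline::new, InternalCommand::new, Group::new), and the Span import path is assumed since the diff gets it via the crate prelude:

```rust
// Hypothetical helper; mirrors the construction in the hunk above.
use nu_protocol::hir::{ClassifiedCommand, Group, InternalCommand, Pipeline};
use nu_source::Span; // assumed import path; the diff relies on the prelude

fn single_internal_group(name: &str, span: Span) -> Group {
    let mut pipeline = Pipeline::new(span);
    pipeline.push(ClassifiedCommand::Internal(InternalCommand::new(
        name.to_string(),
        span,
        span,
    )));
    Group::new(vec![pipeline], span)
}
```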

@@ -5,7 +5,9 @@ use crate::prelude::*;
use crate::stream::InputStream;
use futures::stream::TryStreamExt;
use nu_errors::ShellError;
use nu_protocol::hir::{Block, ClassifiedCommand, Commands};
use nu_protocol::hir::{
Block, Call, ClassifiedCommand, Expression, Pipeline, SpannedExpression, Synthetic,
};
use nu_protocol::{ReturnSuccess, Scope, UntaggedValue, Value};
use std::sync::atomic::Ordering;
@@ -16,35 +18,52 @@ pub(crate) async fn run_block(
scope: Arc<Scope>,
) -> Result<InputStream, ShellError> {
let mut output: Result<InputStream, ShellError> = Ok(InputStream::empty());
for pipeline in &block.block {
for group in &block.block {
match output {
Ok(inp) if inp.is_empty() => {}
Ok(inp) => {
let mut output_stream = inp.to_output_stream();
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Value {
value: UntaggedValue::Error(e),
..
}))) => return Err(e),
Ok(Some(_item)) => {
if let Some(err) = ctx.get_errors().get(0) {
ctx.clear_errors();
return Err(err.clone());
// Run autoview on the values we've seen so far
// We may want to make this configurable for other kinds of hosting
if let Some(autoview) = ctx.get_command("autoview") {
let mut output_stream = ctx
.run_command(
autoview,
Tag::unknown(),
Call::new(
Box::new(SpannedExpression::new(
Expression::Synthetic(Synthetic::String("autoview".into())),
Span::unknown(),
)),
Span::unknown(),
),
scope.clone(),
inp,
)
.await?;
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Value {
value: UntaggedValue::Error(e),
..
}))) => return Err(e),
Ok(Some(_item)) => {
if let Some(err) = ctx.get_errors().get(0) {
ctx.clear_errors();
return Err(err.clone());
}
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
if ctx.ctrl_c.load(Ordering::SeqCst) {
Ok(None) => {
if let Some(err) = ctx.get_errors().get(0) {
ctx.clear_errors();
return Err(err.clone());
}
break;
}
Err(e) => return Err(e),
}
Ok(None) => {
if let Some(err) = ctx.get_errors().get(0) {
ctx.clear_errors();
return Err(err.clone());
}
break;
}
Err(e) => return Err(e),
}
}
}
@@ -52,16 +71,54 @@ pub(crate) async fn run_block(
return Err(e);
}
}
output = run_pipeline(pipeline, ctx, input, scope.clone()).await;
output = Ok(InputStream::empty());
for pipeline in &group.pipelines {
match output {
Ok(inp) if inp.is_empty() => {}
Ok(inp) => {
let mut output_stream = inp.to_output_stream();
input = InputStream::empty();
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Value {
value: UntaggedValue::Error(e),
..
}))) => return Err(e),
Ok(Some(_item)) => {
if let Some(err) = ctx.get_errors().get(0) {
ctx.clear_errors();
return Err(err.clone());
}
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
Ok(None) => {
if let Some(err) = ctx.get_errors().get(0) {
ctx.clear_errors();
return Err(err.clone());
}
break;
}
Err(e) => return Err(e),
}
}
}
Err(e) => {
return Err(e);
}
}
output = run_pipeline(pipeline, ctx, input, scope.clone()).await;
input = InputStream::empty();
}
}
output
}
async fn run_pipeline(
commands: &Commands,
commands: &Pipeline,
ctx: &mut EvaluationContext,
mut input: InputStream,
scope: Arc<Scope>,
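
The reworked run_block above now drains output at two levels: between groups it runs whatever the previous group produced through `autoview` ("Run autoview on the values we've seen so far"), so each group in a script still gets displayed, while between pipelines inside a group it only drains and discards leftover values, surfacing errors and honoring Ctrl-C during the drain. A rough, synchronous sketch of that control flow with stand-in types; the real code is async, streams values, and performs the error/Ctrl-C checks while draining:

```rust
// Rough control-flow sketch of the reworked run_block, with stand-in types.
struct Pipeline;
struct Group { pipelines: Vec<Pipeline> }
struct Block { block: Vec<Group> }

type Values = Vec<String>;

// Stand-in for the real async run_pipeline.
fn run_pipeline(_pipeline: &Pipeline, _input: Values) -> Values {
    Vec::new()
}

// Stand-in for running the `autoview` command over leftover values.
fn autoview(values: Values) {
    for value in values {
        println!("{}", value);
    }
}

fn run_block(block: &Block, mut input: Values) -> Values {
    let mut output = Values::new();
    for group in &block.block {
        // Between groups: display whatever the previous group left behind.
        autoview(std::mem::take(&mut output));
        for pipeline in &group.pipelines {
            // Between pipelines in a group, leftover values are only drained;
            // in the real code errors are surfaced during that drain.
            output = run_pipeline(pipeline, std::mem::take(&mut input));
        }
    }
    // The last pipeline's output is handed back to the caller.
    output
}

fn main() {
    let block = Block {
        block: vec![Group { pipelines: vec![Pipeline] }],
    };
    run_block(&block, vec!["hello".into()]);
}
```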

@@ -94,9 +94,9 @@ async fn if_command(
tag,
));
}
match condition.block[0].list.get(0) {
Some(item) => match item {
ClassifiedCommand::Expr(expr) => expr.clone(),
match condition.block[0].pipelines.get(0) {
Some(item) => match item.list.get(0) {
Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",

@@ -51,9 +51,9 @@ impl WholeStreamCommand for SubCommand {
tag,
));
}
match block.block[0].list.get(0) {
Some(item) => match item {
ClassifiedCommand::Expr(expr) => expr.clone(),
match block.block[0].pipelines.get(0) {
Some(item) => match item.list.get(0) {
Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",

@@ -50,9 +50,9 @@ impl WholeStreamCommand for SubCommand {
tag,
));
}
match block.block[0].list.get(0) {
Some(item) => match item {
ClassifiedCommand::Expr(expr) => expr.clone(),
match block.block[0].pipelines.get(0) {
Some(item) => match item.list.get(0) {
Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",

@@ -50,9 +50,9 @@ impl WholeStreamCommand for SubCommand {
tag,
));
}
match block.block[0].list.get(0) {
Some(item) => match item {
ClassifiedCommand::Expr(expr) => expr.clone(),
match block.block[0].pipelines.get(0) {
Some(item) => match item.list.get(0) {
Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",

@@ -50,9 +50,9 @@ impl WholeStreamCommand for SubCommand {
tag,
));
}
match block.block[0].list.get(0) {
Some(item) => match item {
ClassifiedCommand::Expr(expr) => expr.clone(),
match block.block[0].pipelines.get(0) {
Some(item) => match item.list.get(0) {
Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",

@@ -81,9 +81,9 @@ async fn where_command(
tag,
));
}
match block.block[0].list.get(0) {
Some(item) => match item {
ClassifiedCommand::Expr(expr) => expr.clone(),
match block.block[0].pipelines.get(0) {
Some(item) => match item.list.get(0) {
Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",

@@ -138,7 +138,7 @@ impl<'s> Flatten<'s> {
result
}
fn pipeline(&self, pipeline: &Commands) -> Vec<CompletionLocation> {
fn pipeline(&self, pipeline: &Pipeline) -> Vec<CompletionLocation> {
let mut result = Vec::new();
for command in &pipeline.list {
@@ -158,7 +158,11 @@ impl<'s> Flatten<'s> {
/// Flattens the block into a Vec of completion locations
pub fn completion_locations(&self, block: &Block) -> Vec<CompletionLocation> {
block.block.iter().flat_map(|v| self.pipeline(v)).collect()
block
.block
.iter()
.flat_map(|g| g.pipelines.iter().flat_map(|v| self.pipeline(v)))
.collect()
}
pub fn new(line: &'s str) -> Flatten<'s> {

@@ -43,8 +43,8 @@ mod examples;
pub use crate::cli::cli;
pub use crate::cli::{
create_default_context, parse_and_eval, process_line, register_plugins,
run_pipeline_standalone, run_vec_of_pipelines, LineResult,
create_default_context, parse_and_eval, process_script, register_plugins, run_script_file,
run_script_standalone, LineResult,
};
pub use crate::command_registry::CommandRegistry;
pub use crate::commands::command::{

@@ -5,8 +5,8 @@ use nu_errors::ShellError;
use nu_parser::SignatureRegistry;
use nu_protocol::{
hir::{
Binary, Block, ClassifiedCommand, Commands, Expression, Literal, NamedArguments,
NamedValue, Operator, SpannedExpression,
Binary, Block, ClassifiedCommand, Expression, Literal, NamedArguments, NamedValue,
Operator, Pipeline, SpannedExpression,
},
NamedType, PositionalType, Signature, SyntaxShape,
};
@@ -318,7 +318,7 @@ fn spanned_to_binary(bin_spanned: &SpannedExpression) -> &Binary {
///Returns result shape of this math expr otherwise
fn get_result_shape_of_math_expr(
bin: &Binary,
(pipeline_idx, pipeline): (usize, &Commands),
(pipeline_idx, pipeline): (usize, &Pipeline),
registry: &CommandRegistry,
) -> Result<Option<SyntaxShape>, ShellError> {
let mut shapes: Vec<Option<SyntaxShape>> = vec![];
@@ -388,15 +388,17 @@ impl VarSyntaxShapeDeductor {
fn infer_shape(&mut self, block: &Block, registry: &CommandRegistry) -> Result<(), ShellError> {
trace!("Infering vars in shape");
for pipeline in &block.block {
self.infer_pipeline(pipeline, registry)?;
for group in &block.block {
for pipeline in &group.pipelines {
self.infer_pipeline(pipeline, registry)?;
}
}
Ok(())
}
pub fn infer_pipeline(
&mut self,
pipeline: &Commands,
pipeline: &Pipeline,
registry: &CommandRegistry,
) -> Result<(), ShellError> {
trace!("Infering vars in pipeline");
@@ -534,7 +536,7 @@ impl VarSyntaxShapeDeductor {
fn infer_shapes_in_expr(
&mut self,
(pipeline_idx, pipeline): (usize, &Commands),
(pipeline_idx, pipeline): (usize, &Pipeline),
spanned_expr: &SpannedExpression,
registry: &CommandRegistry,
) -> Result<(), ShellError> {
@@ -660,7 +662,7 @@ impl VarSyntaxShapeDeductor {
(var, expr): (&VarUsage, &SpannedExpression),
//source_bin is binary having var on one and expr on other side
source_bin: &SpannedExpression,
(pipeline_idx, pipeline): (usize, &Commands),
(pipeline_idx, pipeline): (usize, &Pipeline),
registry: &CommandRegistry,
) -> Result<Option<SyntaxShape>, ShellError> {
get_result_shape_of_math_expr(spanned_to_binary(expr), (pipeline_idx, pipeline), registry)
@@ -682,7 +684,7 @@ impl VarSyntaxShapeDeductor {
(var, expr): (&VarUsage, &SpannedExpression),
//source_bin is binary having var on one and expr on other side
source_bin: &SpannedExpression,
(pipeline_idx, pipeline): (usize, &Commands),
(pipeline_idx, pipeline): (usize, &Pipeline),
registry: &CommandRegistry,
) -> Result<Option<SyntaxShape>, ShellError> {
trace!("Getting shape of binary arg {:?} for var {:?}", expr, var);
@@ -714,7 +716,7 @@ impl VarSyntaxShapeDeductor {
var: &VarUsage,
bin_spanned: &SpannedExpression,
list: &[SpannedExpression],
(_pipeline_idx, _pipeline): (usize, &Commands),
(_pipeline_idx, _pipeline): (usize, &Pipeline),
_registry: &CommandRegistry,
) -> Option<Vec<SyntaxShape>> {
let shapes_in_list = list
@@ -740,7 +742,7 @@ impl VarSyntaxShapeDeductor {
var_side: BinarySide,
//Binary having expr on one side and var on other
bin_spanned: &SpannedExpression,
(pipeline_idx, pipeline): (usize, &Commands),
(pipeline_idx, pipeline): (usize, &Pipeline),
registry: &CommandRegistry,
) -> Result<(), ShellError> {
trace!("Infering shapes between var {:?} and expr {:?}", var, expr);
@@ -893,7 +895,7 @@ impl VarSyntaxShapeDeductor {
fn infer_shapes_in_binary_expr(
&mut self,
(pipeline_idx, pipeline): (usize, &Commands),
(pipeline_idx, pipeline): (usize, &Pipeline),
bin_spanned: &SpannedExpression,
registry: &CommandRegistry,
) -> Result<(), ShellError> {
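
The completion code and the shape deductor above end up with the same traversal, written once as a flat_map chain and once as nested for loops: visit every pipeline of every group in a block. A small sketch of that pattern in isolation, again on stand-in types rather than the real nu_protocol::hir ones:

```rust
// "Every pipeline in a block" traversal shared by the hunks above,
// shown with stand-in types.
struct Pipeline { list: Vec<String> }
struct Group { pipelines: Vec<Pipeline> }
struct Block { block: Vec<Group> }

fn all_pipelines(block: &Block) -> impl Iterator<Item = &Pipeline> {
    block.block.iter().flat_map(|group| group.pipelines.iter())
}

fn main() {
    let block = Block {
        block: vec![Group {
            pipelines: vec![Pipeline {
                list: vec!["ls".into(), "where".into()],
            }],
        }],
    };
    // Equivalent to the nested `for group { for pipeline { ... } }` form above.
    for pipeline in all_pipelines(&block) {
        println!("pipeline with {} command(s)", pipeline.list.len());
    }
}
```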