From 7efb31a4e4887a66c0ac4a13228ef904fd8de9e5 Mon Sep 17 00:00:00 2001
From: Yehuda Katz
Date: Tue, 21 Jan 2020 17:45:03 -0500
Subject: [PATCH] Restructure and streamline token expansion (#1123)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Restructure and streamline token expansion

The purpose of this commit is to streamline the token expansion code by
removing aspects of the code that are no longer relevant, removing
pointless duplication, and eliminating the need to pass the same
arguments to every `expand_syntax` call.

The first big-picture change is that instead of a handful of free
`expand_` functions, each taking a `TokensIterator` and an
`ExpandContext`, a smaller number of methods on `TokensIterator` itself
now do the same job.

The second big-picture change is the complete elimination of the
coloring traits, making coloring a responsibility of the base expansion
implementations. This also means that the coloring tracer is merged
into the expansion tracer, so you can follow a single expansion and see
how the expansion process produced colored tokens.

One side effect of this change is that the expander itself is
marginally more error-correcting. The error correction works by
switching from structured expansion to `BackoffColoringMode` when an
unexpected token is found, which guarantees that every span of the
source gets colored, but may not be the optimal error-recovery
strategy. That said, because `BackoffColoringMode` only extends as far
as a closing delimiter (`)`, `]`, `}`) or pipe (`|`), it results in a
fairly granular correction strategy.

The current code still produces an `Err` (plus a complete list of
colored shapes) from the parsing process if any errors are encountered,
but this could easily be addressed now that the underlying expansion is
error-correcting.

This commit also colors any spans that are syntax errors in red, and
makes the parser include additional information about which tokens were
expected at any point where an error was encountered, so that
completions and hinting can be more robust in the future.

Co-authored-by: Jonathan Turner
Co-authored-by: Andrés N. Robalino
---
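[Editorial note, not part of the original patch] For orientation, here is a
minimal sketch of what a shape looks like after this restructuring. The
`BareWordShape` type and its body are invented for illustration; the
`ExpandSyntax` surface (`name`, `expand`), the `TokensIterator` entry points
(`expand_syntax`, `expand_infallible`, `peek`), and the `color_shape` call
mirror the implementations that appear in the diff below.

// Hypothetical shape, sketched under the assumption that the trait and
// iterator methods behave as shown in the implementations in this patch.
#[derive(Debug, Copy, Clone)]
struct BareWordShape;

impl ExpandSyntax for BareWordShape {
    // Fallible shapes return a Result; infallible shapes return a plain value
    // and are driven through `expand_infallible` instead of `expand_syntax`.
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "bare word"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output {
        // The iterator now owns the ExpandContext, so no separate context
        // argument is threaded through every call.
        let peeked = token_nodes.peek().not_eof("bare word")?;
        let node = peeked.commit();
        let span = node.span();

        // Coloring is now the responsibility of the expansion implementation.
        token_nodes.color_shape(FlatShape::Word.spanned(span));
        Ok(span)
    }
}

// Call sites change from the old free-function style ...
//
//     let word = expand_syntax(&BareWordShape, token_nodes, context)?;
//
// ... to methods on the iterator itself:
//
//     let word = token_nodes.expand_syntax(BareWordShape)?;

Because the iterator now owns both the context and the color tracker, a single
expansion trace (see expand_trace.rs) can show how an expansion both consumed
tokens and emitted colored shapes. When a shape like this fails on unexpected
input, list expansion falls back to `BackoffColoringMode`, which colors
everything up to the next closing delimiter or pipe so that no span is left
uncolored.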
 .azure/azure-pipelines.yml                    |    1 +
 Cargo.lock                                    |   10 +-
 Cargo.toml                                    |   15 +-
 TODO.md                                       |    8 +
 crates/nu-errors/Cargo.toml                   |    1 +
 crates/nu-errors/src/lib.rs                   |   49 +-
 crates/nu-parser/src/commands.rs              |    7 +-
 crates/nu-parser/src/commands/classified.rs   |   12 +-
 crates/nu-parser/src/hir.rs                   |  372 +++--
 .../nu-parser/src/hir/baseline_parse/tests.rs |  204 ++-
 crates/nu-parser/src/hir/binary.rs            |    6 +-
 .../src/hir/expand_external_tokens.rs         |  287 ++--
 crates/nu-parser/src/hir/named.rs             |   43 +-
 crates/nu-parser/src/hir/path.rs              |    6 +-
 crates/nu-parser/src/hir/range.rs             |    6 +-
 crates/nu-parser/src/hir/syntax_shape.rs      | 1193 +++++------------
 .../nu-parser/src/hir/syntax_shape/block.rs   |  212 +--
 .../nu-parser/src/hir/syntax_shape/design.md  |   72 +
 .../src/hir/syntax_shape/expression.rs        |  312 +----
 .../hir/syntax_shape/expression/delimited.rs  |   55 +-
 .../hir/syntax_shape/expression/file_path.rs  |  120 +-
 .../src/hir/syntax_shape/expression/list.rs   |  185 +--
 .../src/hir/syntax_shape/expression/number.rs |  188 +--
 .../hir/syntax_shape/expression/pattern.rs    |  132 +-
 .../src/hir/syntax_shape/expression/range.rs  |   98 +-
 .../src/hir/syntax_shape/expression/string.rs |  141 +-
 .../src/hir/syntax_shape/expression/unit.rs   |   86 +-
 .../syntax_shape/expression/variable_path.rs  |  794 ++++------
 .../src/hir/syntax_shape/flat_shape.rs        |  201 ++-
 crates/nu-parser/src/hir/tokens_iterator.rs   |  920 ++++++-------
 .../hir/tokens_iterator/debug/color_trace.rs  |   46 +-
 .../hir/tokens_iterator/debug/expand_trace.rs |  267 +++-
 .../src/hir/tokens_iterator/into_shapes.rs    |   56 +
 .../src/hir/tokens_iterator/pattern.rs        |   30 +
 .../src/hir/tokens_iterator/state.rs          |  105 ++
 .../src/hir/tokens_iterator/tests.rs          |    7 +-
 crates/nu-parser/src/lib.rs                   |   62 +-
 crates/nu-parser/src/macros.rs                |    9 +
 crates/nu-parser/src/parse.rs                 |    2 +-
 crates/nu-parser/src/parse/call_node.rs       |    8 +-
 crates/nu-parser/src/parse/comment.rs         |   12 +-
 crates/nu-parser/src/parse/flag.rs            |    7 +-
 crates/nu-parser/src/parse/number.rs          |   70 +
 crates/nu-parser/src/parse/parser.rs          |  250 +++-
 crates/nu-parser/src/parse/pipeline.rs        |   29 +-
 crates/nu-parser/src/parse/token_tree.rs      |  576 ++++----
 .../nu-parser/src/parse/token_tree_builder.rs |  149 +-
 crates/nu-parser/src/parse_command.rs         |  466 +++----
 crates/nu-protocol/src/signature.rs           |   29 +
 crates/nu-protocol/src/syntax_shape.rs        |   20 +-
 crates/nu-source/src/lib.rs                   |    5 +-
 crates/nu-source/src/meta.rs                  |   84 +-
 crates/nu-source/src/pretty.rs                |   47 +-
 crates/nu-test-support/src/macros.rs          |   21 +-
 src/cli.rs                                    |   64 +-
 src/commands/autoview.rs                      |   15 +-
 src/commands/classified/external.rs           |    4 +-
 src/commands/classified/internal.rs           |   30 +-
 src/commands/classified/pipeline.rs           |    3 +
 src/commands/command.rs                       |    2 +-
 src/commands/enter.rs                         |    6 +-
 src/commands/from_sqlite.rs                   |    3 +-
 src/commands/help.rs                          |    6 +-
 src/commands/open.rs                          |    2 +-
 src/commands/save.rs                          |    4 +-
 src/commands/shells.rs                        |   11 +-
 src/commands/what.rs                          |    7 +-
 src/commands/which_.rs                        |    4 +-
 src/context.rs                                |  159 +--
 src/data/base.rs                              |    2 +-
 src/evaluate/evaluator.rs                     |   60 +-
 src/prelude.rs                                |   13 +-
 src/shell/completer.rs                        |   23 +-
 src/shell/filesystem_shell.rs                 |   12 +-
 src/shell/help_shell.rs                       |   46 +-
 src/shell/helper.rs                           |  191 +--
 src/shell/shell.rs                            |    9 +-
 src/shell/shell_manager.rs                    |  242 +---
 src/shell/value_shell.rs                      |    9 +-
 tests/commands/open.rs                        |    3 +-
 tests/commands/pick.rs                        |    4 +-
 81 files changed, 4145 insertions(+), 4882 deletions(-)
 create mode 100644 crates/nu-parser/src/hir/syntax_shape/design.md
 create mode 100644 crates/nu-parser/src/hir/tokens_iterator/into_shapes.rs
 create mode 100644
crates/nu-parser/src/hir/tokens_iterator/pattern.rs create mode 100644 crates/nu-parser/src/hir/tokens_iterator/state.rs create mode 100644 crates/nu-parser/src/macros.rs create mode 100644 crates/nu-parser/src/parse/number.rs diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml index 25a0b6b6eb..50d9b3e1a7 100644 --- a/.azure/azure-pipelines.yml +++ b/.azure/azure-pipelines.yml @@ -59,3 +59,4 @@ steps: - bash: cargo fmt --all -- --check condition: eq(variables['style'], 'fmt') displayName: Lint + \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 3e0bae031c..a59deda51d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1870,9 +1870,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.17.1" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266eb8c361198e8d1f682bc974e5d9e2ae90049fb1943890904d11dad7d4a77d" +checksum = "5e5b95e89c330291768dc840238db7f9e204fd208511ab6319b56193a7f2ae25" dependencies = [ "cc", "pkg-config", @@ -2289,6 +2289,7 @@ dependencies = [ "roxmltree", "rusqlite", "rustyline", + "semver", "serde 1.0.104", "serde-hjson 0.9.1", "serde_bytes", @@ -2331,6 +2332,7 @@ dependencies = [ "ansi_term 0.12.1", "bigdecimal", "derive-new", + "getset", "language-reporting", "nom 5.1.0", "nom_locate", @@ -3373,9 +3375,9 @@ dependencies = [ [[package]] name = "rusqlite" -version = "0.21.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a656821bb6317a84b257737b7934f79c0dbb7eb694710475908280ebad3e64" +checksum = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051" dependencies = [ "bitflags", "fallible-iterator", diff --git a/Cargo.toml b/Cargo.toml index f9785ace4e..2417ecf887 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,12 +39,12 @@ members = [ # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -nu-source = { version = "0.8.0", path = "./crates/nu-source" } -nu-plugin = { version = "0.8.0", path = "./crates/nu-plugin" } -nu-protocol = { version = "0.8.0", path = "./crates/nu-protocol" } -nu-errors = { version = "0.8.0", path = "./crates/nu-errors" } -nu-parser = { version = "0.8.0", path = "./crates/nu-parser" } -nu-value-ext = { version = "0.8.0", path = "./crates/nu-value-ext" } +nu-source = {version = "0.8.0", path = "./crates/nu-source"} +nu-plugin = {version = "0.8.0", path = "./crates/nu-plugin"} +nu-protocol = {version = "0.8.0", path = "./crates/nu-protocol"} +nu-errors = {version = "0.8.0", path = "./crates/nu-errors"} +nu-parser = {version = "0.8.0", path = "./crates/nu-parser"} +nu-value-ext = {version = "0.8.0", path = "./crates/nu-value-ext"} nu_plugin_average = {version = "0.8.0", path = "./crates/nu_plugin_average", optional=true} nu_plugin_binaryview = {version = "0.8.0", path = "./crates/nu_plugin_binaryview", optional=true} nu_plugin_fetch = {version = "0.8.0", path = "./crates/nu_plugin_fetch", optional=true} @@ -133,6 +133,7 @@ onig_sys = {version = "=69.1.0", optional = true } crossterm = {version = "0.10.2", optional = true} futures-timer = {version = "1.0.2", optional = true} url = {version = "2.1.1", optional = true} +semver = {version = "0.9.0", optional = true} [target.'cfg(unix)'.dependencies] users = "0.9" @@ -163,7 +164,7 @@ trace = ["nu-parser/trace"] tree = ["nu_plugin_tree"] [dependencies.rusqlite] -version = "0.21.0" +version = "0.20.0" features = ["bundled", "blob"] [dev-dependencies] diff --git a/TODO.md b/TODO.md index 
db001731f2..2ac069f45a 100644 --- a/TODO.md +++ b/TODO.md @@ -50,3 +50,11 @@ textview in own crate Combine atomic and atomic_parse in parser at_end_possible_ws needs to be comment and separator sensitive + +Eliminate unnecessary `nodes` parser + +#[derive(HasSpan)] + +Figure out a solution for the duplication in stuff like NumberShape vs. NumberExpressionShape + +use `struct Expander` from signature.rs \ No newline at end of file diff --git a/crates/nu-errors/Cargo.toml b/crates/nu-errors/Cargo.toml index 9740477c32..ed4e623034 100644 --- a/crates/nu-errors/Cargo.toml +++ b/crates/nu-errors/Cargo.toml @@ -21,6 +21,7 @@ num-traits = "0.2.10" serde = { version = "1.0.103", features = ["derive"] } nom = "5.0.1" nom_locate = "1.0.0" +getset = "0.0.9" # implement conversions subprocess = "0.1.18" diff --git a/crates/nu-errors/src/lib.rs b/crates/nu-errors/src/lib.rs index 8b16d2c281..a78689d9de 100644 --- a/crates/nu-errors/src/lib.rs +++ b/crates/nu-errors/src/lib.rs @@ -1,8 +1,11 @@ use ansi_term::Color; use bigdecimal::BigDecimal; use derive_new::new; +use getset::Getters; use language_reporting::{Diagnostic, Label, Severity}; -use nu_source::{b, DebugDocBuilder, PrettyDebug, Span, Spanned, SpannedItem, TracableContext}; +use nu_source::{ + b, DebugDocBuilder, HasFallibleSpan, PrettyDebug, Span, Spanned, SpannedItem, TracableContext, +}; use num_bigint::BigInt; use num_traits::ToPrimitive; use serde::{Deserialize, Serialize}; @@ -12,16 +15,16 @@ use std::ops::Range; /// A structured reason for a ParseError. Note that parsing in nu is more like macro expansion in /// other languages, so the kinds of errors that can occur during parsing are more contextual than /// you might expect. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq)] pub enum ParseErrorReason { /// The parser encountered an EOF rather than what it was expecting - Eof { expected: &'static str, span: Span }, + Eof { expected: String, span: Span }, /// The parser expected to see the end of a token stream (possibly the token /// stream from inside a delimited token node), but found something else. 
ExtraTokens { actual: Spanned }, /// The parser encountered something other than what it was expecting Mismatch { - expected: &'static str, + expected: String, actual: Spanned, }, @@ -37,16 +40,20 @@ pub enum ParseErrorReason { } /// A newtype for `ParseErrorReason` -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq, Getters)] pub struct ParseError { + #[get = "pub"] reason: ParseErrorReason, } impl ParseError { /// Construct a [ParseErrorReason::Eof](ParseErrorReason::Eof) - pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError { + pub fn unexpected_eof(expected: impl Into, span: Span) -> ParseError { ParseError { - reason: ParseErrorReason::Eof { expected, span }, + reason: ParseErrorReason::Eof { + expected: expected.into(), + span, + }, } } @@ -62,12 +69,12 @@ impl ParseError { } /// Construct a [ParseErrorReason::Mismatch](ParseErrorReason::Mismatch) - pub fn mismatch(expected: &'static str, actual: Spanned>) -> ParseError { + pub fn mismatch(expected: impl Into, actual: Spanned>) -> ParseError { let Spanned { span, item } = actual; ParseError { reason: ParseErrorReason::Mismatch { - expected, + expected: expected.into(), actual: item.into().spanned(span), }, } @@ -728,6 +735,30 @@ impl ProximateShellError { } } +impl HasFallibleSpan for ShellError { + fn maybe_span(&self) -> Option { + self.error.maybe_span() + } +} + +impl HasFallibleSpan for ProximateShellError { + fn maybe_span(&self) -> Option { + Some(match self { + ProximateShellError::SyntaxError { problem } => problem.span, + ProximateShellError::UnexpectedEof { span, .. } => *span, + ProximateShellError::TypeError { actual, .. } => actual.span, + ProximateShellError::MissingProperty { subpath, .. } => subpath.span, + ProximateShellError::InvalidIntegerIndex { subpath, .. } => subpath.span, + ProximateShellError::MissingValue { span, .. } => return *span, + ProximateShellError::ArgumentError { command, .. } => command.span, + ProximateShellError::RangeError { actual_kind, .. } => actual_kind.span, + ProximateShellError::Diagnostic(_) => return None, + ProximateShellError::CoerceError { left, right } => left.span.until(right.span), + ProximateShellError::UntaggedRuntimeError { .. } => return None, + }) + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ShellDiagnostic { pub(crate) diagnostic: Diagnostic, diff --git a/crates/nu-parser/src/commands.rs b/crates/nu-parser/src/commands.rs index 8bd5cc4ceb..5b601a483d 100644 --- a/crates/nu-parser/src/commands.rs +++ b/crates/nu-parser/src/commands.rs @@ -3,7 +3,6 @@ pub mod classified; use crate::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand}; use crate::commands::classified::ClassifiedCommand; use crate::hir::expand_external_tokens::ExternalTokensShape; -use crate::hir::syntax_shape::{expand_syntax, ExpandContext}; use crate::hir::tokens_iterator::TokensIterator; use nu_errors::ParseError; use nu_source::{Spanned, Tagged}; @@ -13,10 +12,10 @@ use nu_source::{Spanned, Tagged}; // strings. 
pub(crate) fn external_command( tokens: &mut TokensIterator, - context: &ExpandContext, name: Tagged<&str>, ) -> Result { - let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?.tokens; + let Spanned { item, span } = tokens.expand_infallible(ExternalTokensShape).tokens; + let full_span = name.span().until(span); Ok(ClassifiedCommand::External(ExternalCommand { name: name.to_string(), @@ -29,7 +28,7 @@ pub(crate) fn external_command( arg: x.item.clone(), }) .collect(), - span, + span: full_span, }, })) } diff --git a/crates/nu-parser/src/commands/classified.rs b/crates/nu-parser/src/commands/classified.rs index 7a3409db9e..238a9da491 100644 --- a/crates/nu-parser/src/commands/classified.rs +++ b/crates/nu-parser/src/commands/classified.rs @@ -4,18 +4,20 @@ pub mod internal; use crate::commands::classified::external::ExternalCommand; use crate::commands::classified::internal::InternalCommand; use crate::hir; -use crate::parse::token_tree::TokenNode; +use crate::parse::token_tree::SpannedToken; use derive_new::new; +use nu_errors::ParseError; use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span}; #[derive(Debug, Clone, Eq, PartialEq)] pub enum ClassifiedCommand { #[allow(unused)] - Expr(TokenNode), + Expr(SpannedToken), #[allow(unused)] Dynamic(hir::Call), Internal(InternalCommand), External(ExternalCommand), + Error(ParseError), } impl PrettyDebugWithSource for ClassifiedCommand { @@ -23,6 +25,7 @@ impl PrettyDebugWithSource for ClassifiedCommand { match self { ClassifiedCommand::Expr(token) => b::typed("command", token.pretty_debug(source)), ClassifiedCommand::Dynamic(call) => b::typed("command", call.pretty_debug(source)), + ClassifiedCommand::Error(_) => b::error("no command"), ClassifiedCommand::Internal(internal) => internal.pretty_debug(source), ClassifiedCommand::External(external) => external.pretty_debug(source), } @@ -35,6 +38,7 @@ impl HasSpan for ClassifiedCommand { ClassifiedCommand::Expr(node) => node.span(), ClassifiedCommand::Internal(command) => command.span(), ClassifiedCommand::Dynamic(call) => call.span, + ClassifiedCommand::Error(_) => Span::unknown(), ClassifiedCommand::External(command) => command.span(), } } @@ -62,6 +66,9 @@ impl std::ops::Deref for Commands { #[derive(Debug, Clone)] pub struct ClassifiedPipeline { pub commands: Commands, + // this is not a Result to make it crystal clear that these shapes + // aren't intended to be used directly with `?` + pub failed: Option, } impl ClassifiedPipeline { @@ -71,6 +78,7 @@ impl ClassifiedPipeline { list, span: span.into(), }, + failed: None, } } } diff --git a/crates/nu-parser/src/hir.rs b/crates/nu-parser/src/hir.rs index 8baa36088d..9cc1a7a0d6 100644 --- a/crates/nu-parser/src/hir.rs +++ b/crates/nu-parser/src/hir.rs @@ -5,7 +5,6 @@ pub(crate) mod external_command; pub(crate) mod named; pub(crate) mod path; pub(crate) mod range; -pub(crate) mod signature; pub mod syntax_shape; pub(crate) mod tokens_iterator; @@ -17,17 +16,17 @@ use derive_new::new; use getset::Getters; use nu_protocol::{PathMember, ShellTypeName}; use nu_source::{ - b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem, + b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebug, PrettyDebugRefineKind, + PrettyDebugWithSource, Span, Spanned, }; use serde::{Deserialize, Serialize}; use std::path::PathBuf; -use crate::parse::tokens::RawNumber; +use crate::parse::number::RawNumber; pub(crate) use self::binary::Binary; pub(crate) use self::path::Path; pub(crate) use 
self::range::Range; -pub(crate) use self::syntax_shape::ExpandContext; pub(crate) use self::tokens_iterator::TokensIterator; pub use self::external_command::ExternalCommand; @@ -63,9 +62,9 @@ impl PrettyDebugWithSource for Signature { #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] pub struct Call { #[get = "pub(crate)"] - pub head: Box, + pub head: Box, #[get = "pub(crate)"] - pub positional: Option>, + pub positional: Option>, #[get = "pub(crate)"] pub named: Option, pub span: Span, @@ -85,35 +84,54 @@ impl Call { } impl PrettyDebugWithSource for Call { + fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { + match refine { + PrettyDebugRefineKind::ContextFree => self.pretty_debug(source), + PrettyDebugRefineKind::WithContext => { + self.head + .refined_pretty_debug(PrettyDebugRefineKind::WithContext, source) + + b::preceded_option( + Some(b::space()), + self.positional.as_ref().map(|pos| { + b::intersperse( + pos.iter().map(|expr| { + expr.refined_pretty_debug( + PrettyDebugRefineKind::WithContext, + source, + ) + }), + b::space(), + ) + }), + ) + + b::preceded_option( + Some(b::space()), + self.named.as_ref().map(|named| { + named.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source) + }), + ) + } + } + } + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { - b::delimit( - "(", - self.head.pretty_debug(source) - + b::preceded_option( - Some(b::space()), - self.positional.as_ref().map(|pos| { - b::intersperse(pos.iter().map(|expr| expr.pretty_debug(source)), b::space()) - }), - ) - + b::preceded_option( - Some(b::space()), - self.named.as_ref().map(|named| named.pretty_debug(source)), - ), - ")", + b::typed( + "call", + self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source), ) } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RawExpression { +pub enum Expression { Literal(Literal), ExternalWord, Synthetic(Synthetic), Variable(Variable), Binary(Box), Range(Box), - Block(Vec), - List(Vec), + Block(Vec), + List(Vec), Path(Box), FilePath(PathBuf), @@ -123,22 +141,22 @@ pub enum RawExpression { Boolean(bool), } -impl ShellTypeName for RawExpression { +impl ShellTypeName for Expression { fn type_name(&self) -> &'static str { match self { - RawExpression::Literal(literal) => literal.type_name(), - RawExpression::Synthetic(synthetic) => synthetic.type_name(), - RawExpression::Command(..) => "command", - RawExpression::ExternalWord => "external word", - RawExpression::FilePath(..) => "file path", - RawExpression::Variable(..) => "variable", - RawExpression::List(..) => "list", - RawExpression::Binary(..) => "binary", - RawExpression::Range(..) => "range", - RawExpression::Block(..) => "block", - RawExpression::Path(..) => "variable path", - RawExpression::Boolean(..) => "boolean", - RawExpression::ExternalCommand(..) => "external", + Expression::Literal(literal) => literal.type_name(), + Expression::Synthetic(synthetic) => synthetic.type_name(), + Expression::Command(..) => "command", + Expression::ExternalWord => "external word", + Expression::FilePath(..) => "file path", + Expression::Variable(..) => "variable", + Expression::List(..) => "list", + Expression::Binary(..) => "binary", + Expression::Range(..) => "range", + Expression::Block(..) => "block", + Expression::Path(..) => "variable path", + Expression::Boolean(..) => "boolean", + Expression::ExternalCommand(..) 
=> "external", } } } @@ -156,16 +174,24 @@ impl ShellTypeName for Synthetic { } } -impl RawExpression { - pub fn into_expr(self, span: impl Into) -> Expression { - Expression { +impl IntoSpanned for Expression { + type Output = SpannedExpression; + + fn into_spanned(self, span: impl Into) -> Self::Output { + SpannedExpression { expr: self, span: span.into(), } } +} - pub fn into_unspanned_expr(self) -> Expression { - Expression { +impl Expression { + pub fn into_expr(self, span: impl Into) -> SpannedExpression { + self.into_spanned(span) + } + + pub fn into_unspanned_expr(self) -> SpannedExpression { + SpannedExpression { expr: self, span: Span::unknown(), } @@ -173,40 +199,93 @@ impl RawExpression { } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub struct Expression { - pub expr: RawExpression, +pub struct SpannedExpression { + pub expr: Expression, pub span: Span, } -impl std::ops::Deref for Expression { - type Target = RawExpression; +impl std::ops::Deref for SpannedExpression { + type Target = Expression; - fn deref(&self) -> &RawExpression { + fn deref(&self) -> &Expression { &self.expr } } -impl HasSpan for Expression { +impl HasSpan for SpannedExpression { fn span(&self) -> Span { self.span } } -impl PrettyDebugWithSource for Expression { +impl ShellTypeName for SpannedExpression { + fn type_name(&self) -> &'static str { + self.expr.type_name() + } +} + +impl PrettyDebugWithSource for SpannedExpression { + fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { + match refine { + PrettyDebugRefineKind::ContextFree => self.refined_pretty_debug(refine, source), + PrettyDebugRefineKind::WithContext => match &self.expr { + Expression::Literal(literal) => literal + .clone() + .into_spanned(self.span) + .refined_pretty_debug(refine, source), + Expression::ExternalWord => { + b::delimit("e\"", b::primitive(self.span.slice(source)), "\"").group() + } + Expression::Synthetic(s) => match s { + Synthetic::String(_) => { + b::delimit("s\"", b::primitive(self.span.slice(source)), "\"").group() + } + }, + Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)), + Expression::Variable(Variable::It(_)) => b::keyword("$it"), + Expression::Binary(binary) => binary.pretty_debug(source), + Expression::Range(range) => range.pretty_debug(source), + Expression::Block(_) => b::opaque("block"), + Expression::List(list) => b::delimit( + "[", + b::intersperse( + list.iter() + .map(|item| item.refined_pretty_debug(refine, source)), + b::space(), + ), + "]", + ), + Expression::Path(path) => path.pretty_debug(source), + Expression::FilePath(path) => b::typed("path", b::primitive(path.display())), + Expression::ExternalCommand(external) => { + b::keyword("^") + b::keyword(external.name.slice(source)) + } + Expression::Command(command) => b::keyword(command.slice(source)), + Expression::Boolean(boolean) => match boolean { + true => b::primitive("$yes"), + false => b::primitive("$no"), + }, + }, + } + } + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match &self.expr { - RawExpression::Literal(literal) => literal.spanned(self.span).pretty_debug(source), - RawExpression::ExternalWord => { + Expression::Literal(literal) => { + literal.clone().into_spanned(self.span).pretty_debug(source) + } + Expression::ExternalWord => { b::typed("external word", b::primitive(self.span.slice(source))) } - RawExpression::Synthetic(s) => match s { + Expression::Synthetic(s) => match s { Synthetic::String(s) => 
b::typed("synthetic", b::primitive(format!("{:?}", s))), }, - RawExpression::Variable(_) => b::keyword(self.span.slice(source)), - RawExpression::Binary(binary) => binary.pretty_debug(source), - RawExpression::Range(range) => range.pretty_debug(source), - RawExpression::Block(_) => b::opaque("block"), - RawExpression::List(list) => b::delimit( + Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)), + Expression::Variable(Variable::It(_)) => b::keyword("$it"), + Expression::Binary(binary) => binary.pretty_debug(source), + Expression::Range(range) => range.pretty_debug(source), + Expression::Block(_) => b::opaque("block"), + Expression::List(list) => b::delimit( "[", b::intersperse( list.iter().map(|item| item.pretty_debug(source)), @@ -214,16 +293,16 @@ impl PrettyDebugWithSource for Expression { ), "]", ), - RawExpression::Path(path) => path.pretty_debug(source), - RawExpression::FilePath(path) => b::typed("path", b::primitive(path.display())), - RawExpression::ExternalCommand(external) => b::typed( - "external command", - b::primitive(external.name.slice(source)), + Expression::Path(path) => path.pretty_debug(source), + Expression::FilePath(path) => b::typed("path", b::primitive(path.display())), + Expression::ExternalCommand(external) => b::typed( + "command", + b::keyword("^") + b::primitive(external.name.slice(source)), ), - RawExpression::Command(command) => { + Expression::Command(command) => { b::typed("command", b::primitive(command.slice(source))) } - RawExpression::Boolean(boolean) => match boolean { + Expression::Boolean(boolean) => match boolean { true => b::primitive("$yes"), false => b::primitive("$no"), }, @@ -232,117 +311,91 @@ impl PrettyDebugWithSource for Expression { } impl Expression { - pub fn number(i: impl Into, span: impl Into) -> Expression { - let span = span.into(); - - RawExpression::Literal(RawLiteral::Number(i.into()).into_literal(span)).into_expr(span) + pub fn number(i: impl Into) -> Expression { + Expression::Literal(Literal::Number(i.into())) } - pub fn size(i: impl Into, unit: impl Into, span: impl Into) -> Expression { - let span = span.into(); - - RawExpression::Literal(RawLiteral::Size(i.into(), unit.into()).into_literal(span)) - .into_expr(span) + pub fn size(i: impl Into, unit: impl Into) -> Expression { + Expression::Literal(Literal::Size(i.into(), unit.into())) } - pub fn synthetic_string(s: impl Into) -> Expression { - RawExpression::Synthetic(Synthetic::String(s.into())).into_unspanned_expr() + pub fn string(inner: impl Into) -> Expression { + Expression::Literal(Literal::String(inner.into())) } - pub fn string(inner: impl Into, outer: impl Into) -> Expression { - let outer = outer.into(); - - RawExpression::Literal(RawLiteral::String(inner.into()).into_literal(outer)) - .into_expr(outer) + pub fn synthetic_string(string: impl Into) -> Expression { + Expression::Synthetic(Synthetic::String(string.into())) } - pub fn column_path(members: Vec, span: impl Into) -> Expression { - let span = span.into(); - - RawExpression::Literal(RawLiteral::ColumnPath(members).into_literal(span)).into_expr(span) + pub fn column_path(members: Vec) -> Expression { + Expression::Literal(Literal::ColumnPath(members)) } - pub fn path( - head: Expression, - tail: Vec>, - span: impl Into, - ) -> Expression { + pub fn path(head: SpannedExpression, tail: Vec>) -> Expression { let tail = tail.into_iter().map(|t| t.into()).collect(); - RawExpression::Path(Box::new(Path::new(head, tail))).into_expr(span.into()) + 
Expression::Path(Box::new(Path::new(head, tail))) } - pub fn dot_member(head: Expression, next: impl Into) -> Expression { - let Expression { expr: item, span } = head; + pub fn dot_member(head: SpannedExpression, next: impl Into) -> Expression { + let SpannedExpression { expr: item, span } = head; let next = next.into(); - let new_span = head.span.until(next.span); - match item { - RawExpression::Path(path) => { + Expression::Path(path) => { let (head, mut tail) = path.parts(); tail.push(next); - Expression::path(head, tail, new_span) + Expression::path(head, tail) } - other => Expression::path(other.into_expr(span), vec![next], new_span), + other => Expression::path(other.into_expr(span), vec![next]), } } pub fn infix( - left: Expression, + left: SpannedExpression, op: Spanned>, - right: Expression, + right: SpannedExpression, ) -> Expression { - let new_span = left.span.until(right.span); - - RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) - .into_expr(new_span) + Expression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) } - pub fn range(left: Expression, op: Span, right: Expression) -> Expression { - let new_span = left.span.until(right.span); - - RawExpression::Range(Box::new(Range::new(left, op, right))).into_expr(new_span) + pub fn range(left: SpannedExpression, op: Span, right: SpannedExpression) -> Expression { + Expression::Range(Box::new(Range::new(left, op, right))) } - pub fn file_path(path: impl Into, outer: impl Into) -> Expression { - RawExpression::FilePath(path.into()).into_expr(outer) + pub fn file_path(path: impl Into) -> Expression { + Expression::FilePath(path.into()) } - pub fn list(list: Vec, span: impl Into) -> Expression { - RawExpression::List(list).into_expr(span) + pub fn list(list: Vec) -> Expression { + Expression::List(list) } - pub fn bare(span: impl Into) -> Expression { - let span = span.into(); - - RawExpression::Literal(RawLiteral::Bare.into_literal(span)).into_expr(span) + pub fn bare() -> Expression { + Expression::Literal(Literal::Bare) } - pub fn pattern(inner: impl Into, outer: impl Into) -> Expression { - let outer = outer.into(); - - RawExpression::Literal(RawLiteral::GlobPattern(inner.into()).into_literal(outer)) - .into_expr(outer) + pub fn pattern(inner: impl Into) -> Expression { + Expression::Literal(Literal::GlobPattern(inner.into())) } - pub fn variable(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Variable(Variable::Other(inner.into())).into_expr(outer) + pub fn variable(inner: impl Into) -> Expression { + Expression::Variable(Variable::Other(inner.into())) } - pub fn external_command(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).into_expr(outer) + pub fn external_command(inner: impl Into) -> Expression { + Expression::ExternalCommand(ExternalCommand::new(inner.into())) } - pub fn it_variable(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Variable(Variable::It(inner.into())).into_expr(outer) + pub fn it_variable(inner: impl Into) -> Expression { + Expression::Variable(Variable::It(inner.into())) } } -impl From> for Expression { - fn from(path: Spanned) -> Expression { - RawExpression::Path(Box::new(path.item)).into_expr(path.span) +impl From> for SpannedExpression { + fn from(path: Spanned) -> SpannedExpression { + Expression::Path(Box::new(path.item)).into_expr(path.span) } } @@ -352,7 +405,7 @@ impl From> for Expression { /// 2. 
Can be evaluated without additional context /// 3. Evaluation cannot produce an error #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub enum RawLiteral { +pub enum Literal { Number(Number), Size(Number, Unit), String(Span), @@ -361,9 +414,9 @@ pub enum RawLiteral { Bare, } -impl RawLiteral { - pub fn into_literal(self, span: impl Into) -> Literal { - Literal { +impl Literal { + pub fn into_spanned(self, span: impl Into) -> SpannedLiteral { + SpannedLiteral { literal: self, span: span.into(), } @@ -371,36 +424,57 @@ impl RawLiteral { } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -pub struct Literal { - pub literal: RawLiteral, +pub struct SpannedLiteral { + pub literal: Literal, pub span: Span, } impl ShellTypeName for Literal { fn type_name(&self) -> &'static str { - match &self.literal { - RawLiteral::Number(..) => "number", - RawLiteral::Size(..) => "size", - RawLiteral::String(..) => "string", - RawLiteral::ColumnPath(..) => "column path", - RawLiteral::Bare => "string", - RawLiteral::GlobPattern(_) => "pattern", + match &self { + Literal::Number(..) => "number", + Literal::Size(..) => "size", + Literal::String(..) => "string", + Literal::ColumnPath(..) => "column path", + Literal::Bare => "string", + Literal::GlobPattern(_) => "pattern", } } } -impl PrettyDebugWithSource for Literal { +impl PrettyDebugWithSource for SpannedLiteral { + fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { + match refine { + PrettyDebugRefineKind::ContextFree => self.pretty_debug(source), + PrettyDebugRefineKind::WithContext => match &self.literal { + Literal::Number(number) => number.pretty(), + Literal::Size(number, unit) => (number.pretty() + unit.pretty()).group(), + Literal::String(string) => b::primitive(format!("{:?}", string.slice(source))), + Literal::GlobPattern(pattern) => b::primitive(pattern), + Literal::ColumnPath(path) => { + b::intersperse_with_source(path.iter(), b::space(), source) + } + Literal::Bare => b::delimit("b\"", b::primitive(self.span.slice(source)), "\""), + }, + } + } + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match &self.literal { - RawLiteral::Number(number) => number.pretty(), - RawLiteral::Size(number, unit) => (number.pretty() + unit.pretty()).group(), - RawLiteral::String(string) => b::primitive(format!("{:?}", string.slice(source))), - RawLiteral::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)), - RawLiteral::ColumnPath(path) => b::typed( + Literal::Number(number) => number.pretty(), + Literal::Size(number, unit) => { + b::typed("size", (number.pretty() + unit.pretty()).group()) + } + Literal::String(string) => b::typed( + "string", + b::primitive(format!("{:?}", string.slice(source))), + ), + Literal::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)), + Literal::ColumnPath(path) => b::typed( "column path", b::intersperse_with_source(path.iter(), b::space(), source), ), - RawLiteral::Bare => b::primitive(self.span.slice(source)), + Literal::Bare => b::typed("bare", b::primitive(self.span.slice(source))), } } } diff --git a/crates/nu-parser/src/hir/baseline_parse/tests.rs b/crates/nu-parser/src/hir/baseline_parse/tests.rs index 0ed1dc69fe..06bd0de328 100644 --- a/crates/nu-parser/src/hir/baseline_parse/tests.rs +++ b/crates/nu-parser/src/hir/baseline_parse/tests.rs @@ -1,42 +1,99 @@ use crate::commands::classified::{internal::InternalCommand, ClassifiedCommand}; -use 
crate::hir::TokensIterator; -use crate::hir::{self, named::NamedValue, syntax_shape::*, NamedArguments}; +use crate::hir::expand_external_tokens::{ExternalTokensShape, ExternalTokensSyntax}; +use crate::hir::{ + self, named::NamedValue, syntax_shape::*, Expression, NamedArguments, SpannedExpression, + TokensIterator, +}; use crate::parse::files::Files; use crate::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; -use crate::TokenNode; +use crate::SpannedToken; use derive_new::new; use indexmap::IndexMap; -use nu_errors::ShellError; -use nu_protocol::{PathMember, Signature, SyntaxShape}; -use nu_source::{HasSpan, Span, Tag, Text}; +use nu_errors::{ParseError, ShellError}; +use nu_protocol::{outln, PathMember, Signature, SyntaxShape}; +use nu_source::{HasSpan, PrettyDebugWithSource, Span, SpannedItem, Tag, Text}; use pretty_assertions::assert_eq; use std::fmt::Debug; #[test] -fn test_parse_string() { - parse_tokens(StringShape, vec![b::string("hello")], |tokens| { - hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span()) - }); -} - -#[test] -fn test_parse_path() { +fn test_parse_external() { parse_tokens( - VariablePathShape, - vec![b::var("it"), b::dot(), b::bare("cpu")], + fallible(ExternalTokensShape), + "5kb", + vec![b::bare("5kb")], |tokens| { - let (outer_var, inner_var) = tokens[0].expect_var(); - let bare = tokens[2].expect_bare(); - hir::Expression::path( - hir::Expression::it_variable(inner_var, outer_var), - vec![PathMember::string("cpu", bare)], - outer_var.until(bare), + ExternalTokensSyntax::new( + vec![format!("5kb").spanned(tokens[0].span())].spanned(tokens[0].span()), ) }, ); parse_tokens( + fallible(ExternalTokensShape), + "cargo +nightly run -- --features all", + vec![ + b::bare("cargo"), + b::sp(), + b::external_word("+nightly"), + b::sp(), + b::bare("run"), + b::sp(), + b::external_word("--"), + b::sp(), + b::flag("features"), + b::sp(), + b::bare("all"), + ], + |tokens| { + let cargo = format!("cargo").spanned(tokens[0].span()); + let nightly = format!("+nightly").spanned(tokens[2].span()); + let run = format!("run").spanned(tokens[4].span()); + let dashdash = format!("--").spanned(tokens[6].span()); + let features = format!("--features").spanned(tokens[8].span()); + let all = format!("all").spanned(tokens[10].span()); + let span = tokens[0].span().until(tokens[10].span()); + + ExternalTokensSyntax::new( + vec![cargo, nightly, run, dashdash, features, all].spanned(span), + ) + }, + ); +} + +#[test] +fn test_parse_string() { + parse_tokens( + CoerceStringShape, + r#""hello""#, + vec![b::string("hello")], + |tokens| { + Expression::string(inner_string_span(tokens[0].span())).into_expr(tokens[0].span()) + }, + ); +} + +#[test] +fn test_parse_path() { + let _ = pretty_env_logger::try_init(); + + parse_expr( + AnyExpressionShape, + "$it.cpu", + vec![b::it_var(), b::dot(), b::bare("cpu")], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let bare = tokens[2].expect_bare(); + Expression::path( + Expression::it_variable(inner_var).into_expr(outer_var), + vec![PathMember::string("cpu", bare)], + ) + .into_expr(outer_var.until(bare)) + }, + ); + + parse_expr( VariablePathShape, + r#"$cpu.amount."max ghz""#, vec![ b::var("cpu"), b::dot(), @@ -49,14 +106,14 @@ fn test_parse_path() { let amount = tokens[2].expect_bare(); let (outer_max_ghz, _) = tokens[4].expect_string(); - hir::Expression::path( - hir::Expression::variable(inner_var, outer_var), + Expression::path( + Expression::variable(inner_var).into_expr(outer_var), 
vec![ PathMember::string("amount", amount), PathMember::string("max ghz", outer_max_ghz), ], - outer_var.until(outer_max_ghz), ) + .into_expr(outer_var.until(outer_max_ghz)) }, ); } @@ -64,7 +121,8 @@ fn test_parse_path() { #[test] fn test_parse_command() { parse_tokens( - ClassifiedCommandShape, + fallible(ClassifiedCommandShape), + "ls *.txt", vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], |tokens| { let bare = tokens[0].expect_bare(); @@ -81,8 +139,8 @@ fn test_parse_command() { anchor: None, }, hir::Call { - head: Box::new(hir::RawExpression::Command(bare).into_expr(bare)), - positional: Some(vec![hir::Expression::pattern("*.txt", pat)]), + head: Box::new(Expression::Command(bare).into_expr(bare)), + positional: Some(vec![Expression::pattern("*.txt").into_expr(pat)]), named: Some(NamedArguments { named: map }), span: bare.until(pat), }, @@ -91,7 +149,7 @@ fn test_parse_command() { ); } -#[derive(new)] +#[derive(Debug, Clone, new)] struct TestRegistry { #[new(default)] signatures: indexmap::IndexMap, @@ -104,11 +162,14 @@ impl TestRegistry { } impl SignatureRegistry for TestRegistry { - fn has(&self, name: &str) -> Result { - Ok(self.signatures.contains_key(name)) + fn has(&self, name: &str) -> bool { + self.signatures.contains_key(name) } - fn get(&self, name: &str) -> Result, ShellError> { - Ok(self.signatures.get(name).cloned()) + fn get(&self, name: &str) -> Option { + self.signatures.get(name).cloned() + } + fn clone_box(&self) -> Box { + Box::new(self.clone()) } } @@ -128,30 +189,91 @@ fn with_empty_context(source: &Text, callback: impl FnOnce(ExpandContext)) { callback(ExpandContext::new(Box::new(registry), source, None)) } -fn parse_tokens( - shape: impl ExpandSyntax, +trait Expand {} + +fn parse_tokens( + shape: impl ExpandSyntax>, + syntax: &str, tokens: Vec, - expected: impl FnOnce(&[TokenNode]) -> T, + expected: impl FnOnce(&[SpannedToken]) -> T, ) { + // let parsed_tokens = parse(syntax); let tokens = b::token_list(tokens); let (tokens, source) = b::build(tokens); - let text = Text::from(source); + let text = Text::from(&source); + + assert_eq!(syntax, source); with_empty_context(&text, |context| { let tokens = tokens.expect_list(); - let mut iterator = TokensIterator::all(tokens.item, text.clone(), tokens.span); + let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span); - let expr = expand_syntax(&shape, &mut iterator, &context); + let expr = iterator.expand_syntax(shape); let expr = match expr { Ok(expr) => expr, Err(err) => { - print_err(err.into(), &context.source().clone()); + outln!(""); + ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap(); + outln!(""); + + print_err(err.into(), &iterator.context().source().clone()); panic!("Parse failed"); } }; - assert_eq!(expr, expected(tokens.item)); + let expected = expected(&tokens.item); + + if expr != expected { + outln!(""); + ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap(); + outln!(""); + + assert_eq!(expr, expected); + } + }) +} + +fn parse_expr( + shape: impl ExpandSyntax>, + syntax: &str, + tokens: Vec, + expected: impl FnOnce(&[SpannedToken]) -> SpannedExpression, +) { + // let parsed_tokens = parse(syntax); + let tokens = b::token_list(tokens); + let (tokens, source) = b::build(tokens); + let text = Text::from(&source); + + assert_eq!(syntax, source); + + with_empty_context(&text, |context| { + let tokens = tokens.expect_list(); + let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span); + + let expr = 
iterator.expand_syntax(shape); + + let expr = match expr { + Ok(expr) => expr, + Err(err) => { + outln!(""); + ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap(); + outln!(""); + + print_err(err.into(), &iterator.source()); + panic!("Parse failed"); + } + }; + + let expected = expected(&tokens.item); + + if expr != expected { + outln!(""); + ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap(); + outln!(""); + + assert_eq!(expr, expected); + } }) } diff --git a/crates/nu-parser/src/hir/binary.rs b/crates/nu-parser/src/hir/binary.rs index cfc1e617ef..2762507381 100644 --- a/crates/nu-parser/src/hir/binary.rs +++ b/crates/nu-parser/src/hir/binary.rs @@ -1,4 +1,4 @@ -use crate::{hir::Expression, CompareOperator}; +use crate::{hir::SpannedExpression, CompareOperator}; use derive_new::new; use getset::Getters; @@ -10,9 +10,9 @@ use serde::{Deserialize, Serialize}; )] #[get = "pub"] pub struct Binary { - left: Expression, + left: SpannedExpression, op: Spanned, - right: Expression, + right: SpannedExpression, } impl PrettyDebugWithSource for Binary { diff --git a/crates/nu-parser/src/hir/expand_external_tokens.rs b/crates/nu-parser/src/hir/expand_external_tokens.rs index f390415a24..1913269b03 100644 --- a/crates/nu-parser/src/hir/expand_external_tokens.rs +++ b/crates/nu-parser/src/hir/expand_external_tokens.rs @@ -1,17 +1,14 @@ +use crate::parse::token_tree::Token; use crate::{ - hir::syntax_shape::{ - color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax, - ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape, - UnspannedAtomicToken, - }, - hir::Expression, + hir::syntax_shape::{ExpandSyntax, FlatShape, MaybeSpaceShape}, TokensIterator, }; +use derive_new::new; use nu_errors::ParseError; use nu_protocol::SpannedTypeName; use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem}; -#[derive(Debug, Clone)] +#[derive(Debug, Eq, PartialEq, Clone, new)] pub struct ExternalTokensSyntax { pub tokens: Spanned>>, } @@ -40,57 +37,25 @@ impl ExpandSyntax for ExternalTokensShape { type Output = ExternalTokensSyntax; fn name(&self) -> &'static str { - "external command" + "external tokens" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> ExternalTokensSyntax { let mut out: Vec> = vec![]; let start = token_nodes.span_at_cursor(); loop { - match expand_syntax(&ExternalExpressionShape, token_nodes, context) { - Err(_) | Ok(None) => break, - Ok(Some(span)) => out.push(span.spanned_string(context.source())), + match token_nodes.expand_syntax(ExternalExpressionShape) { + Err(_) => break, + Ok(span) => out.push(span.spanned_string(&token_nodes.source())), } } let end = token_nodes.span_at_cursor(); - Ok(ExternalTokensSyntax { + ExternalTokensSyntax { tokens: out.spanned(start.until(end)), - }) - } -} - -impl ColorSyntax for ExternalTokensShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "ExternalTokensShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Self::Info { - loop { - // Allow a space - color_syntax(&MaybeSpaceShape, token_nodes, context); - - // Process an external expression. 
External expressions are mostly words, with a - // few exceptions (like $variables and path expansion rules) - match color_syntax(&ExternalExpressionShape, token_nodes, context).1 { - ExternalExpressionResult::Eof => break, - ExternalExpressionResult::Processed => continue, - } } } } @@ -99,208 +64,112 @@ impl ColorSyntax for ExternalTokensShape { pub struct ExternalExpressionShape; impl ExpandSyntax for ExternalExpressionShape { - type Output = Option; + type Output = Result; fn name(&self) -> &'static str { "external expression" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - expand_syntax(&MaybeSpaceShape, token_nodes, context)?; - - let first = expand_atom( - token_nodes, - "external command", - context, - ExpansionRule::new().allow_external_word(), - )? - .span; + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + token_nodes.expand_infallible(MaybeSpaceShape); + let first = token_nodes.expand_syntax(ExternalStartToken)?; let mut last = first; loop { - let continuation = expand_expr(&ExternalContinuationShape, token_nodes, context); + let continuation = token_nodes.expand_syntax(ExternalStartToken); if let Ok(continuation) = continuation { - last = continuation.span; + last = continuation; } else { break; } } - Ok(Some(first.until(last))) + Ok(first.until(last)) } } #[derive(Debug, Copy, Clone)] -struct ExternalHeadShape; +struct ExternalStartToken; + +impl ExpandSyntax for ExternalStartToken { + type Output = Result; -impl ExpandExpression for ExternalHeadShape { fn name(&self) -> &'static str { - "external argument" + "external start token" } + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + token_nodes.atomic_parse(|token_nodes| { + let mut span: Option = None; - fn expand_expr<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - let atom = expand_atom( - token_nodes, - "external argument", - context, - ExpansionRule::new() - .allow_external_word() - .treat_size_as_word(), - )?; + loop { + let boundary = token_nodes.expand_infallible(PeekExternalBoundary); - let span = atom.span; + if boundary { + break; + } - Ok(match &atom.unspanned { - UnspannedAtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"), - UnspannedAtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"), - UnspannedAtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"), - UnspannedAtomicToken::Whitespace { .. } => { - unreachable!("ExpansionRule doesn't allow Whitespace") + let peeked = token_nodes.peek().not_eof("external start token")?; + let node = peeked.node; + + let new_span = match node.unspanned() { + Token::Comment(_) + | Token::Separator + | Token::Whitespace + | Token::Pipeline(_) => { + return Err(ParseError::mismatch( + "external start token", + node.spanned_type_name(), + )) + } + + _ => { + let node = peeked.commit(); + node.span() + } + }; + + span = match span { + None => Some(new_span), + Some(before) => Some(before.until(new_span)), + }; } - UnspannedAtomicToken::Separator { .. } => { - unreachable!("ExpansionRule doesn't allow Separator") - } - UnspannedAtomicToken::Comment { .. } => { - unreachable!("ExpansionRule doesn't allow Comment") - } - UnspannedAtomicToken::ShorthandFlag { .. } - | UnspannedAtomicToken::SquareDelimited { .. } - | UnspannedAtomicToken::RoundDelimited { .. 
} => { - return Err(ParseError::mismatch( - "external command name", - atom.spanned_type_name(), - )) - } - UnspannedAtomicToken::ExternalCommand { command } => { - Expression::external_command(*command, span) - } - UnspannedAtomicToken::Number { number } => { - Expression::number(number.to_number(context.source()), span) - } - UnspannedAtomicToken::String { body } => Expression::string(*body, span), - UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span), - UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span), - UnspannedAtomicToken::ExternalWord { .. } - | UnspannedAtomicToken::GlobPattern { .. } - | UnspannedAtomicToken::Word { .. } - | UnspannedAtomicToken::Dot { .. } - | UnspannedAtomicToken::DotDot { .. } - | UnspannedAtomicToken::CompareOperator { .. } => { - Expression::external_command(span, span) + + match span { + None => Err(token_nodes.err_next_token("external start token")), + Some(span) => { + token_nodes.color_shape(FlatShape::ExternalWord.spanned(span)); + Ok(span) + } } }) } } #[derive(Debug, Copy, Clone)] -struct ExternalContinuationShape; +struct PeekExternalBoundary; -impl ExpandExpression for ExternalContinuationShape { - fn name(&self) -> &'static str { - "external argument" - } - - fn expand_expr<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - let atom = expand_atom( - token_nodes, - "external argument", - context, - ExpansionRule::new() - .allow_external_word() - .treat_size_as_word(), - )?; - - let span = atom.span; - - Ok(match &atom.unspanned { - UnspannedAtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"), - UnspannedAtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"), - UnspannedAtomicToken::Number { number } => { - Expression::number(number.to_number(context.source()), span) - } - UnspannedAtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"), - UnspannedAtomicToken::ExternalCommand { .. } => { - unreachable!("ExpansionRule doesn't allow ExternalCommand") - } - UnspannedAtomicToken::Whitespace { .. } => { - unreachable!("ExpansionRule doesn't allow Whitespace") - } - UnspannedAtomicToken::Separator { .. } => { - unreachable!("ExpansionRule doesn't allow Separator") - } - UnspannedAtomicToken::Comment { .. } => { - unreachable!("ExpansionRule doesn't allow Comment") - } - UnspannedAtomicToken::String { body } => Expression::string(*body, span), - UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span), - UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span), - UnspannedAtomicToken::ExternalWord { .. } - | UnspannedAtomicToken::GlobPattern { .. } - | UnspannedAtomicToken::Word { .. } - | UnspannedAtomicToken::ShorthandFlag { .. } - | UnspannedAtomicToken::Dot { .. } - | UnspannedAtomicToken::DotDot { .. } - | UnspannedAtomicToken::CompareOperator { .. } => Expression::bare(span), - UnspannedAtomicToken::SquareDelimited { .. } - | UnspannedAtomicToken::RoundDelimited { .. 
} => { - return Err(ParseError::mismatch( - "external argument", - atom.spanned_type_name(), - )) - } - }) - } -} - -impl ColorSyntax for ExternalExpressionShape { - type Info = ExternalExpressionResult; - type Input = (); +impl ExpandSyntax for PeekExternalBoundary { + type Output = bool; fn name(&self) -> &'static str { - "ExternalExpressionShape" + "external boundary" } - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> ExternalExpressionResult { - let atom = match expand_atom( - token_nodes, - "external word", - context, - ExpansionRule::permissive(), - ) { - Err(_) => unreachable!("TODO: separate infallible expand_atom"), - Ok(AtomicToken { - unspanned: UnspannedAtomicToken::Eof { .. }, - .. - }) => return ExternalExpressionResult::Eof, - Ok(atom) => atom, - }; + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output { + let next = token_nodes.peek(); - token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); - ExternalExpressionResult::Processed + match next.node { + None => true, + Some(node) => match node.unspanned() { + Token::Delimited(_) => true, + Token::Whitespace => true, + Token::Comment(_) => true, + Token::Separator => true, + Token::Call(_) => true, + _ => false, + }, + } } } - -#[must_use] -pub enum ExternalExpressionResult { - Eof, - Processed, -} diff --git a/crates/nu-parser/src/hir/named.rs b/crates/nu-parser/src/hir/named.rs index 45ca381d5d..49ae3125c0 100644 --- a/crates/nu-parser/src/hir/named.rs +++ b/crates/nu-parser/src/hir/named.rs @@ -1,8 +1,8 @@ -use crate::hir::Expression; +use crate::hir::SpannedExpression; use crate::Flag; use indexmap::IndexMap; use log::trace; -use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Tag}; +use nu_source::{b, DebugDocBuilder, PrettyDebugRefineKind, PrettyDebugWithSource, Tag}; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] @@ -10,7 +10,7 @@ pub enum NamedValue { AbsentSwitch, PresentSwitch(Tag), AbsentValue, - Value(Expression), + Value(SpannedExpression), } impl PrettyDebugWithSource for NamedValue { @@ -22,6 +22,18 @@ impl PrettyDebugWithSource for NamedValue { NamedValue::Value(value) => value.pretty_debug(source), } } + + fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { + match refine { + PrettyDebugRefineKind::ContextFree => self.pretty_debug(source), + PrettyDebugRefineKind::WithContext => match self { + NamedValue::AbsentSwitch => b::value("absent"), + NamedValue::PresentSwitch(_) => b::value("present"), + NamedValue::AbsentValue => b::value("absent"), + NamedValue::Value(value) => value.refined_pretty_debug(refine, source), + }, + } + } } #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] @@ -60,28 +72,37 @@ impl NamedArguments { }; } - pub fn insert_optional(&mut self, name: impl Into, expr: Option) { + pub fn insert_optional(&mut self, name: impl Into, expr: Option) { match expr { None => self.named.insert(name.into(), NamedValue::AbsentValue), Some(expr) => self.named.insert(name.into(), NamedValue::Value(expr)), }; } - pub fn insert_mandatory(&mut self, name: impl Into, expr: Expression) { + pub fn insert_mandatory(&mut self, name: impl Into, expr: SpannedExpression) { self.named.insert(name.into(), NamedValue::Value(expr)); } } impl PrettyDebugWithSource for NamedArguments { + fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { + 
match refine { + PrettyDebugRefineKind::ContextFree => self.pretty_debug(source), + PrettyDebugRefineKind::WithContext => b::intersperse( + self.named.iter().map(|(key, value)| { + b::key(key) + + b::equals() + + value.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source) + }), + b::space(), + ), + } + } + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { b::delimit( "(", - b::intersperse( - self.named - .iter() - .map(|(key, value)| b::key(key) + b::equals() + value.pretty_debug(source)), - b::space(), - ), + self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source), ")", ) } diff --git a/crates/nu-parser/src/hir/path.rs b/crates/nu-parser/src/hir/path.rs index 7930e0fb77..685debfae8 100644 --- a/crates/nu-parser/src/hir/path.rs +++ b/crates/nu-parser/src/hir/path.rs @@ -1,4 +1,4 @@ -use crate::hir::Expression; +use crate::hir::SpannedExpression; use derive_new::new; use getset::{Getters, MutGetters}; use nu_protocol::PathMember; @@ -21,7 +21,7 @@ use serde::{Deserialize, Serialize}; )] #[get = "pub"] pub struct Path { - head: Expression, + head: SpannedExpression, #[get_mut = "pub(crate)"] tail: Vec, } @@ -35,7 +35,7 @@ impl PrettyDebugWithSource for Path { } impl Path { - pub(crate) fn parts(self) -> (Expression, Vec) { + pub(crate) fn parts(self) -> (SpannedExpression, Vec) { (self.head, self.tail) } } diff --git a/crates/nu-parser/src/hir/range.rs b/crates/nu-parser/src/hir/range.rs index 075dd3bd60..cfd702c368 100644 --- a/crates/nu-parser/src/hir/range.rs +++ b/crates/nu-parser/src/hir/range.rs @@ -1,4 +1,4 @@ -use crate::hir::Expression; +use crate::hir::SpannedExpression; use derive_new::new; use getset::Getters; @@ -10,11 +10,11 @@ use serde::{Deserialize, Serialize}; )] pub struct Range { #[get = "pub"] - left: Expression, + left: SpannedExpression, #[get = "pub"] dotdot: Span, #[get = "pub"] - right: Expression, + right: SpannedExpression, } impl PrettyDebugWithSource for Range { diff --git a/crates/nu-parser/src/hir/syntax_shape.rs b/crates/nu-parser/src/hir/syntax_shape.rs index 839efd2a3c..1468178b56 100644 --- a/crates/nu-parser/src/hir/syntax_shape.rs +++ b/crates/nu-parser/src/hir/syntax_shape.rs @@ -1,3 +1,5 @@ +#![allow(clippy::large_enum_variant, clippy::type_complexity)] + mod block; mod expression; pub mod flat_shape; @@ -6,80 +8,48 @@ use crate::commands::classified::internal::InternalCommand; use crate::commands::classified::{ClassifiedCommand, ClassifiedPipeline}; use crate::commands::external_command; use crate::hir; -use crate::hir::expand_external_tokens::ExternalTokensShape; -use crate::hir::syntax_shape::block::AnyBlockShape; +use crate::hir::syntax_shape::block::CoerceBlockShape; use crate::hir::syntax_shape::expression::range::RangeShape; -use crate::hir::tokens_iterator::{Peeked, TokensIterator}; +use crate::hir::syntax_shape::flat_shape::ShapeResult; +use crate::hir::tokens_iterator::TokensIterator; +use crate::hir::{Expression, SpannedExpression}; use crate::parse::operator::EvaluationOperator; -use crate::parse::token_tree::TokenNode; -use crate::parse::tokens::{Token, UnspannedToken}; -use crate::parse_command::{parse_command_tail, CommandTailShape}; +use crate::parse::token_tree::{ + ExternalCommandType, PipelineType, SpannedToken, Token, WhitespaceType, WordType, +}; +use crate::parse_command::parse_command_tail; use derive_new::new; use getset::Getters; -use nu_errors::{ParseError, ShellError}; -use nu_protocol::{ShellTypeName, Signature}; +use nu_errors::ParseError; +use nu_protocol::{ShellTypeName, Signature, 
SpannedTypeName}; use nu_source::{ - b, DebugDocBuilder, HasFallibleSpan, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, - Spanned, SpannedItem, Tag, TaggedItem, Text, + b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem, + Tag, TaggedItem, Text, }; use std::path::{Path, PathBuf}; -pub(crate) use self::expression::atom::{ - expand_atom, AtomicToken, ExpansionRule, UnspannedAtomicToken, -}; -pub(crate) use self::expression::delimited::{ - color_delimited_square, expand_delimited_square, DelimitedShape, -}; -pub(crate) use self::expression::file_path::FilePathShape; +pub(crate) use self::expression::delimited::DelimitedSquareShape; +pub(crate) use self::expression::file_path::{ExternalWordShape, FilePathShape}; pub(crate) use self::expression::list::{BackoffColoringMode, ExpressionListShape}; -pub(crate) use self::expression::number::{IntShape, NumberShape}; -pub(crate) use self::expression::pattern::{BarePatternShape, PatternShape}; -pub(crate) use self::expression::string::StringShape; -pub(crate) use self::expression::unit::{UnitShape, UnitSyntax}; -pub(crate) use self::expression::variable_path::{ - ColorableDotShape, ColumnPathShape, ColumnPathSyntax, DotShape, ExpressionContinuation, - ExpressionContinuationShape, Member, MemberShape, PathTailShape, PathTailSyntax, - VariablePathShape, +pub(crate) use self::expression::number::{ + DecimalShape, IntExpressionShape, IntShape, NumberExpressionShape, NumberShape, }; -pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; +pub(crate) use self::expression::pattern::{PatternExpressionShape, PatternShape}; +pub(crate) use self::expression::string::{CoerceStringShape, StringExpressionShape, StringShape}; +pub(crate) use self::expression::unit::UnitExpressionShape; +pub(crate) use self::expression::variable_path::{ + ColumnPathShape, ColumnPathSyntax, ExpressionContinuationShape, Member, MemberShape, + PathTailShape, PathTailSyntax, VariablePathShape, VariableShape, +}; +pub(crate) use self::expression::{AnyExpressionShape, AnyExpressionStartShape}; pub(crate) use self::flat_shape::FlatShape; use nu_protocol::SyntaxShape; +use std::fmt::Debug; -impl FallibleColorSyntax for SyntaxShape { - type Info = (); - type Input = (); +impl ExpandSyntax for SyntaxShape { + type Output = Result; - fn name(&self) -> &'static str { - "SyntaxShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - match self { - SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context), - SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context), - SyntaxShape::Range => color_fallible_syntax(&RangeShape, token_nodes, context), - SyntaxShape::String => { - color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context) - } - SyntaxShape::Member => color_fallible_syntax(&MemberShape, token_nodes, context), - SyntaxShape::ColumnPath => { - color_fallible_syntax(&ColumnPathShape, token_nodes, context) - } - SyntaxShape::Number => color_fallible_syntax(&NumberShape, token_nodes, context), - SyntaxShape::Path => color_fallible_syntax(&FilePathShape, token_nodes, context), - SyntaxShape::Pattern => color_fallible_syntax(&PatternShape, token_nodes, context), - SyntaxShape::Block => color_fallible_syntax(&AnyBlockShape, token_nodes, context), - } - } -} - -impl ExpandExpression for SyntaxShape { fn name(&self) -> &'static str { match self { 
SyntaxShape::Any => "shape[any]", @@ -95,43 +65,69 @@ impl ExpandExpression for SyntaxShape { } } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { + ) -> Result { match self { - SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context), - SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context), - SyntaxShape::Range => expand_expr(&RangeShape, token_nodes, context), - SyntaxShape::String => expand_expr(&StringShape, token_nodes, context), + SyntaxShape::Any => token_nodes.expand_syntax(AnyExpressionShape), + SyntaxShape::Int => token_nodes + .expand_syntax(IntExpressionShape) + .or_else(|_| token_nodes.expand_syntax(VariablePathShape)), + SyntaxShape::Range => token_nodes + .expand_syntax(RangeShape) + .or_else(|_| token_nodes.expand_syntax(VariablePathShape)), + SyntaxShape::String => token_nodes + .expand_syntax(CoerceStringShape) + .or_else(|_| token_nodes.expand_syntax(VariablePathShape)), SyntaxShape::Member => { - let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let syntax = token_nodes.expand_syntax(MemberShape)?; Ok(syntax.to_expr()) } SyntaxShape::ColumnPath => { - let column_path = expand_syntax(&ColumnPathShape, token_nodes, context)?; + let column_path = token_nodes.expand_syntax(ColumnPathShape)?; let ColumnPathSyntax { path: column_path, tag, } = column_path; - Ok(hir::Expression::column_path(column_path, tag.span)) + Ok(Expression::column_path(column_path).into_expr(tag.span)) } - SyntaxShape::Number => expand_expr(&NumberShape, token_nodes, context), - SyntaxShape::Path => expand_expr(&FilePathShape, token_nodes, context), - SyntaxShape::Pattern => expand_expr(&PatternShape, token_nodes, context), - SyntaxShape::Block => expand_expr(&AnyBlockShape, token_nodes, context), + SyntaxShape::Number => token_nodes + .expand_syntax(NumberExpressionShape) + .or_else(|_| token_nodes.expand_syntax(VariablePathShape)), + SyntaxShape::Path => token_nodes + .expand_syntax(FilePathShape) + .or_else(|_| token_nodes.expand_syntax(VariablePathShape)), + SyntaxShape::Pattern => token_nodes + .expand_syntax(PatternShape) + .or_else(|_| token_nodes.expand_syntax(VariablePathShape)), + SyntaxShape::Block => token_nodes + .expand_syntax(CoerceBlockShape) + .or_else(|_| token_nodes.expand_syntax(VariablePathShape)), } } } -pub trait SignatureRegistry { - fn has(&self, name: &str) -> Result; - fn get(&self, name: &str) -> Result, ShellError>; +pub trait SignatureRegistry: Debug { + fn has(&self, name: &str) -> bool; + fn get(&self, name: &str) -> Option; + fn clone_box(&self) -> Box; } -#[derive(Getters, new)] +impl SignatureRegistry for Box { + fn has(&self, name: &str) -> bool { + (&**self).has(name) + } + fn get(&self, name: &str) -> Option { + (&**self).get(name) + } + fn clone_box(&self) -> Box { + (&**self).clone_box() + } +} + +#[derive(Debug, Getters, new)] pub struct ExpandContext<'context> { #[get = "pub(crate)"] pub registry: Box, @@ -149,156 +145,44 @@ impl<'context> ExpandContext<'context> { } } -pub trait TestSyntax: std::fmt::Debug + Copy { - fn test<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Option>; -} - -pub trait ExpandExpression: std::fmt::Debug + Copy { - fn name(&self) -> &'static str; - - fn expand_expr<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result; -} - -pub trait FallibleColorSyntax: std::fmt::Debug + Copy { - type Info; - type Input; 
+pub trait ExpandSyntax: std::fmt::Debug + Clone { + type Output: Clone + std::fmt::Debug + 'static; fn name(&self) -> &'static str; - fn color_syntax<'a, 'b>( - &self, - input: &Self::Input, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result; + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output; } -pub trait ColorSyntax: std::fmt::Debug + Copy { - type Info; - type Input; - - fn name(&self) -> &'static str; - - fn color_syntax<'a, 'b>( - &self, - input: &Self::Input, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Self::Info; +pub fn fallible(syntax: S) -> FallibleSyntax +where + T: Clone + Debug + 'static, + S: ExpandSyntax, +{ + FallibleSyntax { inner: syntax } } -pub trait ExpandSyntax: std::fmt::Debug + Copy { - type Output: HasFallibleSpan + Clone + std::fmt::Debug + 'static; - - fn name(&self) -> &'static str; - - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result; +#[derive(Debug, Copy, Clone)] +pub struct FallibleSyntax { + inner: I, } -pub fn expand_syntax<'a, 'b, T: ExpandSyntax>( - shape: &T, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> Result { - token_nodes.expand_frame(shape.name(), |token_nodes| { - shape.expand_syntax(token_nodes, context) - }) -} - -pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( - shape: &T, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> Result { - token_nodes.expand_expr_frame(shape.name(), |token_nodes| { - shape.expand_expr(token_nodes, context) - }) -} - -pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( - shape: &T, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> ((), U) { - ( - (), - token_nodes.color_frame(shape.name(), |token_nodes| { - shape.color_syntax(&(), token_nodes, context) - }), - ) -} - -pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( - shape: &T, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> Result { - token_nodes.color_fallible_frame(shape.name(), |token_nodes| { - shape.color_syntax(&(), token_nodes, context) - }) -} - -pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( - shape: &T, - input: &I, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> ((), U) { - ( - (), - token_nodes.color_frame(shape.name(), |token_nodes| { - shape.color_syntax(input, token_nodes, context) - }), - ) -} - -pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( - shape: &T, - input: &I, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> Result { - token_nodes.color_fallible_frame(shape.name(), |token_nodes| { - shape.color_syntax(input, token_nodes, context) - }) -} - -impl ExpandSyntax for T { - type Output = hir::Expression; +impl ExpandSyntax for FallibleSyntax +where + I: ExpandSyntax, + T: Clone + Debug + 'static, +{ + type Output = Result; fn name(&self) -> &'static str { - ExpandExpression::name(self) + "fallible" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - ExpandExpression::expand_expr(self, token_nodes, context) + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + Ok(self.inner.expand(token_nodes)) } } -pub trait SkipSyntax: std::fmt::Debug + Copy { - fn skip<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), 
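// Illustrative sketch, not part of the patch: a standalone model of the single
// `ExpandSyntax` trait and the `fallible`/`FallibleSyntax` adapter introduced
// above. The real trait drives a `TokensIterator`; here an invented `Cursor`
// over a string stands in for it so the example is self-contained. The point:
// infallible shapes use `Output = T`, fallible shapes use `Output = Result<T, E>`,
// and the adapter lifts an infallible shape into the fallible form so both can
// be composed uniformly.

#[derive(Debug)]
struct Cursor<'a> {
    input: &'a str,
    pos: usize,
}

trait Expand: std::fmt::Debug + Clone {
    type Output;
    fn name(&self) -> &'static str;
    fn expand(&self, cursor: &mut Cursor<'_>) -> Self::Output;
}

// An infallible shape: always succeeds, possibly consuming nothing.
#[derive(Debug, Clone, Copy)]
struct MaybeSpace;

impl Expand for MaybeSpace {
    type Output = usize; // number of spaces consumed

    fn name(&self) -> &'static str {
        "whitespace?"
    }

    fn expand(&self, cursor: &mut Cursor<'_>) -> usize {
        let rest = &cursor.input[cursor.pos..];
        let spaces = rest.chars().take_while(|c| *c == ' ').count();
        cursor.pos += spaces;
        spaces
    }
}

// The adapter: wraps an infallible shape so its output becomes a `Result`,
// mirroring `FallibleSyntax` above.
#[derive(Debug, Clone, Copy)]
struct Fallible<S>(S);

impl<S, T> Expand for Fallible<S>
where
    S: Expand<Output = T>,
{
    type Output = Result<T, String>;

    fn name(&self) -> &'static str {
        "fallible"
    }

    fn expand(&self, cursor: &mut Cursor<'_>) -> Result<T, String> {
        Ok(self.0.expand(cursor))
    }
}

fn main() {
    let mut cursor = Cursor { input: "   ls", pos: 0 };
    assert_eq!(Fallible(MaybeSpace).expand(&mut cursor), Ok(3));
    assert_eq!(&cursor.input[cursor.pos..], "ls");
}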
ShellError>; -} - +#[derive(Debug, Clone)] enum BarePathState { Initial, Seen(Span, Span), @@ -314,9 +198,14 @@ impl BarePathState { } } - pub fn end(self, peeked: Peeked, reason: &'static str) -> BarePathState { + pub fn end(self, node: Option<&SpannedToken>, expected: &'static str) -> BarePathState { match self { - BarePathState::Initial => BarePathState::Error(peeked.type_error(reason)), + BarePathState::Initial => match node { + None => BarePathState::Error(ParseError::unexpected_eof(expected, Span::unknown())), + Some(token) => { + BarePathState::Error(ParseError::mismatch(expected, token.spanned_type_name())) + } + }, BarePathState::Seen(start, end) => BarePathState::Seen(start, end), BarePathState::Error(err) => BarePathState::Error(err), } @@ -331,30 +220,33 @@ impl BarePathState { } } -pub fn expand_bare<'a, 'b>( - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - predicate: impl Fn(&TokenNode) -> bool, +pub fn expand_bare( + token_nodes: &'_ mut TokensIterator<'_>, + predicate: impl Fn(&SpannedToken) -> bool, ) -> Result { let mut state = BarePathState::Initial; loop { - // Whitespace ends a word - let mut peeked = token_nodes.peek_any(); + if token_nodes.at_end() { + state = state.end(None, "word"); + break; + } - match peeked.node { - None => { - state = state.end(peeked, "word"); - break; + let source = token_nodes.source(); + + let mut peeked = token_nodes.peek(); + let node = peeked.node; + + match node { + Some(token) if predicate(token) => { + peeked.commit(); + state = state.clone().seen(token.span()); + let shapes = FlatShape::shapes(token, &source); + token_nodes.color_shapes(shapes); } - Some(node) => { - if predicate(node) { - state = state.seen(node.span()); - peeked.commit(); - } else { - state = state.end(peeked, "word"); - break; - } + token => { + state = state.clone().end(token, "word"); + break; } } } @@ -362,30 +254,39 @@ pub fn expand_bare<'a, 'b>( state.into_bare() } +#[derive(Debug, Copy, Clone)] +pub struct BareExpressionShape; + +impl ExpandSyntax for BareExpressionShape { + type Output = Result; + + fn name(&self) -> &'static str { + "bare expression" + } + + fn expand<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + ) -> Result { + token_nodes + .expand_syntax(BarePathShape) + .map(|span| Expression::bare().into_expr(span)) + } +} + #[derive(Debug, Copy, Clone)] pub struct BarePathShape; impl ExpandSyntax for BarePathShape { - type Output = Span; + type Output = Result; fn name(&self) -> &'static str { "bare path" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - expand_bare(token_nodes, context, |token| match token { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - .. - }) - | TokenNode::Token(Token { - unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot), - .. 
- }) => true, + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + expand_bare(token_nodes, |token| match token.unspanned() { + Token::Bare | Token::EvaluationOperator(EvaluationOperator::Dot) => true, _ => false, }) @@ -395,37 +296,6 @@ impl ExpandSyntax for BarePathShape { #[derive(Debug, Copy, Clone)] pub struct BareShape; -impl FallibleColorSyntax for BareShape { - type Info = (); - type Input = FlatShape; - - fn name(&self) -> &'static str { - "BareShape" - } - - fn color_syntax<'a, 'b>( - &self, - input: &FlatShape, - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result<(), ShellError> { - let span = token_nodes.peek_any_token("word", |token| match token { - // If it's a bare token, color it - TokenNode::Token(Token { span, .. }) => Ok(span), - - // otherwise, fail - other => Err(ParseError::mismatch( - "word", - other.type_name().spanned(other.span()), - )), - })?; - - token_nodes.color_shape((*input).spanned(*span)); - - Ok(()) - } -} - #[derive(Debug, Clone)] pub struct BareSyntax { pub word: String, @@ -445,51 +315,27 @@ impl PrettyDebug for BareSyntax { } impl ExpandSyntax for BareShape { - type Output = BareSyntax; + type Output = Result; fn name(&self) -> &'static str { "word" } - fn expand_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - let peeked = token_nodes.peek_any().not_eof("word")?; + ) -> Result { + let source = token_nodes.source(); - match peeked.node { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - span, - }) => { - peeked.commit(); - Ok(BareSyntax { - word: context.source.to_string(), - span: *span, - }) - } - - other => Err(ParseError::mismatch( - "word", - other.type_name().spanned(other.span()), - )), - } - } -} - -impl TestSyntax for BareShape { - fn test<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Option> { - let peeked = token_nodes.peek_any(); - - match peeked.node { - Some(token) if token.is_bare() => Some(peeked), - _ => None, - } + token_nodes.expand_token(WordType, |span| { + Ok(( + FlatShape::Word, + BareSyntax { + word: span.string(&source), + span, + }, + )) + }) } } @@ -498,7 +344,7 @@ pub enum CommandSignature { Internal(Spanned), LiteralExternal { outer: Span, inner: Span }, External(Span), - Expression(hir::Expression), + Expression(hir::SpannedExpression), } impl PrettyDebugWithSource for CommandSignature { @@ -531,19 +377,18 @@ impl HasSpan for CommandSignature { } impl CommandSignature { - pub fn to_expression(&self) -> hir::Expression { + pub fn to_expression(&self) -> hir::SpannedExpression { match self { CommandSignature::Internal(command) => { let span = command.span; - hir::RawExpression::Command(span).into_expr(span) + hir::Expression::Command(span).into_expr(span) } CommandSignature::LiteralExternal { outer, inner } => { - hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)) + hir::Expression::ExternalCommand(hir::ExternalCommand::new(*inner)) .into_expr(*outer) } CommandSignature::External(span) => { - hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*span)) - .into_expr(*span) + hir::Expression::ExternalCommand(hir::ExternalCommand::new(*span)).into_expr(*span) } CommandSignature::Expression(expr) => expr.clone(), } @@ -553,45 +398,6 @@ impl CommandSignature { #[derive(Debug, Copy, Clone)] pub struct PipelineShape; -// The failure mode is if the head of the token stream is not a pipeline -impl 
FallibleColorSyntax for PipelineShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "PipelineShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - // Make sure we're looking at a pipeline - let pipeline = token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?; - - let parts = &pipeline.parts[..]; - - // Enumerate the pipeline parts - for part in parts { - // If the pipeline part has a prefix `|`, emit a pipe to color - if let Some(pipe) = part.pipe { - token_nodes.color_shape(FlatShape::Pipe.spanned(pipe)) - } - - let tokens: Spanned<&[TokenNode]> = (part.tokens()).spanned(part.span()); - - token_nodes.child(tokens, context.source.clone(), move |token_nodes| { - color_syntax(&MaybeSpaceShape, token_nodes, context); - color_syntax(&CommandShape, token_nodes, context); - }); - } - - Ok(()) - } -} - impl ExpandSyntax for PipelineShape { type Output = ClassifiedPipeline; @@ -599,34 +405,55 @@ impl ExpandSyntax for PipelineShape { "pipeline" } - fn expand_syntax<'content, 'me>( + fn expand<'content, 'me>( &self, - iterator: &'me mut TokensIterator<'content>, - context: &ExpandContext, - ) -> Result { - let start = iterator.span_at_cursor(); + token_nodes: &'me mut TokensIterator<'content>, + ) -> ClassifiedPipeline { + if token_nodes.at_end() { + return ClassifiedPipeline::commands(vec![], Span::unknown()); + } - let peeked = iterator.peek_any().not_eof("pipeline")?; - let pipeline = peeked.commit().as_pipeline()?; + let start = token_nodes.span_at_cursor(); + + // whitespace is allowed at the beginning + token_nodes.expand_infallible(MaybeSpaceShape); + + let pipeline = token_nodes + .expand_token(PipelineType, |pipeline| Ok(((), pipeline))) + .expect("PipelineShape is only expected to be called with a Pipeline token"); let parts = &pipeline.parts[..]; let mut out = vec![]; for part in parts { - let tokens: Spanned<&[TokenNode]> = part.tokens().spanned(part.span()); + if let Some(span) = part.pipe { + token_nodes.color_shape(FlatShape::Pipe.spanned(span)); + } - let classified = - iterator.child(tokens, context.source.clone(), move |token_nodes| { - expand_syntax(&ClassifiedCommandShape, token_nodes, context) - })?; + let tokens: Spanned<&[SpannedToken]> = part.tokens().spanned(part.span()); + + let (shapes, classified) = token_nodes.child(tokens, move |token_nodes| { + token_nodes.expand_infallible(ClassifiedCommandShape) + }); + + for shape in shapes { + match shape { + ShapeResult::Success(shape) => token_nodes.color_shape(shape), + ShapeResult::Fallback { shape, allowed } => { + token_nodes.color_err(shape, allowed) + } + } + } out.push(classified); } - let end = iterator.span_at_cursor(); + token_nodes.expand_infallible(BackoffColoringMode::new(vec!["no more tokens".to_string()])); - Ok(ClassifiedPipeline::commands(out, start.until(end))) + let end = token_nodes.span_at_cursor(); + + ClassifiedPipeline::commands(out, start.until(end)) } } @@ -638,128 +465,48 @@ pub enum CommandHeadKind { #[derive(Debug, Copy, Clone)] pub struct CommandHeadShape; -impl FallibleColorSyntax for CommandHeadShape { - type Info = CommandHeadKind; - type Input = (); - - fn name(&self) -> &'static str { - "CommandHeadShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - // If we don't ultimately find a token, roll back - token_nodes.atomic(|token_nodes| { - // 
First, take a look at the next token - let atom = expand_atom( - token_nodes, - "command head", - context, - ExpansionRule::permissive(), - )?; - - match atom.unspanned { - // If the head is an explicit external command (^cmd), color it as an external command - UnspannedAtomicToken::ExternalCommand { .. } => { - token_nodes.color_shape(FlatShape::ExternalCommand.spanned(atom.span)); - Ok(CommandHeadKind::External) - } - - // If the head is a word, it depends on whether it matches a registered internal command - UnspannedAtomicToken::Word { text } => { - let name = text.slice(context.source); - - if context.registry.has(name)? { - // If the registry has the command, color it as an internal command - token_nodes.color_shape(FlatShape::InternalCommand.spanned(text)); - let signature = context - .registry - .get(name) - .map_err(|_| { - ShellError::labeled_error( - "Internal error: could not load signature from registry", - "could not load from registry", - text, - ) - })? - .ok_or_else(|| { - ShellError::labeled_error( - "Internal error: could not load signature from registry", - "could not load from registry", - text, - ) - })?; - Ok(CommandHeadKind::Internal(signature)) - } else { - // Otherwise, color it as an external command - token_nodes.color_shape(FlatShape::ExternalCommand.spanned(text)); - Ok(CommandHeadKind::External) - } - } - - // Otherwise, we're not actually looking at a command - _ => Err(ShellError::syntax_error( - "No command at the head".spanned(atom.span), - )), - } - }) - } -} - impl ExpandSyntax for CommandHeadShape { - type Output = CommandSignature; + type Output = Result; fn name(&self) -> &'static str { "command head" } - fn expand_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, ) -> Result { - let node = - parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| { - Ok(match token { - UnspannedToken::ExternalCommand(span) => CommandSignature::LiteralExternal { - outer: token_span, - inner: span, - }, - UnspannedToken::Bare => { - let name = token_span.slice(context.source); - if context.registry.has(name)? { - let signature = context - .registry - .get(name) - .map_err(|_| ParseError::internal_error(name.spanned(token_span)))? 
- .ok_or_else(|| { - ParseError::internal_error(name.spanned(token_span)) - })?; - CommandSignature::Internal(signature.spanned(token_span)) - } else { - CommandSignature::External(token_span) - } - } - _ => { - return Err(ShellError::type_error( - "command head2", - token.type_name().spanned(token_span), + token_nodes.expand_infallible(MaybeSpaceShape); + + let source = token_nodes.source(); + let registry = &token_nodes.context().registry.clone_box(); + + token_nodes + .expand_token(ExternalCommandType, |(inner, outer)| { + Ok(( + FlatShape::ExternalCommand, + CommandSignature::LiteralExternal { outer, inner }, + )) + }) + .or_else(|_| { + token_nodes.expand_token(WordType, |span| { + let name = span.slice(&source); + if registry.has(name) { + let signature = registry.get(name).unwrap(); + Ok(( + FlatShape::InternalCommand, + CommandSignature::Internal(signature.spanned(span)), )) + } else { + Ok((FlatShape::ExternalCommand, CommandSignature::External(span))) } }) - }); - - match node { - Ok(expr) => Ok(expr), - Err(_) => match expand_expr(&AnyExpressionShape, token_nodes, context) { - Ok(expr) => Ok(CommandSignature::Expression(expr)), - Err(_) => Err(token_nodes.peek_non_ws().type_error("command head3")), - }, - } + }) + .or_else(|_| { + token_nodes + .expand_syntax(AnyExpressionShape) + .map(CommandSignature::Expression) + }) } } @@ -773,321 +520,138 @@ impl ExpandSyntax for ClassifiedCommandShape { "classified command" } - fn expand_syntax<'a, 'b>( - &self, - iterator: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - let start = iterator.span_at_cursor(); - let head = expand_syntax(&CommandHeadShape, iterator, context)?; + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> ClassifiedCommand { + let start = token_nodes.span_at_cursor(); + let source = token_nodes.source(); - match &head { - CommandSignature::Expression(expr) => Err(ParseError::mismatch( + let head = match token_nodes.expand_syntax(CommandHeadShape) { + Err(err) => { + token_nodes + .expand_infallible(BackoffColoringMode::new(vec!["command".to_string()])); + return ClassifiedCommand::Error(err); + } + + Ok(head) => head, + }; + + match head { + CommandSignature::Expression(expr) => ClassifiedCommand::Error(ParseError::mismatch( "command", expr.type_name().spanned(expr.span), )), - // If the command starts with `^`, treat it as an external command no matter what CommandSignature::External(name) => { - let name_str = name.slice(&context.source); + let name_str = name.slice(&source); - external_command(iterator, context, name_str.tagged(name)) + match external_command(token_nodes, name_str.tagged(name)) { + Err(err) => ClassifiedCommand::Error(err), + Ok(command) => command, + } } + // If the command starts with `^`, treat it as an external command no matter what CommandSignature::LiteralExternal { outer, inner } => { - let name_str = inner.slice(&context.source); + let name_str = inner.slice(&source); - external_command(iterator, context, name_str.tagged(outer)) + match external_command(token_nodes, name_str.tagged(outer)) { + Err(err) => ClassifiedCommand::Error(err), + Ok(command) => command, + } } CommandSignature::Internal(signature) => { - let tail = parse_command_tail(&signature.item, &context, iterator, signature.span)?; + let tail = parse_command_tail(&signature.item, token_nodes, signature.span); + + let tail = match tail { + Err(err) => { + return ClassifiedCommand::Error(err); + } + Ok(tail) => tail, + }; let (positional, named) = match tail { None => (None, None), 
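// Illustrative sketch, not part of the patch: the error-recovery shape of
// `ClassifiedCommandShape::expand` above, where expansion itself never returns
// `Err` -- a failed command head is folded into an error node and the rest of
// the pipeline keeps being processed, which is what lets every span still
// receive a color. `Command`, `parse_head`, and `classify` are invented
// stand-ins.

#[derive(Debug, PartialEq)]
enum Command {
    Internal(String),
    External(String),
    Error(String), // plays the role of ClassifiedCommand::Error(ParseError)
}

fn parse_head(word: &str) -> Result<Command, String> {
    match word {
        "" => Err("expected a command head".to_string()),
        w if w.starts_with('^') => Ok(Command::External(w[1..].to_string())),
        w => Ok(Command::Internal(w.to_string())),
    }
}

// Infallible: failures become `Command::Error`, so the caller can keep walking
// the rest of the pipeline instead of aborting.
fn classify(word: &str) -> Command {
    match parse_head(word) {
        Ok(command) => command,
        Err(err) => Command::Error(err),
    }
}

fn main() {
    let stages = ["ls", "", "^grep"];
    let classified: Vec<Command> = stages.iter().map(|word| classify(*word)).collect();

    assert_eq!(classified.len(), 3); // every stage produced a node, errors included
    assert_eq!(
        classified[1],
        Command::Error("expected a command head".to_string())
    );
}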
Some((positional, named)) => (positional, named), }; - let end = iterator.span_at_cursor(); + let end = token_nodes.span_at_cursor(); + + let expr = hir::Expression::Command(signature.span).into_expr(signature.span); let call = hir::Call { - head: Box::new(head.to_expression()), + head: Box::new(expr), positional, named, span: start.until(end), }; - Ok(ClassifiedCommand::Internal(InternalCommand::new( + ClassifiedCommand::Internal(InternalCommand::new( signature.item.name.clone(), Tag { span: signature.span, anchor: None, }, call, - ))) + )) } } } } #[derive(Debug, Copy, Clone)] -pub struct InternalCommandHeadShape; +pub struct MaybeWhitespaceEof; -impl FallibleColorSyntax for InternalCommandHeadShape { - type Info = (); - type Input = (); +impl ExpandSyntax for MaybeWhitespaceEof { + type Output = Result<(), ParseError>; fn name(&self) -> &'static str { - "InternalCommandHeadShape" + "" } - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result<(), ShellError> { - let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output { + token_nodes.atomic_parse(|token_nodes| { + token_nodes.expand_infallible(MaybeSpaceShape); + token_nodes.expand_syntax(EofShape) + }) + } +} - let peeked_head = match peeked_head { - Err(_) => return Ok(()), - Ok(peeked_head) => peeked_head, - }; +#[derive(Debug, Copy, Clone)] +pub struct EofShape; - let node = peeked_head.commit(); +impl ExpandSyntax for EofShape { + type Output = Result<(), ParseError>; + + fn name(&self) -> &'static str { + "eof" + } + + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<(), ParseError> { + let next = token_nodes.peek(); + let node = next.node; match node { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - span, - }) => token_nodes.color_shape(FlatShape::Word.spanned(*span)), - - TokenNode::Token(Token { - unspanned: UnspannedToken::String(_inner_tag), - span, - }) => token_nodes.color_shape(FlatShape::String.spanned(*span)), - - _node => token_nodes.color_shape(FlatShape::Error.spanned(node.span())), - }; - - Ok(()) - } -} - -impl ExpandExpression for InternalCommandHeadShape { - fn name(&self) -> &'static str { - "internal command head" - } - - fn expand_expr( - &self, - token_nodes: &mut TokensIterator<'_>, - _context: &ExpandContext, - ) -> Result { - let peeked_head = token_nodes.peek_non_ws().not_eof("command head")?; - - let expr = match peeked_head.node { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - span, - }) => hir::RawExpression::Literal(hir::RawLiteral::Bare.into_literal(span)) - .into_expr(span), - - TokenNode::Token(Token { - unspanned: UnspannedToken::String(inner_span), - span, - }) => { - hir::RawExpression::Literal(hir::RawLiteral::String(*inner_span).into_literal(span)) - .into_expr(span) - } - - node => { - return Err(ParseError::mismatch( - "command head", - node.type_name().spanned(node.span()), - )) - } - }; - - peeked_head.commit(); - - Ok(expr) - } -} - -pub(crate) struct SingleError<'token> { - expected: &'static str, - node: &'token Token, -} - -impl<'token> SingleError<'token> { - pub(crate) fn error(&self) -> ParseError { - ParseError::mismatch(self.expected, self.node.type_name().spanned(self.node.span)) - } -} - -fn parse_single_node( - token_nodes: &mut TokensIterator<'_>, - expected: &'static str, - callback: impl FnOnce(UnspannedToken, Span, SingleError) -> Result, -) 
-> Result { - token_nodes.peek_any_token(expected, |node| match node { - TokenNode::Token(token) => callback( - token.unspanned, - token.span, - SingleError { - expected, - node: token, - }, - ), - - other => Err(ParseError::mismatch( - expected, - other.type_name().spanned(other.span()), - )), - }) -} - -fn parse_single_node_skipping_ws( - token_nodes: &mut TokensIterator<'_>, - expected: &'static str, - callback: impl FnOnce(UnspannedToken, Span, SingleError) -> Result, -) -> Result { - let peeked = token_nodes.peek_non_ws().not_eof(expected)?; - - let expr = match peeked.node { - TokenNode::Token(token) => callback( - token.unspanned, - token.span, - SingleError { - expected, - node: token, - }, - )?, - - other => { - return Err(ShellError::type_error( - expected, - other.type_name().spanned(other.span()), - )) + None => Ok(()), + Some(node) => Err(ParseError::mismatch("eof", node.spanned_type_name())), } - }; - - peeked.commit(); - - Ok(expr) + } } #[derive(Debug, Copy, Clone)] pub struct WhitespaceShape; -impl FallibleColorSyntax for WhitespaceShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "WhitespaceShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result<(), ShellError> { - let peeked = token_nodes.peek_any().not_eof("whitespace"); - - let peeked = match peeked { - Err(_) => return Ok(()), - Ok(peeked) => peeked, - }; - - let node = peeked.commit(); - - match node { - TokenNode::Whitespace(span) => { - token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)) - } - - _other => return Ok(()), - }; - - Ok(()) - } -} - impl ExpandSyntax for WhitespaceShape { - type Output = Span; + type Output = Result; fn name(&self) -> &'static str { "whitespace" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result { - let peeked = token_nodes.peek_any().not_eof("whitespace")?; - - let span = match peeked.node { - TokenNode::Whitespace(tag) => *tag, - - other => { - return Err(ParseError::mismatch( - "whitespace", - other.type_name().spanned(other.span()), - )) - } - }; - - peeked.commit(); - - Ok(span) + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + token_nodes.expand_token(WhitespaceType, |span| Ok((FlatShape::Whitespace, span))) } } -#[derive(Debug, Copy, Clone)] -pub struct SpacedExpression { - inner: T, -} - -impl ExpandExpression for SpacedExpression { - fn name(&self) -> &'static str { - "spaced expression" - } - - fn expand_expr<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - // TODO: Make the name part of the trait - let peeked = token_nodes.peek_any().not_eof("whitespace")?; - - match peeked.node { - TokenNode::Whitespace(_) => { - peeked.commit(); - expand_expr(&self.inner, token_nodes, context) - } - - other => Err(ParseError::mismatch( - "whitespace", - other.type_name().spanned(other.span()), - )), - } - } -} - -pub fn maybe_spaced(inner: T) -> MaybeSpacedExpression { - MaybeSpacedExpression { inner } -} - -#[derive(Debug, Copy, Clone)] -pub struct MaybeSpacedExpression { - inner: T, -} - #[derive(Debug, Copy, Clone)] pub struct MaybeSpaceShape; @@ -1095,165 +659,22 @@ impl ExpandSyntax for MaybeSpaceShape { type Output = Option; fn name(&self) -> &'static str { - "maybe space" + "whitespace?" 
} - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result { - let peeked = token_nodes.peek_any().not_eof("whitespace"); + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Option { + let result = token_nodes.expand_token(WhitespaceType, |span| { + Ok((FlatShape::Whitespace, Some(span))) + }); - let span = match peeked { - Err(_) => None, - Ok(peeked) => { - if let TokenNode::Whitespace(..) = peeked.node { - let node = peeked.commit(); - Some(node.span()) - } else { - None - } - } - }; - - Ok(span) - } -} - -impl ColorSyntax for MaybeSpaceShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "MaybeSpaceShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Self::Info { - let peeked = token_nodes.peek_any().not_eof("whitespace"); - - let peeked = match peeked { - Err(_) => return, - Ok(peeked) => peeked, - }; - - if let TokenNode::Whitespace(span) = peeked.node { - peeked.commit(); - token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)); - } + // No space is acceptable, but we need to err inside expand_token so we don't + // consume the non-whitespace token + result.unwrap_or(None) } } #[derive(Debug, Copy, Clone)] pub struct SpaceShape; -impl FallibleColorSyntax for SpaceShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "SpaceShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result<(), ShellError> { - let peeked = token_nodes.peek_any().not_eof("whitespace")?; - - match peeked.node { - TokenNode::Whitespace(span) => { - peeked.commit(); - token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)); - Ok(()) - } - - other => Err(ShellError::type_error( - "whitespace", - other.type_name().spanned(other.span()), - )), - } - } -} - -impl ExpandExpression for MaybeSpacedExpression { - fn name(&self) -> &'static str { - "maybe space" - } - - fn expand_expr<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - // TODO: Make the name part of the trait - let peeked = token_nodes.peek_any().not_eof("whitespace")?; - - match peeked.node { - TokenNode::Whitespace(_) => { - peeked.commit(); - expand_expr(&self.inner, token_nodes, context) - } - - _ => { - peeked.rollback(); - expand_expr(&self.inner, token_nodes, context) - } - } - } -} - -pub fn spaced(inner: T) -> SpacedExpression { - SpacedExpression { inner } -} - -fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expression { - if span.slice(source) == "it" { - hir::Expression::it_variable(span, token_span) - } else { - hir::Expression::variable(span, token_span) - } -} - #[derive(Debug, Copy, Clone)] pub struct CommandShape; - -impl ColorSyntax for CommandShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "CommandShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) { - let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context); - - match kind { - Err(_) => { - // We didn't find a command, so we'll have to fall back to parsing this pipeline part - // as a blob of undifferentiated expressions - color_syntax(&ExpressionListShape, token_nodes, context); - } - - Ok(CommandHeadKind::External) => { - 
color_syntax(&ExternalTokensShape, token_nodes, context); - } - Ok(CommandHeadKind::Internal(signature)) => { - color_syntax_with(&CommandTailShape, &signature, token_nodes, context); - } - }; - } -} diff --git a/crates/nu-parser/src/hir/syntax_shape/block.rs b/crates/nu-parser/src/hir/syntax_shape/block.rs index 788fddbc8d..e32bc0b8a6 100644 --- a/crates/nu-parser/src/hir/syntax_shape/block.rs +++ b/crates/nu-parser/src/hir/syntax_shape/block.rs @@ -1,141 +1,82 @@ +use crate::hir::Expression; use crate::{ hir, hir::syntax_shape::{ - color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax, - DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape, - ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, PathTailSyntax, + ExpandSyntax, ExpressionContinuationShape, MemberShape, PathTailShape, PathTailSyntax, VariablePathShape, }, hir::tokens_iterator::TokensIterator, - parse::token_tree::Delimiter, }; -use nu_errors::{ParseError, ShellError}; +use hir::SpannedExpression; +use nu_errors::ParseError; use nu_source::Span; #[derive(Debug, Copy, Clone)] -pub struct AnyBlockShape; +pub struct CoerceBlockShape; -impl FallibleColorSyntax for AnyBlockShape { - type Info = (); - type Input = (); +impl ExpandSyntax for CoerceBlockShape { + type Output = Result; - fn name(&self) -> &'static str { - "AnyBlockShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let block = token_nodes.peek_non_ws().not_eof("block"); - - let block = match block { - Err(_) => return Ok(()), - Ok(block) => block, - }; - - // is it just a block? - let block = block.node.as_block(); - - if let Some((children, spans)) = block { - token_nodes.child(children, context.source.clone(), |token_nodes| { - color_syntax_with( - &DelimitedShape, - &(Delimiter::Brace, spans.0, spans.1), - token_nodes, - context, - ); - }); - - return Ok(()); - } - - // Otherwise, look for a shorthand block. If none found, fail - color_fallible_syntax(&ShorthandBlock, token_nodes, context) - } -} - -impl ExpandExpression for AnyBlockShape { fn name(&self) -> &'static str { "any block" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - let block = token_nodes.peek_non_ws().not_eof("block")?; - + ) -> Result { // is it just a block? - let block = block.node.as_block(); - - if let Some((block, _tags)) = block { - let mut iterator = - TokensIterator::new(&block.item, block.span, context.source.clone(), false); - - let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?.exprs; - - return Ok(hir::RawExpression::Block(exprs.item).into_expr(block.span)); - } - - expand_syntax(&ShorthandBlock, token_nodes, context) + token_nodes + .expand_syntax(BlockShape) + .or_else(|_| token_nodes.expand_syntax(ShorthandBlockShape)) } } #[derive(Debug, Copy, Clone)] -pub struct ShorthandBlock; +pub struct BlockShape; -impl FallibleColorSyntax for ShorthandBlock { - type Info = (); - type Input = (); +impl ExpandSyntax for BlockShape { + type Output = Result; fn name(&self) -> &'static str { - "ShorthandBlock" + "block" } - fn color_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, - _input: &(), token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - // Try to find a shorthand head. 
If none found, fail - color_fallible_syntax(&ShorthandPath, token_nodes, context)?; + ) -> Result { + let exprs = token_nodes.block()?; - loop { - // Check to see whether there's any continuation after the head expression - let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context); - - match result { - // if no continuation was found, we're done - Err(_) => break, - // if a continuation was found, look for another one - Ok(_) => continue, - } - } - - Ok(()) + Ok(hir::Expression::Block(exprs.item).into_expr(exprs.span)) } } -impl ExpandExpression for ShorthandBlock { +#[derive(Debug, Copy, Clone)] +pub struct ShorthandBlockShape; + +impl ExpandSyntax for ShorthandBlockShape { + type Output = Result; + fn name(&self) -> &'static str { "shorthand block" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - let path = expand_expr(&ShorthandPath, token_nodes, context)?; - let start = path.span; - let expr = continue_expression(path, token_nodes, context); - let end = expr.span; - let block = hir::RawExpression::Block(vec![expr]).into_expr(start.until(end)); + ) -> Result { + let mut current = token_nodes.expand_syntax(ShorthandPath)?; + + loop { + match token_nodes.expand_syntax(ExpressionContinuationShape) { + Result::Err(_) => break, + Result::Ok(continuation) => current = continuation.append_to(current), + } + } + let span = current.span; + + let block = hir::Expression::Block(vec![current]).into_expr(span); Ok(block) } @@ -145,74 +86,40 @@ impl ExpandExpression for ShorthandBlock { #[derive(Debug, Copy, Clone)] pub struct ShorthandPath; -impl FallibleColorSyntax for ShorthandPath { - type Info = (); - type Input = (); +impl ExpandSyntax for ShorthandPath { + type Output = Result; - fn name(&self) -> &'static str { - "ShorthandPath" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - token_nodes.atomic(|token_nodes| { - let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context); - - if variable.is_ok() { - // if it's a variable path, that's the head part - return Ok(()); - } - - // otherwise, we'll try to find a member path - - // look for a member (`` -> `$it.`) - color_fallible_syntax(&MemberShape, token_nodes, context)?; - - // Now that we've synthesized the head, of the path, proceed to expand the tail of the path - // like any other path. - // It's ok if there's no path tail; a single member is sufficient - let _ = color_fallible_syntax(&PathTailShape, token_nodes, context); - - Ok(()) - }) - } -} - -impl ExpandExpression for ShorthandPath { fn name(&self) -> &'static str { "shorthand path" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { + ) -> Result { // if it's a variable path, that's the head part - let path = expand_expr(&VariablePathShape, token_nodes, context); + let path = token_nodes.expand_syntax(VariablePathShape); if let Ok(path) = path { return Ok(path); } // Synthesize the head of the shorthand path (`` -> `$it.`) - let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?; + let mut head = token_nodes.expand_syntax(ShorthandHeadShape)?; // Now that we've synthesized the head, of the path, proceed to expand the tail of the path // like any other path. 
- let tail = expand_syntax(&PathTailShape, token_nodes, context); + let tail = token_nodes.expand_syntax(PathTailShape); match tail { Err(_) => Ok(head), - Ok(PathTailSyntax { tail, .. }) => { + Ok(PathTailSyntax { tail, span }) => { + let span = head.span.until(span); + // For each member that `PathTailShape` expanded, join it onto the existing expression // to form a new path for member in tail { - head = hir::Expression::dot_member(head, member); + head = Expression::dot_member(head, member).into_expr(span); } Ok(head) @@ -225,27 +132,28 @@ impl ExpandExpression for ShorthandPath { #[derive(Debug, Copy, Clone)] pub struct ShorthandHeadShape; -impl ExpandExpression for ShorthandHeadShape { +impl ExpandSyntax for ShorthandHeadShape { + type Output = Result; + fn name(&self) -> &'static str { "shorthand head" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - let head = expand_syntax(&MemberShape, token_nodes, context)?; - let head = head.to_path_member(context.source); + ) -> Result { + let head = token_nodes.expand_syntax(MemberShape)?; + let head = head.to_path_member(&token_nodes.source()); // Synthesize an `$it` expression let it = synthetic_it(); let span = head.span; - Ok(hir::Expression::path(it, vec![head], span)) + Ok(Expression::path(it, vec![head]).into_expr(span)) } } -fn synthetic_it() -> hir::Expression { - hir::Expression::it_variable(Span::unknown(), Span::unknown()) +fn synthetic_it() -> hir::SpannedExpression { + Expression::it_variable(Span::unknown()).into_expr(Span::unknown()) } diff --git a/crates/nu-parser/src/hir/syntax_shape/design.md b/crates/nu-parser/src/hir/syntax_shape/design.md new file mode 100644 index 0000000000..5b4632a814 --- /dev/null +++ b/crates/nu-parser/src/hir/syntax_shape/design.md @@ -0,0 +1,72 @@ +# Meaningful Primitive Tokens + +- `int` +- `decimal` +- `op::name` +- `dot` +- `dotdot` +- `string` +- `var::it` +- `var::other` +- `external-command` +- `pattern::glob` +- `word` +- `comment` +- `whitespace` +- `separator` +- `longhand-flag` +- `shorthand-flag` + +# Grouped Tokens + +- `(call head ...tail)` +- `(list ...nodes)` +- `(paren ...nodes)` +- `(square ...nodes)` +- `(curly ...nodes)` +- `(pipeline ...elements) where elements: pipeline-element` +- `(pipeline-element pipe? 
token)` + +# Atomic Tokens + +- `(unit number unit) where number: number, unit: unit` + +# Expression + +``` +start(ExpressionStart) continuation(ExpressionContinuation)* -> +``` + +## ExpressionStart + +``` +word -> String +unit -> Unit +number -> Number +string -> String +var::it -> Var::It +var::other -> Var::Other +pattern::glob -> Pattern::Glob +square -> Array +``` + +## TightExpressionContinuation + +``` +dot AnyExpression -> Member +dodot AnyExpression -> RangeContinuation +``` + +## InfixExpressionContinuation + +``` +whitespace op whitespace AnyExpression -> InfixContinuation +``` + +## Member + +``` +int -> Member::Int +word -> Member::Word +string -> Member::String +``` diff --git a/crates/nu-parser/src/hir/syntax_shape/expression.rs b/crates/nu-parser/src/hir/syntax_shape/expression.rs index c2457920ae..c7c9d920e6 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression.rs @@ -1,4 +1,3 @@ -pub(crate) mod atom; pub(crate) mod delimited; pub(crate) mod file_path; pub(crate) mod list; @@ -10,311 +9,64 @@ pub(crate) mod unit; pub(crate) mod variable_path; use crate::hir::syntax_shape::{ - color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom, - expand_delimited_square, expand_expr, expand_syntax, BareShape, ColorableDotShape, DotShape, - ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation, - ExpressionContinuationShape, FallibleColorSyntax, FlatShape, UnspannedAtomicToken, + BareExpressionShape, DelimitedSquareShape, ExpandContext, ExpandSyntax, + ExpressionContinuationShape, NumberExpressionShape, PatternExpressionShape, + StringExpressionShape, UnitExpressionShape, VariableShape, }; -use crate::{ - hir, - hir::{Expression, TokensIterator}, -}; -use nu_errors::{ParseError, ShellError}; -use nu_source::{HasSpan, Span, Spanned, SpannedItem, Tag}; +use crate::hir::{SpannedExpression, TokensIterator}; +use nu_errors::ParseError; use std::path::PathBuf; #[derive(Debug, Copy, Clone)] pub struct AnyExpressionShape; -impl ExpandExpression for AnyExpressionShape { +impl ExpandSyntax for AnyExpressionShape { + type Output = Result; + fn name(&self) -> &'static str { "any expression" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - // Look for an expression at the cursor - let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?; + ) -> Result { + token_nodes.atomic_parse(|token_nodes| { + // Look for an atomic expression at the cursor + let mut current = token_nodes.expand_syntax(AnyExpressionStartShape)?; - Ok(continue_expression(head, token_nodes, context)) - } -} - -impl FallibleColorSyntax for AnyExpressionShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "AnyExpressionShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - // Look for an expression at the cursor - color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?; - - match continue_coloring_expression(token_nodes, context) { - Err(_) => { - // it's fine for there to be no continuation + loop { + match token_nodes.expand_syntax(ExpressionContinuationShape) { + Err(_) => return Ok(current), + Ok(continuation) => current = continuation.append_to(current), + } } - - Ok(()) => {} - } - - Ok(()) - } -} - -pub(crate) fn continue_expression( - mut 
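// Illustrative sketch, not part of the patch: the
// `start(ExpressionStart) continuation(ExpressionContinuation)*` rule from
// design.md, in the same shape as the loop in `AnyExpressionShape` above --
// expand a head expression, then keep folding continuations onto it until none
// is left. The miniature `Expr` type and the dotted-path input are invented
// for the example.

#[derive(Debug, PartialEq)]
enum Expr {
    Var(String),
    Member(Box<Expr>, String), // (head, member), like Expression::dot_member
}

fn expand_expression(input: &str) -> Option<Expr> {
    let mut parts = input.split('.');

    // ExpressionStart: a bare variable name
    let head = parts.next().filter(|part| !part.is_empty())?;
    let mut current = Expr::Var(head.to_string());

    // ExpressionContinuation*: each `.member` wraps the expression built so far
    for member in parts {
        if member.is_empty() {
            return None;
        }
        current = Expr::Member(Box::new(current), member.to_string());
    }

    Some(current)
}

fn main() {
    let expr = expand_expression("it.name.first").unwrap();

    assert_eq!(
        expr,
        Expr::Member(
            Box::new(Expr::Member(
                Box::new(Expr::Var("it".to_string())),
                "name".to_string()
            )),
            "first".to_string()
        )
    );
}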
head: hir::Expression, - token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, -) -> hir::Expression { - loop { - // Check to see whether there's any continuation after the head expression - let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context); - - match continuation { - // If there's no continuation, return the head - Err(_) => return head, - // Otherwise, form a new expression by combining the head with the continuation - Ok(continuation) => match continuation { - // If the continuation is a `.member`, form a path with the new member - ExpressionContinuation::DotSuffix(_dot, member) => { - head = Expression::dot_member(head, member); - } - - // Otherwise, if the continuation is an infix suffix, form an infix expression - ExpressionContinuation::InfixSuffix(op, expr) => { - head = Expression::infix(head, op, expr); - } - }, - } - } -} - -pub(crate) fn continue_coloring_expression( - token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, -) -> Result<(), ShellError> { - // if there's not even one expression continuation, fail - color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?; - - loop { - // Check to see whether there's any continuation after the head expression - let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context); - - if result.is_err() { - // We already saw one continuation, so just return - return Ok(()); - } + }) } } #[derive(Debug, Copy, Clone)] pub struct AnyExpressionStartShape; -impl ExpandExpression for AnyExpressionStartShape { +impl ExpandSyntax for AnyExpressionStartShape { + type Output = Result; + fn name(&self) -> &'static str { "any expression start" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?; - - match atom.unspanned { - UnspannedAtomicToken::Size { number, unit } => Ok(hir::Expression::size( - number.to_number(context.source), - unit.item, - Tag { - span: atom.span, - anchor: None, - }, - )), - - UnspannedAtomicToken::SquareDelimited { nodes, .. } => { - expand_delimited_square(&nodes, atom.span, context) - } - - UnspannedAtomicToken::Word { .. } => { - let end = expand_syntax(&BareTailShape, token_nodes, context)?; - Ok(hir::Expression::bare(atom.span.until_option(end))) - } - - other => other - .into_atomic_token(atom.span) - .to_hir(context, "expression"), - } - } -} - -impl FallibleColorSyntax for AnyExpressionStartShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "AnyExpressionStartShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let atom = token_nodes.spanned(|token_nodes| { - expand_atom( - token_nodes, - "expression", - context, - ExpansionRule::permissive(), - ) - }); - - let atom = match atom { - Spanned { - item: Err(_err), - span, - } => { - token_nodes.color_shape(FlatShape::Error.spanned(span)); - return Ok(()); - } - - Spanned { - item: Ok(value), .. 
- } => value, - }; - - match atom.unspanned { - UnspannedAtomicToken::Size { number, unit } => token_nodes.color_shape( - FlatShape::Size { - number: number.span(), - unit: unit.span, - } - .spanned(atom.span), - ), - - UnspannedAtomicToken::SquareDelimited { nodes, spans } => { - token_nodes.child( - (&nodes[..]).spanned(atom.span), - context.source.clone(), - |tokens| { - color_delimited_square(spans, tokens, atom.span, context); - }, - ); - } - - UnspannedAtomicToken::Word { .. } | UnspannedAtomicToken::Dot { .. } => { - token_nodes.color_shape(FlatShape::Word.spanned(atom.span)); - } - - _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), - } - - Ok(()) - } -} - -#[derive(Debug, Copy, Clone)] -pub struct BareTailShape; - -impl FallibleColorSyntax for BareTailShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "BareTailShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let len = token_nodes.state().shapes().len(); - - loop { - let word = - color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context); - - if word.is_ok() { - // if a word was found, continue - continue; - } - - // if a word wasn't found, try to find a dot - - // try to find a dot - let dot = color_fallible_syntax_with( - &ColorableDotShape, - &FlatShape::Word, - token_nodes, - context, - ); - - match dot { - // if a dot was found, try to find another word - Ok(_) => continue, - // otherwise, we're done - Err(_) => break, - } - } - - if token_nodes.state().shapes().len() > len { - Ok(()) - } else { - Err(ShellError::syntax_error( - "No tokens matched BareTailShape".spanned_unknown(), - )) - } - } -} - -impl ExpandSyntax for BareTailShape { - fn name(&self) -> &'static str { - "word continuation" - } - - type Output = Option; - - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result, ParseError> { - let mut end: Option = None; - - loop { - match expand_syntax(&BareShape, token_nodes, context) { - Ok(bare) => { - end = Some(bare.span); - continue; - } - - Err(_) => match expand_syntax(&DotShape, token_nodes, context) { - Ok(dot) => { - end = Some(dot); - continue; - } - - Err(_) => break, - }, - } - } - - Ok(end) + ) -> Result { + token_nodes + .expand_syntax(VariableShape) + .or_else(|_| token_nodes.expand_syntax(UnitExpressionShape)) + .or_else(|_| token_nodes.expand_syntax(BareExpressionShape)) + .or_else(|_| token_nodes.expand_syntax(PatternExpressionShape)) + .or_else(|_| token_nodes.expand_syntax(NumberExpressionShape)) + .or_else(|_| token_nodes.expand_syntax(StringExpressionShape)) + .or_else(|_| token_nodes.expand_syntax(DelimitedSquareShape)) } } diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/delimited.rs b/crates/nu-parser/src/hir/syntax_shape/expression/delimited.rs index 904b998bb3..4ea9f4fc97 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/delimited.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/delimited.rs @@ -1,55 +1,24 @@ -use crate::hir::syntax_shape::{ - color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode, -}; -use crate::{hir, hir::TokensIterator, Delimiter, FlatShape}; +use crate::hir::syntax_shape::ExpandSyntax; +use crate::hir::SpannedExpression; +use crate::{hir, hir::TokensIterator}; use nu_errors::ParseError; -use nu_source::{Span, SpannedItem, Tag}; - -pub fn 
expand_delimited_square( - children: &[TokenNode], - span: Span, - context: &ExpandContext, -) -> Result { - let mut tokens = TokensIterator::new(&children, span, context.source.clone(), false); - - let list = expand_syntax(&ExpressionListShape, &mut tokens, context); - - Ok(hir::Expression::list( - list?.exprs.item, - Tag { span, anchor: None }, - )) -} - -pub fn color_delimited_square( - (open, close): (Span, Span), - token_nodes: &mut TokensIterator, - _span: Span, - context: &ExpandContext, -) { - token_nodes.color_shape(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open)); - let _list = color_syntax(&ExpressionListShape, token_nodes, context); - token_nodes.color_shape(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); -} #[derive(Debug, Copy, Clone)] -pub struct DelimitedShape; +pub struct DelimitedSquareShape; -impl ColorSyntax for DelimitedShape { - type Info = (); - type Input = (Delimiter, Span, Span); +impl ExpandSyntax for DelimitedSquareShape { + type Output = Result; fn name(&self) -> &'static str { - "DelimitedShape" + "delimited square" } - fn color_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, - (delimiter, open, close): &(Delimiter, Span, Span), token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Self::Info { - token_nodes.color_shape(FlatShape::OpenDelimiter(*delimiter).spanned(*open)); - color_syntax(&ExpressionListShape, token_nodes, context); - token_nodes.color_shape(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); + ) -> Result { + let exprs = token_nodes.square()?; + + Ok(hir::Expression::list(exprs.item).into_expr(exprs.span)) } } diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/file_path.rs b/crates/nu-parser/src/hir/syntax_shape/expression/file_path.rs index 83ceb7f403..2679cf48d1 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/file_path.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/file_path.rs @@ -1,88 +1,62 @@ -use crate::hir::syntax_shape::expression::atom::{ - expand_atom, ExpansionRule, UnspannedAtomicToken, -}; use crate::hir::syntax_shape::{ - expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape, + expression::expand_file_path, BarePathShape, DecimalShape, ExpandContext, ExpandSyntax, + FlatShape, IntShape, StringShape, }; -use crate::{hir, hir::TokensIterator}; -use nu_errors::{ParseError, ShellError}; -use nu_source::SpannedItem; +use crate::hir::{Expression, SpannedExpression, TokensIterator}; +use crate::parse::token_tree::ExternalWordType; +use nu_errors::ParseError; +use nu_source::{HasSpan, Span}; #[derive(Debug, Copy, Clone)] pub struct FilePathShape; -impl FallibleColorSyntax for FilePathShape { - type Info = (); - type Input = (); +impl ExpandSyntax for FilePathShape { + type Output = Result; - fn name(&self) -> &'static str { - "FilePathShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let atom = expand_atom( - token_nodes, - "file path", - context, - ExpansionRule::permissive(), - ); - - let atom = match atom { - Err(_) => return Ok(()), - Ok(atom) => atom, - }; - - match atom.unspanned { - UnspannedAtomicToken::Word { .. } - | UnspannedAtomicToken::String { .. } - | UnspannedAtomicToken::Number { .. } - | UnspannedAtomicToken::Size { .. 
} => { - token_nodes.color_shape(FlatShape::Path.spanned(atom.span)); - } - - _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), - } - - Ok(()) - } -} - -impl ExpandExpression for FilePathShape { fn name(&self) -> &'static str { "file path" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - let atom = expand_atom( - token_nodes, - "file path", - context, - ExpansionRule::new().allow_external_word(), - )?; - - match atom.unspanned { - UnspannedAtomicToken::Word { text: body } - | UnspannedAtomicToken::ExternalWord { text: body } - | UnspannedAtomicToken::String { body } => { - let path = expand_file_path(body.slice(context.source), context); - Ok(hir::Expression::file_path(path, atom.span)) - } - - UnspannedAtomicToken::Number { .. } | UnspannedAtomicToken::Size { .. } => { - let path = atom.span.slice(context.source); - Ok(hir::Expression::file_path(path, atom.span)) - } - - _ => atom.to_hir(context, "file path"), - } + ) -> Result { + token_nodes + .expand_syntax(BarePathShape) + .or_else(|_| token_nodes.expand_syntax(ExternalWordShape)) + .map(|span| file_path(span, token_nodes.context()).into_expr(span)) + .or_else(|_| { + token_nodes.expand_syntax(StringShape).map(|syntax| { + file_path(syntax.inner, token_nodes.context()).into_expr(syntax.span) + }) + }) + .or_else(|_| { + token_nodes + .expand_syntax(IntShape) + .or_else(|_| token_nodes.expand_syntax(DecimalShape)) + .map(|number| { + file_path(number.span(), token_nodes.context()).into_expr(number.span()) + }) + }) + .map_err(|_| token_nodes.err_next_token("file path")) + } +} + +fn file_path(text: Span, context: &ExpandContext) -> Expression { + Expression::FilePath(expand_file_path(text.slice(context.source), context)) +} + +#[derive(Debug, Copy, Clone)] +pub struct ExternalWordShape; + +impl ExpandSyntax for ExternalWordShape { + type Output = Result; + + fn name(&self) -> &'static str { + "external word" + } + + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + token_nodes.expand_token(ExternalWordType, |span| Ok((FlatShape::ExternalWord, span))) } } diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/list.rs b/crates/nu-parser/src/hir/syntax_shape/expression/list.rs index 1a857c09ca..a975843f40 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/list.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/list.rs @@ -1,18 +1,15 @@ +use crate::hir::syntax_shape::flat_shape::FlatShape; use crate::{ hir, - hir::syntax_shape::{ - color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced, - AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule, - MaybeSpaceShape, SpaceShape, - }, + hir::syntax_shape::{AnyExpressionShape, ExpandSyntax, MaybeSpaceShape}, hir::TokensIterator, }; -use nu_errors::ParseError; +use derive_new::new; use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem}; #[derive(Debug, Clone)] pub struct ExpressionListSyntax { - pub exprs: Spanned>, + pub exprs: Spanned>, } impl HasSpan for ExpressionListSyntax { @@ -40,99 +37,60 @@ impl ExpandSyntax for ExpressionListShape { "expression list" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - let mut exprs = vec![]; - - let start = token_nodes.span_at_cursor(); - - if token_nodes.at_end_possible_ws() { - return Ok(ExpressionListSyntax { - exprs: 
exprs.spanned(start), - }); - } - - let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?; - - exprs.push(expr); - - loop { - if token_nodes.at_end_possible_ws() { - let end = token_nodes.span_at_cursor(); - return Ok(ExpressionListSyntax { - exprs: exprs.spanned(start.until(end)), - }); - } - - let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?; - - exprs.push(expr); - } - } -} - -impl ColorSyntax for ExpressionListShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "ExpressionListShape" - } - - /// The intent of this method is to fully color an expression list shape infallibly. - /// This means that if we can't expand a token into an expression, we fall back to - /// a simpler coloring strategy. - /// - /// This would apply to something like `where x >`, which includes an incomplete - /// binary operator. Since we will fail to process it as a binary operator, we'll - /// fall back to a simpler coloring and move on. - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) { + fn expand<'a, 'b>(&self, token_nodes: &mut TokensIterator<'_>) -> ExpressionListSyntax { // We encountered a parsing error and will continue with simpler coloring ("backoff // coloring mode") let mut backoff = false; - // Consume any leading whitespace - color_syntax(&MaybeSpaceShape, token_nodes, context); + let mut exprs = vec![]; + + let start = token_nodes.span_at_cursor(); + + token_nodes.expand_infallible(MaybeSpaceShape); + + if token_nodes.at_end() { + return ExpressionListSyntax { + exprs: exprs.spanned(start), + }; + } + + let expr = token_nodes.expand_syntax(AnyExpressionShape); + + match expr { + Ok(expr) => exprs.push(expr), + Err(_) => backoff = true, + } loop { - // If we reached the very end of the token stream, we're done if token_nodes.at_end() { - return; + let end = token_nodes.span_at_cursor(); + return ExpressionListSyntax { + exprs: exprs.spanned(start.until(end)), + }; } if backoff { let len = token_nodes.state().shapes().len(); // If we previously encountered a parsing error, use backoff coloring mode - color_syntax(&SimplestExpression, token_nodes, context); + token_nodes + .expand_infallible(SimplestExpression::new(vec!["expression".to_string()])); if len == token_nodes.state().shapes().len() && !token_nodes.at_end() { // This should never happen, but if it does, a panic is better than an infinite loop panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") } } else { - // Try to color the head of the stream as an expression - if color_fallible_syntax(&AnyExpressionShape, token_nodes, context).is_err() { - // If no expression was found, switch to backoff coloring mode + let expr = token_nodes.atomic_parse(|token_nodes| { + token_nodes.expand_infallible(MaybeSpaceShape); + token_nodes.expand_syntax(AnyExpressionShape) + }); - backoff = true; - continue; - } - - // If an expression was found, consume a space - if color_fallible_syntax(&SpaceShape, token_nodes, context).is_err() { - // If no space was found, we're either at the end or there's an error. - // Either way, switch to backoff coloring mode. If we're at the end - // it won't have any consequences. 
- backoff = true; + match expr { + Ok(expr) => exprs.push(expr), + Err(_) => { + backoff = true; + } } // Otherwise, move on to the next expression } @@ -141,69 +99,72 @@ impl ColorSyntax for ExpressionListShape { } /// BackoffColoringMode consumes all of the remaining tokens in an infallible way -#[derive(Debug, Copy, Clone)] -pub struct BackoffColoringMode; +#[derive(Debug, Clone, new)] +pub struct BackoffColoringMode { + allowed: Vec, +} -impl ColorSyntax for BackoffColoringMode { - type Info = (); - type Input = (); +impl ExpandSyntax for BackoffColoringMode { + type Output = Option; fn name(&self) -> &'static str { "BackoffColoringMode" } - fn color_syntax<'a, 'b>( - &self, - _input: &Self::Input, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Self::Info { + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Option { loop { if token_nodes.at_end() { break; } let len = token_nodes.state().shapes().len(); - color_syntax(&SimplestExpression, token_nodes, context); + token_nodes.expand_infallible(SimplestExpression::new(self.allowed.clone())); if len == token_nodes.state().shapes().len() && !token_nodes.at_end() { // This shouldn't happen, but if it does, a panic is better than an infinite loop panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes()); } } + + None } } /// The point of `SimplestExpression` is to serve as an infallible base case for coloring. /// As a last ditch effort, if we can't find any way to parse the head of the stream as an /// expression, fall back to simple coloring. -#[derive(Debug, Copy, Clone)] -pub struct SimplestExpression; +#[derive(Debug, Clone, new)] +pub struct SimplestExpression { + valid_shapes: Vec, +} -impl ColorSyntax for SimplestExpression { - type Info = (); - type Input = (); +impl ExpandSyntax for SimplestExpression { + type Output = Span; fn name(&self) -> &'static str { "SimplestExpression" } - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) { - let atom = expand_atom( - token_nodes, - "any token", - context, - ExpansionRule::permissive(), - ); + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Span { + if token_nodes.at_end() { + return Span::unknown(); + } - match atom { - Err(_) => {} - Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + let source = token_nodes.source(); + + let peeked = token_nodes.peek(); + + match peeked.not_eof("simplest expression") { + Err(_) => token_nodes.span_at_cursor(), + Ok(peeked) => { + let token = peeked.commit(); + + for shape in FlatShape::shapes(token, &source) { + token_nodes.color_err(shape, self.valid_shapes.clone()) + } + + token.span() + } } } } diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/number.rs b/crates/nu-parser/src/hir/syntax_shape/expression/number.rs index b8fb388e20..32b72660de 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/number.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/number.rs @@ -1,169 +1,109 @@ -use crate::hir::syntax_shape::{ - expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, - FallibleColorSyntax, FlatShape, TestSyntax, -}; -use crate::hir::tokens_iterator::Peeked; -use crate::parse::tokens::UnspannedToken; -use crate::{ - hir, - hir::{RawNumber, TokensIterator}, -}; -use nu_errors::{ParseError, 
ShellError}; -use nu_source::{Spanned, SpannedItem}; +use crate::hir::syntax_shape::{ExpandSyntax, FlatShape}; +use crate::hir::{Expression, SpannedExpression}; +use crate::hir::{RawNumber, TokensIterator}; +use crate::parse::token_tree::{DecimalType, IntType}; +use nu_errors::ParseError; +use nu_source::HasSpan; #[derive(Debug, Copy, Clone)] -pub struct NumberShape; +pub struct NumberExpressionShape; + +impl ExpandSyntax for NumberExpressionShape { + type Output = Result; -impl ExpandExpression for NumberShape { fn name(&self) -> &'static str { "number" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - parse_single_node(token_nodes, "Number", |token, token_span, err| { - Ok(match token { - UnspannedToken::GlobPattern - | UnspannedToken::CompareOperator(..) - | UnspannedToken::EvaluationOperator(..) => return Err(err.error()), - UnspannedToken::Variable(tag) if tag.slice(context.source) == "it" => { - hir::Expression::it_variable(tag, token_span) - } - UnspannedToken::ExternalCommand(tag) => { - hir::Expression::external_command(tag, token_span) - } - UnspannedToken::ExternalWord => { - return Err(ParseError::mismatch( - "number", - "syntax error".spanned(token_span), - )) - } - UnspannedToken::Variable(tag) => hir::Expression::variable(tag, token_span), - UnspannedToken::Number(number) => { - hir::Expression::number(number.to_number(context.source), token_span) - } - UnspannedToken::Bare => hir::Expression::bare(token_span), - UnspannedToken::String(tag) => hir::Expression::string(tag, token_span), - }) - }) + ) -> Result { + let source = token_nodes.source(); + + token_nodes + .expand_syntax(NumberShape) + .map(|number| Expression::number(number.to_number(&source)).into_expr(number.span())) } } -impl FallibleColorSyntax for NumberShape { - type Info = (); - type Input = (); +#[derive(Debug, Copy, Clone)] +pub struct IntExpressionShape; + +impl ExpandSyntax for IntExpressionShape { + type Output = Result; fn name(&self) -> &'static str { - "NumberShape" + "integer" } - fn color_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let atom = token_nodes.spanned(|token_nodes| { - expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) - }); + token_nodes: &mut TokensIterator<'_>, + ) -> Result { + let source = token_nodes.source(); - let atom = match atom { - Spanned { item: Err(_), span } => { - token_nodes.color_shape(FlatShape::Error.spanned(span)); - return Ok(()); - } - Spanned { item: Ok(atom), .. } => atom, - }; - - token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); - - Ok(()) + token_nodes.expand_token(IntType, |number| { + Ok(( + FlatShape::Int, + Expression::number(number.to_number(&source)), + )) + }) } } #[derive(Debug, Copy, Clone)] pub struct IntShape; -impl ExpandExpression for IntShape { +impl ExpandSyntax for IntShape { + type Output = Result; + fn name(&self) -> &'static str { "integer" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, - token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - parse_single_node(token_nodes, "Integer", |token, token_span, err| { - Ok(match token { - UnspannedToken::GlobPattern - | UnspannedToken::CompareOperator(..) - | UnspannedToken::EvaluationOperator(..) 
- | UnspannedToken::ExternalWord => return Err(err.error()), - UnspannedToken::Variable(span) if span.slice(context.source) == "it" => { - hir::Expression::it_variable(span, token_span) - } - UnspannedToken::ExternalCommand(span) => { - hir::Expression::external_command(span, token_span) - } - UnspannedToken::Variable(span) => hir::Expression::variable(span, token_span), - UnspannedToken::Number(number @ RawNumber::Int(_)) => { - hir::Expression::number(number.to_number(context.source), token_span) - } - UnspannedToken::Number(_) => return Err(err.error()), - UnspannedToken::Bare => hir::Expression::bare(token_span), - UnspannedToken::String(span) => hir::Expression::string(span, token_span), - }) - }) + token_nodes: &'b mut TokensIterator<'a>, + ) -> Result { + token_nodes.expand_token(IntType, |number| Ok((FlatShape::Int, number))) } } -impl FallibleColorSyntax for IntShape { - type Info = (); - type Input = (); +#[derive(Debug, Copy, Clone)] +pub struct DecimalShape; + +impl ExpandSyntax for DecimalShape { + type Output = Result; fn name(&self) -> &'static str { - "IntShape" + "decimal" } - fn color_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, - _input: &(), token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let atom = token_nodes.spanned(|token_nodes| { - expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) - }); - - let atom = match atom { - Spanned { item: Err(_), span } => { - token_nodes.color_shape(FlatShape::Error.spanned(span)); - return Ok(()); - } - Spanned { item: Ok(atom), .. } => atom, - }; - - token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); - - Ok(()) + ) -> Result { + token_nodes.expand_token(DecimalType, |number| Ok((FlatShape::Decimal, number))) } } -impl TestSyntax for NumberShape { - fn test<'a, 'b>( +#[derive(Debug, Copy, Clone)] +pub struct NumberShape; + +impl ExpandSyntax for NumberShape { + type Output = Result; + + fn name(&self) -> &'static str { + "decimal" + } + + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Option> { - let peeked = token_nodes.peek_any(); - - match peeked.node { - Some(token) if token.is_number() => Some(peeked), - _ => None, - } + ) -> Result { + token_nodes + .expand_syntax(IntShape) + .or_else(|_| token_nodes.expand_syntax(DecimalShape)) } } diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/pattern.rs b/crates/nu-parser/src/hir/syntax_shape/expression/pattern.rs index 08ad4c2dee..24b2b14653 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/pattern.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/pattern.rs @@ -1,77 +1,66 @@ use crate::hir::syntax_shape::{ - expand_atom, expand_bare, expression::expand_file_path, ExpandContext, ExpandExpression, - ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, UnspannedAtomicToken, + expand_bare, expression::expand_file_path, BarePathShape, ExpandContext, ExpandSyntax, + ExternalWordShape, StringShape, }; +use crate::hir::{Expression, SpannedExpression}; use crate::parse::operator::EvaluationOperator; -use crate::parse::tokens::{Token, UnspannedToken}; -use crate::{hir, hir::TokensIterator, TokenNode}; -use nu_errors::{ParseError, ShellError}; - -use nu_protocol::ShellTypeName; -use nu_source::{Span, SpannedItem}; +use crate::{hir, hir::TokensIterator, Token}; +use nu_errors::ParseError; +use nu_source::Span; #[derive(Debug, Copy, Clone)] pub struct PatternShape; -impl FallibleColorSyntax for PatternShape { - type Info 
= (); - type Input = (); +impl ExpandSyntax for PatternShape { + type Output = Result; - fn name(&self) -> &'static str { - "PatternShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - token_nodes.atomic(|token_nodes| { - let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; - - match &atom.unspanned { - UnspannedAtomicToken::GlobPattern { .. } | UnspannedAtomicToken::Word { .. } => { - token_nodes.color_shape(FlatShape::GlobPattern.spanned(atom.span)); - Ok(()) - } - - other => Err(ShellError::type_error( - "pattern", - other.type_name().spanned(atom.span), - )), - } - }) - } -} - -impl ExpandExpression for PatternShape { fn name(&self) -> &'static str { "glob pattern" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - let atom = expand_atom( - token_nodes, - "pattern", - context, - ExpansionRule::new().allow_external_word(), - )?; + ) -> Result { + let (inner, outer) = token_nodes + .expand_syntax(BarePatternShape) + .or_else(|_| token_nodes.expand_syntax(BarePathShape)) + .or_else(|_| token_nodes.expand_syntax(ExternalWordShape)) + .map(|span| (span, span)) + .or_else(|_| { + token_nodes + .expand_syntax(StringShape) + .map(|syntax| (syntax.inner, syntax.span)) + }) + .map_err(|_| token_nodes.err_next_token("glob pattern"))?; - match atom.unspanned { - UnspannedAtomicToken::Word { text: body } - | UnspannedAtomicToken::String { body } - | UnspannedAtomicToken::ExternalWord { text: body } - | UnspannedAtomicToken::GlobPattern { pattern: body } => { - let path = expand_file_path(body.slice(context.source), context); - Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span)) - } - _ => atom.to_hir(context, "pattern"), - } + Ok(file_pattern(inner, outer, token_nodes.context())) + } +} + +fn file_pattern(body: Span, outer: Span, context: &ExpandContext) -> SpannedExpression { + let path = expand_file_path(body.slice(context.source), context); + Expression::pattern(path.to_string_lossy()).into_expr(outer) +} + +#[derive(Debug, Copy, Clone)] +pub struct PatternExpressionShape; + +impl ExpandSyntax for PatternExpressionShape { + type Output = Result; + + fn name(&self) -> &'static str { + "pattern" + } + + fn expand<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + ) -> Result { + token_nodes.expand_syntax(BarePatternShape).map(|span| { + let path = expand_file_path(span.slice(&token_nodes.source()), token_nodes.context()); + Expression::pattern(path.to_string_lossy()).into_expr(span) + }) } } @@ -79,30 +68,17 @@ impl ExpandExpression for PatternShape { pub struct BarePatternShape; impl ExpandSyntax for BarePatternShape { - type Output = Span; + type Output = Result; fn name(&self) -> &'static str { "bare pattern" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - expand_bare(token_nodes, context, |token| match token { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - .. - }) - | TokenNode::Token(Token { - unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot), - .. - }) - | TokenNode::Token(Token { - unspanned: UnspannedToken::GlobPattern, - .. 
- }) => true, + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + expand_bare(token_nodes, |token| match token.unspanned() { + Token::Bare + | Token::EvaluationOperator(EvaluationOperator::Dot) + | Token::GlobPattern => true, _ => false, }) diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/range.rs b/crates/nu-parser/src/hir/syntax_shape/expression/range.rs index 7c729ce09d..462d0a789c 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/range.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/range.rs @@ -1,103 +1,47 @@ -use crate::hir::syntax_shape::expression::UnspannedAtomicToken; -use crate::hir::syntax_shape::{ - color_fallible_syntax, expand_atom, expand_expr, AnyExpressionShape, ExpandContext, - ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape, -}; -use crate::parse::operator::EvaluationOperator; -use crate::parse::token_tree::TokenNode; -use crate::parse::tokens::{Token, UnspannedToken}; -use crate::{hir, hir::TokensIterator}; -use nu_errors::{ParseError, ShellError}; -use nu_protocol::SpannedTypeName; -use nu_source::SpannedItem; +use crate::hir::syntax_shape::{AnyExpressionStartShape, ExpandSyntax, FlatShape}; +use crate::hir::TokensIterator; +use crate::hir::{Expression, SpannedExpression}; +use crate::parse::token_tree::DotDotType; +use nu_errors::ParseError; +use nu_source::{HasSpan, Span}; #[derive(Debug, Copy, Clone)] pub struct RangeShape; -impl ExpandExpression for RangeShape { +impl ExpandSyntax for RangeShape { + type Output = Result; + fn name(&self) -> &'static str { "range" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { + ) -> Result { token_nodes.atomic_parse(|token_nodes| { - let left = expand_expr(&AnyExpressionShape, token_nodes, context)?; + let left = token_nodes.expand_syntax(AnyExpressionStartShape)?; + let dotdot = token_nodes.expand_syntax(DotDotShape)?; + let right = token_nodes.expand_syntax(AnyExpressionStartShape)?; - let atom = expand_atom( - token_nodes, - "..", - context, - ExpansionRule::new().allow_eval_operator(), - )?; + let span = left.span.until(right.span); - let span = match atom.unspanned { - UnspannedAtomicToken::DotDot { text } => text, - _ => return Err(ParseError::mismatch("..", atom.spanned_type_name())), - }; - - let right = expand_expr(&AnyExpressionShape, token_nodes, context)?; - - Ok(hir::Expression::range(left, span, right)) + Ok(Expression::range(left, dotdot, right).into_expr(span)) }) } } -impl FallibleColorSyntax for RangeShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "RangeShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - token_nodes.atomic_parse(|token_nodes| { - color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?; - color_fallible_syntax(&DotDotShape, token_nodes, context)?; - color_fallible_syntax(&AnyExpressionShape, token_nodes, context) - })?; - - Ok(()) - } -} - #[derive(Debug, Copy, Clone)] struct DotDotShape; -impl FallibleColorSyntax for DotDotShape { - type Info = (); - type Input = (); +impl ExpandSyntax for DotDotShape { + type Output = Result; fn name(&self) -> &'static str { - ".." 
+ "dotdot" } - fn color_syntax<'a, 'b>( - &self, - _input: &Self::Input, - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result { - let peeked = token_nodes.peek_any().not_eof("..")?; - match &peeked.node { - TokenNode::Token(Token { - unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot), - span, - }) => { - peeked.commit(); - token_nodes.color_shape(FlatShape::DotDot.spanned(span)); - Ok(()) - } - token => Err(ShellError::type_error("..", token.spanned_type_name())), - } + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + token_nodes.expand_token(DotDotType, |token| Ok((FlatShape::DotDot, token.span()))) } } diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/string.rs b/crates/nu-parser/src/hir/syntax_shape/expression/string.rs index c33bd7c8a3..1fd92f1d10 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/string.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/string.rs @@ -1,90 +1,103 @@ -use crate::hir::syntax_shape::{ - expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression, - ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, UnspannedAtomicToken, -}; -use crate::hir::tokens_iterator::Peeked; -use crate::parse::tokens::UnspannedToken; -use crate::{hir, hir::TokensIterator}; -use nu_errors::{ParseError, ShellError}; -use nu_source::SpannedItem; +use crate::hir::syntax_shape::{ExpandSyntax, FlatShape, NumberShape, VariableShape}; +use crate::hir::TokensIterator; +use crate::hir::{Expression, SpannedExpression}; +use crate::parse::token_tree::{BareType, StringType}; +use nu_errors::ParseError; +use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span}; #[derive(Debug, Copy, Clone)] -pub struct StringShape; +pub struct CoerceStringShape; -impl FallibleColorSyntax for StringShape { - type Info = (); - type Input = FlatShape; +impl ExpandSyntax for CoerceStringShape { + type Output = Result; fn name(&self) -> &'static str { "StringShape" } - fn color_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, - input: &FlatShape, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); - - let atom = match atom { - Err(_) => return Ok(()), - Ok(atom) => atom, - }; - - match atom { - AtomicToken { - unspanned: UnspannedAtomicToken::String { .. 
}, - span, - } => token_nodes.color_shape((*input).spanned(span)), - atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), - } - - Ok(()) + ) -> Result { + token_nodes + .expand_token(StringType, |(inner, outer)| { + Ok(( + FlatShape::String, + Expression::string(inner).into_expr(outer), + )) + }) + .or_else(|_| { + token_nodes.expand_token(BareType, |span| { + Ok((FlatShape::String, Expression::string(span).into_expr(span))) + }) + }) + .or_else(|_| { + token_nodes + .expand_syntax(NumberShape) + .map(|number| Expression::string(number.span()).into_expr(number.span())) + }) } } -impl ExpandExpression for StringShape { +#[derive(Debug, Copy, Clone)] +pub struct StringExpressionShape; + +impl ExpandSyntax for StringExpressionShape { + type Output = Result; + fn name(&self) -> &'static str { "string" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, - token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - parse_single_node(token_nodes, "String", |token, token_span, err| { - Ok(match token { - UnspannedToken::GlobPattern - | UnspannedToken::CompareOperator(..) - | UnspannedToken::EvaluationOperator(..) - | UnspannedToken::ExternalWord => return Err(err.error()), - UnspannedToken::Variable(span) => { - expand_variable(span, token_span, &context.source) - } - UnspannedToken::ExternalCommand(span) => { - hir::Expression::external_command(span, token_span) - } - UnspannedToken::Number(_) => hir::Expression::bare(token_span), - UnspannedToken::Bare => hir::Expression::bare(token_span), - UnspannedToken::String(span) => hir::Expression::string(span, token_span), + token_nodes: &'b mut TokensIterator<'a>, + ) -> Result { + token_nodes.expand_syntax(VariableShape).or_else(|_| { + token_nodes.expand_token(StringType, |(inner, outer)| { + Ok(( + FlatShape::String, + Expression::string(inner).into_expr(outer), + )) }) }) } } -impl TestSyntax for StringShape { - fn test<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Option> { - let peeked = token_nodes.peek_any(); +#[derive(Debug, Copy, Clone)] +pub struct StringSyntax { + pub inner: Span, + pub span: Span, +} - match peeked.node { - Some(token) if token.is_string() => Some(peeked), - _ => None, - } +impl HasSpan for StringSyntax { + fn span(&self) -> Span { + self.span + } +} + +impl PrettyDebugWithSource for StringSyntax { + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { + b::primitive(self.span.slice(source)) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct StringShape; + +impl ExpandSyntax for StringShape { + type Output = Result; + + fn name(&self) -> &'static str { + "string" + } + + fn expand<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + ) -> Result { + token_nodes.expand_token(StringType, |(inner, outer)| { + Ok((FlatShape::String, StringSyntax { inner, span: outer })) + }) } } diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/unit.rs b/crates/nu-parser/src/hir/syntax_shape/expression/unit.rs index 44738f1075..d672706e0b 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/unit.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/unit.rs @@ -1,16 +1,19 @@ -use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax}; -use crate::parse::tokens::RawNumber; -use crate::parse::tokens::Token; -use crate::parse::tokens::UnspannedToken; +use crate::hir::syntax_shape::flat_shape::FlatShape; +use crate::hir::syntax_shape::ExpandSyntax; +use crate::hir::TokensIterator; +use crate::hir::{Expression, 
SpannedExpression}; +use crate::parse::number::RawNumber; +use crate::parse::token_tree::BareType; use crate::parse::unit::Unit; -use crate::{hir::TokensIterator, TokenNode}; use nom::branch::alt; use nom::bytes::complete::tag; use nom::character::complete::digit1; use nom::combinator::{all_consuming, opt, value}; use nom::IResult; use nu_errors::ParseError; -use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem}; +use nu_source::{ + b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text, +}; #[derive(Debug, Clone)] pub struct UnitSyntax { @@ -18,6 +21,17 @@ pub struct UnitSyntax { pub span: Span, } +impl UnitSyntax { + pub fn into_expr(self, source: &Text) -> SpannedExpression { + let UnitSyntax { + unit: (number, unit), + span, + } = self; + + Expression::size(number.to_number(source), *unit).into_expr(span) + } +} + impl PrettyDebugWithSource for UnitSyntax { fn pretty_debug(&self, source: &str) -> DebugDocBuilder { b::typed( @@ -33,42 +47,60 @@ impl HasSpan for UnitSyntax { } } +#[derive(Debug, Copy, Clone)] +pub struct UnitExpressionShape; + +impl ExpandSyntax for UnitExpressionShape { + type Output = Result; + + fn name(&self) -> &'static str { + "unit expression" + } + + fn expand<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + ) -> Result { + token_nodes + .expand_syntax(UnitShape) + .map(|unit| unit.into_expr(&token_nodes.source())) + } +} + #[derive(Debug, Copy, Clone)] pub struct UnitShape; impl ExpandSyntax for UnitShape { - type Output = UnitSyntax; + type Output = Result; fn name(&self) -> &'static str { "unit" } - fn expand_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, ) -> Result { - let peeked = token_nodes.peek_any().not_eof("unit")?; + let source = token_nodes.source(); - let span = match peeked.node { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - span, - }) => *span, - _ => return Err(peeked.type_error("unit")), - }; + token_nodes.expand_token(BareType, |span| { + let unit = unit_size(span.slice(&source), span); - let unit = unit_size(span.slice(context.source), span); + let (_, (number, unit)) = match unit { + Err(_) => return Err(ParseError::mismatch("unit", "word".spanned(span))), + Ok((number, unit)) => (number, unit), + }; - let (_, (number, unit)) = match unit { - Err(_) => return Err(ParseError::mismatch("unit", "word".spanned(span))), - Ok((number, unit)) => (number, unit), - }; - - peeked.commit(); - Ok(UnitSyntax { - unit: (number, unit), - span, + Ok(( + FlatShape::Size { + number: number.span(), + unit: unit.span, + }, + UnitSyntax { + unit: (number, unit), + span, + }, + )) }) } } diff --git a/crates/nu-parser/src/hir/syntax_shape/expression/variable_path.rs b/crates/nu-parser/src/hir/syntax_shape/expression/variable_path.rs index 605b6b18f6..fad73e5fb5 100644 --- a/crates/nu-parser/src/hir/syntax_shape/expression/variable_path.rs +++ b/crates/nu-parser/src/hir/syntax_shape/expression/variable_path.rs @@ -1,12 +1,10 @@ use crate::hir::syntax_shape::{ - color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax, - parse_single_node, AnyExpressionShape, BareShape, ExpandContext, ExpandExpression, - ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, Peeked, SkipSyntax, - StringShape, TestSyntax, UnspannedAtomicToken, WhitespaceShape, + AnyExpressionShape, BareShape, ExpandSyntax, FlatShape, IntShape, ParseError, StringShape, + 
WhitespaceShape, }; -use crate::parse::tokens::{RawNumber, UnspannedToken}; -use crate::{hir, hir::Expression, hir::TokensIterator, CompareOperator, EvaluationOperator}; -use nu_errors::ShellError; +use crate::hir::{Expression, SpannedExpression, TokensIterator}; +use crate::parse::token_tree::{CompareOperatorType, DotDotType, DotType, ItVarType, VarType}; +use crate::{hir, CompareOperator}; use nu_protocol::{PathMember, ShellTypeName}; use nu_source::{ b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem, @@ -19,120 +17,47 @@ use std::str::FromStr; #[derive(Debug, Copy, Clone)] pub struct VariablePathShape; -impl ExpandExpression for VariablePathShape { +impl ExpandSyntax for VariablePathShape { + type Output = Result; + fn name(&self) -> &'static str { "variable path" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { + ) -> Result { // 1. let the head be the first token, expecting a variable // 2. let the tail be an empty list of members // 2. while the next token (excluding ws) is a dot: // 1. consume the dot // 2. consume the next token as a member and push it onto tail - let head = expand_expr(&VariableShape, token_nodes, context)?; + let head = token_nodes.expand_syntax(VariableShape)?; let start = head.span; let mut end = start; let mut tail: Vec = vec![]; loop { - if DotShape.skip(token_nodes, context).is_err() { + if token_nodes.expand_syntax(DotShape).is_err() { break; } - let member = expand_syntax(&MemberShape, token_nodes, context)?; - let member = member.to_path_member(context.source); + let member = token_nodes.expand_syntax(MemberShape)?; + let member = member.to_path_member(&token_nodes.source()); end = member.span; tail.push(member); } - Ok(hir::Expression::path(head, tail, start.until(end))) - } -} - -impl FallibleColorSyntax for VariablePathShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "VariablePathShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - token_nodes.atomic(|token_nodes| { - // If the head of the token stream is not a variable, fail - color_fallible_syntax(&VariableShape, token_nodes, context)?; - - loop { - // look for a dot at the head of a stream - if color_fallible_syntax_with( - &ColorableDotShape, - &FlatShape::Dot, - token_nodes, - context, - ) - .is_err() - { - // if there's no dot, we're done - break; - } - - // otherwise, look for a member, and if you don't find one, fail - color_fallible_syntax(&MemberShape, token_nodes, context)?; - } - - Ok(()) - }) + Ok(Expression::path(head, tail).into_expr(start.until(end))) } } #[derive(Debug, Copy, Clone)] pub struct PathTailShape; -/// The failure mode of `PathTailShape` is a dot followed by a non-member -impl FallibleColorSyntax for PathTailShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "PathTailShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - token_nodes.atomic(|token_nodes| loop { - let result = color_fallible_syntax_with( - &ColorableDotShape, - &FlatShape::Dot, - token_nodes, - context, - ); - - if result.is_err() { - return Ok(()); - } - - // If we've seen a dot but not a member, fail - color_fallible_syntax(&MemberShape, token_nodes, context)?; - }) - } -} - 
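// A minimal, self-contained sketch of the dot/member loop that the new
// `VariablePathShape::expand` above implements: expand a head, then, while the
// next token is a dot, consume the dot and expect a member, collecting the
// members into the path tail. `Tok` and `parse_path` are hypothetical
// stand-ins for nu-parser's `TokensIterator`, `VariableShape`, `DotShape` and
// `MemberShape`; only the control flow mirrors the patch.
#[derive(Debug, Clone)]
enum Tok {
    Var(String),
    Dot,
    Member(String),
}

fn parse_path(tokens: &[Tok]) -> Result<(String, Vec<String>), String> {
    let mut iter = tokens.iter().peekable();

    // 1. the head must be a variable
    let head = match iter.next() {
        Some(Tok::Var(name)) => name.clone(),
        other => return Err(format!("expected a variable, found {:?}", other)),
    };

    // 2. while the next token is a dot, consume it and expect a member
    let mut tail = vec![];
    while let Some(Tok::Dot) = iter.peek() {
        iter.next(); // consume the dot
        match iter.next() {
            Some(Tok::Member(name)) => tail.push(name.clone()),
            other => return Err(format!("expected a member after the dot, found {:?}", other)),
        }
    }

    Ok((head, tail))
}

fn main() {
    let tokens = vec![
        Tok::Var("$it".into()),
        Tok::Dot,
        Tok::Member("name".into()),
        Tok::Dot,
        Tok::Member("first".into()),
    ];
    // prints: ("$it", ["name", "first"])
    println!("{:?}", parse_path(&tokens).unwrap());
}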
#[derive(Debug, Clone)] pub struct PathTailSyntax { pub tail: Vec, @@ -152,36 +77,32 @@ impl PrettyDebug for PathTailSyntax { } impl ExpandSyntax for PathTailShape { - type Output = PathTailSyntax; + type Output = Result; fn name(&self) -> &'static str { "path continuation" } - fn expand_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { + ) -> Result { let mut end: Option = None; let mut tail: Vec = vec![]; loop { - if DotShape.skip(token_nodes, context).is_err() { + if token_nodes.expand_syntax(DotShape).is_err() { break; } - let member = expand_syntax(&MemberShape, token_nodes, context)?; - let member = member.to_path_member(context.source); + let member = token_nodes.expand_syntax(MemberShape)?; + let member = member.to_path_member(&token_nodes.source()); end = Some(member.span); tail.push(member); } match end { - None => Err(ParseError::mismatch( - "path tail", - token_nodes.typed_span_at_cursor(), - )), + None => Err(token_nodes.err_next_token("path continuation")), Some(end) => Ok(PathTailSyntax { tail, span: end }), } @@ -189,30 +110,60 @@ impl ExpandSyntax for PathTailShape { } #[derive(Debug, Clone)] -pub enum ExpressionContinuation { - DotSuffix(Span, PathMember), - InfixSuffix(Spanned, Expression), +pub struct ContinuationSyntax { + kind: ContinuationSyntaxKind, + span: Span, } -impl PrettyDebugWithSource for ExpressionContinuation { - fn pretty_debug(&self, source: &str) -> DebugDocBuilder { - match self { - ExpressionContinuation::DotSuffix(_, suffix) => { - b::operator(".") + suffix.pretty_debug(source) +impl ContinuationSyntax { + pub fn append_to(self, expr: SpannedExpression) -> SpannedExpression { + match self.kind { + ContinuationSyntaxKind::Infix(op, right) => { + let span = expr.span.until(right.span); + Expression::infix(expr, op, right).into_expr(span) } - ExpressionContinuation::InfixSuffix(op, expr) => { - op.pretty_debug(source) + b::space() + expr.pretty_debug(source) + ContinuationSyntaxKind::Dot(_, member) => { + let span = expr.span.until(member.span); + Expression::dot_member(expr, member).into_expr(span) + } + ContinuationSyntaxKind::DotDot(_, right) => { + let span = expr.span.until(right.span); + Expression::range(expr, span, right).into_expr(span) } } } } -impl HasSpan for ExpressionContinuation { +impl HasSpan for ContinuationSyntax { fn span(&self) -> Span { + self.span + } +} + +impl PrettyDebugWithSource for ContinuationSyntax { + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { + b::typed("continuation", self.kind.pretty_debug(source)) + } +} + +#[derive(Debug, Clone)] +pub enum ContinuationSyntaxKind { + Infix(Spanned, SpannedExpression), + Dot(Span, PathMember), + DotDot(Span, SpannedExpression), +} + +impl PrettyDebugWithSource for ContinuationSyntaxKind { + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match self { - ExpressionContinuation::DotSuffix(dot, column) => dot.until(column.span), - ExpressionContinuation::InfixSuffix(operator, expression) => { - operator.span.until(expression.span) + ContinuationSyntaxKind::Infix(op, expr) => { + b::operator(op.span.slice(source)) + expr.pretty_debug(source) + } + ContinuationSyntaxKind::Dot(span, member) => { + b::operator(span.slice(source)) + member.pretty_debug(source) + } + ContinuationSyntaxKind::DotDot(span, expr) => { + b::operator(span.slice(source)) + expr.pretty_debug(source) } } } @@ -223,89 +174,55 @@ impl HasSpan for ExpressionContinuation { pub struct ExpressionContinuationShape; impl 
ExpandSyntax for ExpressionContinuationShape { - type Output = ExpressionContinuation; + type Output = Result; fn name(&self) -> &'static str { "expression continuation" } - fn expand_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - // Try to expand a `.` - let dot = expand_syntax(&DotShape, token_nodes, context); + ) -> Result { + token_nodes.atomic_parse(|token_nodes| { + // Try to expand a `.` + let dot = token_nodes.expand_syntax(DotShape); - match dot { - // If a `.` was matched, it's a `Path`, and we expect a `Member` next - Ok(dot) => { - let syntax = expand_syntax(&MemberShape, token_nodes, context)?; - let member = syntax.to_path_member(context.source); + if let Ok(dot) = dot { + // If a `.` was matched, it's a `Path`, and we expect a `Member` next + let syntax = token_nodes.expand_syntax(MemberShape)?; + let member = syntax.to_path_member(&token_nodes.source()); + let member_span = member.span; - Ok(ExpressionContinuation::DotSuffix(dot, member)) + return Ok(ContinuationSyntax { + kind: ContinuationSyntaxKind::Dot(dot, member), + span: dot.until(member_span), + }); + } + + // Try to expand a `..` + let dot = token_nodes.expand_syntax(DotDotShape); + + if let Ok(dotdot) = dot { + // If a `..` was matched, it's a `Range`, and we expect an `Expression` next + let expr = token_nodes.expand_syntax(AnyExpressionShape)?; + let expr_span = expr.span; + + return Ok(ContinuationSyntax { + kind: ContinuationSyntaxKind::DotDot(dotdot, expr), + span: dotdot.until(expr_span), + }); } // Otherwise, we expect an infix operator and an expression next - Err(_) => { - let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?.infix.item; - let next = expand_expr(&AnyExpressionShape, token_nodes, context)?; + let (_, op, _) = token_nodes.expand_syntax(InfixShape)?.infix.item; + let next = token_nodes.expand_syntax(AnyExpressionShape)?; + let next_span = next.span; - Ok(ExpressionContinuation::InfixSuffix(op.operator, next)) - } - } - } -} - -pub enum ContinuationInfo { - Dot, - Infix, -} - -impl FallibleColorSyntax for ExpressionContinuationShape { - type Info = ContinuationInfo; - type Input = (); - - fn name(&self) -> &'static str { - "ExpressionContinuationShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - token_nodes.atomic(|token_nodes| { - // Try to expand a `.` - let dot = color_fallible_syntax_with( - &ColorableDotShape, - &FlatShape::Dot, - token_nodes, - context, - ); - - match dot { - Ok(_) => { - // we found a dot, so let's keep looking for a member; if no member was found, fail - color_fallible_syntax(&MemberShape, token_nodes, context)?; - - Ok(ContinuationInfo::Dot) - } - Err(_) => { - let result = token_nodes.atomic(|token_nodes| { - // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail - color_fallible_syntax(&InfixShape, token_nodes, context)?; - - // now that we've seen an infix shape, look for any expression. 
If not found, fail - color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?; - - Ok(ContinuationInfo::Infix) - })?; - - Ok(result) - } - } + Ok(ContinuationSyntax { + kind: ContinuationSyntaxKind::Infix(op.operator, next), + span: op.operator.span.until(next_span), + }) }) } } @@ -313,68 +230,32 @@ impl FallibleColorSyntax for ExpressionContinuationShape { #[derive(Debug, Copy, Clone)] pub struct VariableShape; -impl ExpandExpression for VariableShape { +impl ExpandSyntax for VariableShape { + type Output = Result; + fn name(&self) -> &'static str { "variable" } - fn expand_expr<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - parse_single_node(token_nodes, "variable", |token, token_tag, err| { - Ok(match token { - UnspannedToken::Variable(tag) => { - if tag.slice(context.source) == "it" { - hir::Expression::it_variable(tag, token_tag) - } else { - hir::Expression::variable(tag, token_tag) - } - } - _ => return Err(err.error()), + ) -> Result { + token_nodes + .expand_token(ItVarType, |(inner, outer)| { + Ok(( + FlatShape::ItVariable, + Expression::it_variable(inner).into_expr(outer), + )) + }) + .or_else(|_| { + token_nodes.expand_token(VarType, |(inner, outer)| { + Ok(( + FlatShape::Variable, + Expression::variable(inner).into_expr(outer), + )) + }) }) - }) - } -} - -impl FallibleColorSyntax for VariableShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "VariableShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let atom = expand_atom( - token_nodes, - "variable", - context, - ExpansionRule::permissive(), - ); - - let atom = match atom { - Err(err) => return Err(err.into()), - Ok(atom) => atom, - }; - - match &atom.unspanned { - UnspannedAtomicToken::Variable { .. } => { - token_nodes.color_shape(FlatShape::Variable.spanned(atom.span)); - Ok(()) - } - UnspannedAtomicToken::ItVariable { .. 
} => { - token_nodes.color_shape(FlatShape::ItVariable.spanned(atom.span)); - Ok(()) - } - _ => Err(ParseError::mismatch("variable", atom.type_name().spanned(atom.span)).into()), - } } } @@ -396,6 +277,14 @@ impl ShellTypeName for Member { } impl Member { + pub fn int(span: Span, source: &Text) -> Member { + if let Ok(big_int) = BigInt::from_str(span.slice(source)) { + Member::Int(big_int, span) + } else { + unreachable!("Internal error: could not convert text to BigInt as expected") + } + } + pub fn to_path_member(&self, source: &Text) -> PathMember { match self { Member::String(outer, inner) => PathMember::string(inner.slice(source), *outer), @@ -426,11 +315,11 @@ impl HasSpan for Member { } impl Member { - pub fn to_expr(&self) -> hir::Expression { + pub fn to_expr(&self) -> hir::SpannedExpression { match self { - Member::String(outer, inner) => hir::Expression::string(*inner, *outer), - Member::Int(number, span) => hir::Expression::number(number.clone(), *span), - Member::Bare(span) => hir::Expression::string(*span, *span), + Member::String(outer, inner) => Expression::string(*inner).into_expr(outer), + Member::Int(number, span) => Expression::number(number.clone()).into_expr(span), + Member::Bare(span) => Expression::string(*span).into_expr(span), } } @@ -487,9 +376,9 @@ impl ColumnPathState { } } - pub fn into_path(self, next: Peeked) -> Result>, ParseError> { + pub fn into_path(self, err: ParseError) -> Result>, ParseError> { match self { - ColumnPathState::Initial => Err(next.type_error("column path")), + ColumnPathState::Initial => Err(err), ColumnPathState::LeadingDot(dot) => { Err(ParseError::mismatch("column", "dot".spanned(dot))) } @@ -502,86 +391,44 @@ impl ColumnPathState { } } -pub fn expand_column_path<'a, 'b>( - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, -) -> Result { - let mut state = ColumnPathState::Initial; - - loop { - let member = expand_syntax(&MemberShape, token_nodes, context); - - match member { - Err(_) => break, - Ok(member) => state = state.member(member), - } - - let dot = expand_syntax(&DotShape, token_nodes, context); - - match dot { - Err(_) => break, - Ok(dot) => state = state.dot(dot), - } - } - - let path = state.into_path(token_nodes.peek_non_ws())?; - - Ok(ColumnPathSyntax { - path: path.item, - tag: path.tag, - }) -} - #[derive(Debug, Copy, Clone)] pub struct ColumnPathShape; -impl FallibleColorSyntax for ColumnPathShape { - type Info = (); - type Input = (); +impl ExpandSyntax for ColumnPathShape { + type Output = Result; fn name(&self) -> &'static str { - "ColumnPathShape" + "column path" } - fn color_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, - _input: &(), token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - // If there's not even one member shape, fail - color_fallible_syntax(&MemberShape, token_nodes, context)?; + ) -> Result { + let mut state = ColumnPathState::Initial; loop { - let checkpoint = token_nodes.checkpoint(); + let member = token_nodes.expand_syntax(MemberShape); - match color_fallible_syntax_with( - &ColorableDotShape, - &FlatShape::Dot, - checkpoint.iterator, - context, - ) { - Err(_) => { - // we already saw at least one member shape, so return successfully - return Ok(()); - } + match member { + Err(_) => break, + Ok(member) => state = state.member(member), + } - Ok(_) => { - match color_fallible_syntax(&MemberShape, checkpoint.iterator, context) { - Err(_) => { - // we saw a dot but not a member (but we saw at least one member), - // so don't commit 
the dot but return successfully - return Ok(()); - } + let dot = token_nodes.expand_syntax(DotShape); - Ok(_) => { - // we saw a dot and a member, so commit it and continue on - checkpoint.commit(); - } - } - } + match dot { + Err(_) => break, + Ok(dot) => state = state.dot(dot), } } + + let path = state.into_path(token_nodes.err_next_token("column path"))?; + + Ok(ColumnPathSyntax { + path: path.item, + tag: path.tag, + }) } } @@ -609,126 +456,25 @@ impl PrettyDebugWithSource for ColumnPathSyntax { } } -impl ExpandSyntax for ColumnPathShape { - type Output = ColumnPathSyntax; - - fn name(&self) -> &'static str { - "column path" - } - - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - expand_column_path(token_nodes, context) - } -} - #[derive(Debug, Copy, Clone)] pub struct MemberShape; -impl FallibleColorSyntax for MemberShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "MemberShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let bare = - color_fallible_syntax_with(&BareShape, &FlatShape::BareMember, token_nodes, context); - - if bare.is_ok() { - return Ok(()); - } - - // If we don't have a bare word, we'll look for a string - - // Look for a string token. If we don't find one, fail - color_fallible_syntax_with(&StringShape, &FlatShape::StringMember, token_nodes, context) - } -} - -#[derive(Debug, Copy, Clone)] -struct IntMemberShape; - -impl ExpandSyntax for IntMemberShape { - type Output = Member; - - fn name(&self) -> &'static str { - "integer member" - } - - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - token_nodes.atomic_parse(|token_nodes| { - let next = expand_atom( - token_nodes, - "integer member", - context, - ExpansionRule::new().separate_members(), - )?; - - match next.unspanned { - UnspannedAtomicToken::Number { - number: RawNumber::Int(int), - } => Ok(Member::Int( - BigInt::from_str(int.slice(context.source)).map_err(|_| { - ParseError::internal_error( - "can't convert from string to big int".spanned(int), - ) - })?, - int, - )), - - UnspannedAtomicToken::Word { text } => { - let int = BigInt::from_str(text.slice(context.source)); - - match int { - Ok(int) => Ok(Member::Int(int, text)), - Err(_) => Err(ParseError::mismatch("integer member", "word".spanned(text))), - } - } - - other => Err(ParseError::mismatch( - "integer member", - other.type_name().spanned(next.span), - )), - } - }) - } -} - impl ExpandSyntax for MemberShape { - type Output = Member; + type Output = Result; fn name(&self) -> &'static str { "column" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result { - if let Ok(int) = expand_syntax(&IntMemberShape, token_nodes, context) { + fn expand<'a, 'b>(&self, token_nodes: &mut TokensIterator<'_>) -> Result { + if let Ok(int) = token_nodes.expand_syntax(IntMemberShape) { return Ok(int); } - let bare = BareShape.test(token_nodes, context); - if let Some(peeked) = bare { - let node = peeked.not_eof("column")?.commit(); - return Ok(Member::Bare(node.span())); + let bare = token_nodes.expand_syntax(BareShape); + + if let Ok(bare) = bare { + return Ok(Member::Bare(bare.span())); } /* KATZ */ @@ -743,18 +489,34 @@ impl ExpandSyntax for MemberShape { return Ok(Member::Number(n, span)) }*/ - let string = 
StringShape.test(token_nodes, context); + let string = token_nodes.expand_syntax(StringShape); - if let Some(peeked) = string { - let node = peeked.not_eof("column")?.commit(); - let (outer, inner) = node.as_string().ok_or_else(|| { - ParseError::internal_error("can't convert node to string".spanned(node.span())) - })?; - - return Ok(Member::String(outer, inner)); + if let Ok(syntax) = string { + return Ok(Member::String(syntax.span, syntax.inner)); } - Err(token_nodes.peek_any().type_error("column")) + Err(token_nodes.peek().type_error("column")) + } +} + +#[derive(Debug, Copy, Clone)] +struct IntMemberShape; + +impl ExpandSyntax for IntMemberShape { + type Output = Result; + + fn name(&self) -> &'static str { + "integer member" + } + + fn expand<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + ) -> Result { + token_nodes + .expand_syntax(IntShape) + .map(|int| Member::int(int.span(), &token_nodes.source())) + .or_else(|_| Err(token_nodes.err_next_token("integer member"))) } } @@ -764,127 +526,36 @@ pub struct DotShape; #[derive(Debug, Copy, Clone)] pub struct ColorableDotShape; -impl FallibleColorSyntax for ColorableDotShape { - type Info = (); - type Input = FlatShape; - - fn name(&self) -> &'static str { - "ColorableDotShape" - } - - fn color_syntax<'a, 'b>( - &self, - input: &FlatShape, - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result<(), ShellError> { - let peeked = token_nodes.peek_any().not_eof("dot")?; - - match peeked.node { - node if node.is_dot() => { - peeked.commit(); - token_nodes.color_shape((*input).spanned(node.span())); - Ok(()) - } - - other => Err(ShellError::type_error( - "dot", - other.type_name().spanned(other.span()), - )), - } - } -} - -impl SkipSyntax for DotShape { - fn skip<'a, 'b>( - &self, - token_nodes: &mut TokensIterator<'_>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - expand_syntax(self, token_nodes, context)?; - - Ok(()) - } -} - impl ExpandSyntax for DotShape { - type Output = Span; + type Output = Result; fn name(&self) -> &'static str { "dot" } - fn expand_syntax<'a, 'b>( - &self, - token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result { - parse_single_node(token_nodes, "dot", |token, token_span, _| { - Ok(match token { - UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => token_span, - _ => { - return Err(ParseError::mismatch( - "dot", - token.type_name().spanned(token_span), - )) - } - }) - }) + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + token_nodes.expand_token(DotType, |token| Ok((FlatShape::Dot, token.span()))) + } +} + +#[derive(Debug, Copy, Clone)] +struct DotDotShape; + +impl ExpandSyntax for DotDotShape { + type Output = Result; + + fn name(&self) -> &'static str { + "dotdot" + } + + fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result { + token_nodes.expand_token(DotDotType, |token| Ok((FlatShape::DotDot, token.span()))) } } #[derive(Debug, Copy, Clone)] pub struct InfixShape; -impl FallibleColorSyntax for InfixShape { - type Info = (); - type Input = (); - - fn name(&self) -> &'static str { - "InfixShape" - } - - fn color_syntax<'a, 'b>( - &self, - _input: &(), - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result<(), ShellError> { - let checkpoint = token_nodes.checkpoint(); - - // An infix operator must be prefixed by whitespace. 
If no whitespace was found, fail - color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; - - // Parse the next TokenNode after the whitespace - let operator_span = parse_single_node( - checkpoint.iterator, - "infix operator", - |token, token_span, _| { - match token { - // If it's an operator (and not `.`), it's a match - UnspannedToken::CompareOperator(_operator) => Ok(token_span), - - // Otherwise, it's not a match - _ => Err(ParseError::mismatch( - "infix operator", - token.type_name().spanned(token_span), - )), - } - }, - )?; - - checkpoint - .iterator - .color_shape(FlatShape::CompareOperator.spanned(operator_span)); - - // An infix operator must be followed by whitespace. If no whitespace was found, fail - color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; - - checkpoint.commit(); - Ok(()) - } -} - #[derive(Debug, Clone)] pub struct InfixSyntax { infix: Spanned<(Span, InfixInnerSyntax, Span)>, @@ -903,32 +574,29 @@ impl PrettyDebugWithSource for InfixSyntax { } impl ExpandSyntax for InfixShape { - type Output = InfixSyntax; + type Output = Result; fn name(&self) -> &'static str { "infix operator" } - fn expand_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Result { - let mut checkpoint = token_nodes.checkpoint(); + ) -> Result { + token_nodes.atomic_parse(|token_nodes| { + // An infix operator must be prefixed by whitespace + let start = token_nodes.expand_syntax(WhitespaceShape)?; - // An infix operator must be prefixed by whitespace - let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + // Parse the next TokenNode after the whitespace + let operator = token_nodes.expand_syntax(InfixInnerShape)?; - // Parse the next TokenNode after the whitespace - let operator = expand_syntax(&InfixInnerShape, &mut checkpoint.iterator, context)?; + // An infix operator must be followed by whitespace + let end = token_nodes.expand_syntax(WhitespaceShape)?; - // An infix operator must be followed by whitespace - let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; - - checkpoint.commit(); - - Ok(InfixSyntax { - infix: (start, operator, end).spanned(start.until(end)), + Ok(InfixSyntax { + infix: (start, operator, end).spanned(start.until(end)), + }) }) } } @@ -954,27 +622,23 @@ impl PrettyDebug for InfixInnerSyntax { pub struct InfixInnerShape; impl ExpandSyntax for InfixInnerShape { - type Output = InfixInnerSyntax; + type Output = Result; fn name(&self) -> &'static str { "infix inner" } - fn expand_syntax<'a, 'b>( + fn expand<'a, 'b>( &self, token_nodes: &'b mut TokensIterator<'a>, - _context: &ExpandContext, - ) -> Result { - parse_single_node(token_nodes, "infix operator", |token, token_span, err| { - Ok(match token { - // If it's a comparison operator, it's a match - UnspannedToken::CompareOperator(operator) => InfixInnerSyntax { - operator: operator.spanned(token_span), + ) -> Result { + token_nodes.expand_token(CompareOperatorType, |(span, operator)| { + Ok(( + FlatShape::CompareOperator, + InfixInnerSyntax { + operator: operator.spanned(span), }, - - // Otherwise, it's not a match - _ => return Err(err.error()), - }) + )) }) } } diff --git a/crates/nu-parser/src/hir/syntax_shape/flat_shape.rs b/crates/nu-parser/src/hir/syntax_shape/flat_shape.rs index 11445950a5..9851540755 100644 --- a/crates/nu-parser/src/hir/syntax_shape/flat_shape.rs +++ b/crates/nu-parser/src/hir/syntax_shape/flat_shape.rs @@ -1,13 +1,16 @@ use 
crate::parse::flag::{Flag, FlagKind}; +use crate::parse::number::RawNumber; use crate::parse::operator::EvaluationOperator; -use crate::parse::token_tree::{Delimiter, TokenNode}; -use crate::parse::tokens::{RawNumber, UnspannedToken}; -use nu_source::{HasSpan, Span, Spanned, SpannedItem, Text}; +use crate::parse::token_tree::{Delimiter, SpannedToken, Token}; +use nu_protocol::ShellTypeName; +use nu_source::{DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem, Text}; #[derive(Debug, Copy, Clone)] pub enum FlatShape { OpenDelimiter(Delimiter), CloseDelimiter(Delimiter), + Type, + Identifier, ItVariable, Variable, CompareOperator, @@ -21,88 +24,170 @@ pub enum FlatShape { String, Path, Word, + Keyword, Pipe, GlobPattern, Flag, ShorthandFlag, Int, Decimal, + Garbage, Whitespace, Separator, - Error, Comment, Size { number: Span, unit: Span }, } +#[derive(Debug, Clone)] +pub enum ShapeResult { + Success(Spanned), + Fallback { + shape: Spanned, + allowed: Vec, + }, +} + +impl HasSpan for ShapeResult { + fn span(&self) -> Span { + match self { + ShapeResult::Success(shape) => shape.span, + ShapeResult::Fallback { shape, .. } => shape.span, + } + } +} + +impl PrettyDebug for FlatShape { + fn pretty(&self) -> DebugDocBuilder { + unimplemented!() + } +} + +#[derive(Debug, Copy, Clone)] +pub struct TraceShape { + shape: FlatShape, + span: Span, +} + +impl ShellTypeName for TraceShape { + fn type_name(&self) -> &'static str { + self.shape.type_name() + } +} + +impl PrettyDebug for TraceShape { + fn pretty(&self) -> DebugDocBuilder { + self.shape.pretty() + } +} + +impl HasSpan for TraceShape { + fn span(&self) -> Span { + self.span + } +} + +impl ShellTypeName for FlatShape { + fn type_name(&self) -> &'static str { + match self { + FlatShape::OpenDelimiter(Delimiter::Brace) => "open brace", + FlatShape::OpenDelimiter(Delimiter::Paren) => "open paren", + FlatShape::OpenDelimiter(Delimiter::Square) => "open square", + FlatShape::CloseDelimiter(Delimiter::Brace) => "close brace", + FlatShape::CloseDelimiter(Delimiter::Paren) => "close paren", + FlatShape::CloseDelimiter(Delimiter::Square) => "close square", + FlatShape::Type => "type", + FlatShape::Identifier => "identifier", + FlatShape::ItVariable => "$it", + FlatShape::Variable => "variable", + FlatShape::CompareOperator => "comparison", + FlatShape::Dot => "dot", + FlatShape::DotDot => "dotdot", + FlatShape::InternalCommand => "internal command", + FlatShape::ExternalCommand => "external command", + FlatShape::ExternalWord => "external word", + FlatShape::BareMember => "bare member", + FlatShape::StringMember => "string member", + FlatShape::String => "string", + FlatShape::Path => "path", + FlatShape::Word => "word", + FlatShape::Keyword => "keyword", + FlatShape::Pipe => "pipe", + FlatShape::GlobPattern => "glob", + FlatShape::Flag => "flag", + FlatShape::ShorthandFlag => "shorthand flag", + FlatShape::Int => "int", + FlatShape::Decimal => "decimal", + FlatShape::Garbage => "garbage", + FlatShape::Whitespace => "whitespace", + FlatShape::Separator => "separator", + FlatShape::Comment => "comment", + FlatShape::Size { .. 
} => "size", + } + } +} + impl FlatShape { - pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec>) { - match token { - TokenNode::Token(token) => match token.unspanned { - UnspannedToken::Number(RawNumber::Int(_)) => { - shapes.push(FlatShape::Int.spanned(token.span)) - } - UnspannedToken::Number(RawNumber::Decimal(_)) => { - shapes.push(FlatShape::Decimal.spanned(token.span)) - } - UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => { - shapes.push(FlatShape::Dot.spanned(token.span)) - } - UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => { - shapes.push(FlatShape::DotDot.spanned(token.span)) - } - UnspannedToken::CompareOperator(_) => { - shapes.push(FlatShape::CompareOperator.spanned(token.span)) - } - UnspannedToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)), - UnspannedToken::Variable(v) if v.slice(source) == "it" => { - shapes.push(FlatShape::ItVariable.spanned(token.span)) - } - UnspannedToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)), - UnspannedToken::ExternalCommand(_) => { - shapes.push(FlatShape::ExternalCommand.spanned(token.span)) - } - UnspannedToken::ExternalWord => { - shapes.push(FlatShape::ExternalWord.spanned(token.span)) - } - UnspannedToken::GlobPattern => { - shapes.push(FlatShape::GlobPattern.spanned(token.span)) - } - UnspannedToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)), - }, - TokenNode::Call(_) => unimplemented!(), - TokenNode::Nodes(nodes) => { - for node in &nodes.item { - FlatShape::from(node, source, shapes); - } + pub fn into_trace_shape(self, span: Span) -> TraceShape { + TraceShape { shape: self, span } + } + + pub fn shapes(token: &SpannedToken, source: &Text) -> Vec> { + let mut shapes = vec![]; + + FlatShape::from(token, source, &mut shapes); + shapes + } + + fn from(token: &SpannedToken, source: &Text, shapes: &mut Vec>) { + let span = token.span(); + + match token.unspanned() { + Token::Number(RawNumber::Int(_)) => shapes.push(FlatShape::Int.spanned(span)), + Token::Number(RawNumber::Decimal(_)) => shapes.push(FlatShape::Decimal.spanned(span)), + Token::EvaluationOperator(EvaluationOperator::Dot) => { + shapes.push(FlatShape::Dot.spanned(span)) } - TokenNode::Delimited(v) => { - shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0)); - for token in &v.item.children { + Token::EvaluationOperator(EvaluationOperator::DotDot) => { + shapes.push(FlatShape::DotDot.spanned(span)) + } + Token::CompareOperator(_) => shapes.push(FlatShape::CompareOperator.spanned(span)), + Token::String(_) => shapes.push(FlatShape::String.spanned(span)), + Token::Variable(v) if v.slice(source) == "it" => { + shapes.push(FlatShape::ItVariable.spanned(span)) + } + Token::Variable(_) => shapes.push(FlatShape::Variable.spanned(span)), + Token::ItVariable(_) => shapes.push(FlatShape::ItVariable.spanned(span)), + Token::ExternalCommand(_) => shapes.push(FlatShape::ExternalCommand.spanned(span)), + Token::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(span)), + Token::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(span)), + Token::Bare => shapes.push(FlatShape::Word.spanned(span)), + Token::Call(_) => unimplemented!(), + Token::Delimited(v) => { + shapes.push(FlatShape::OpenDelimiter(v.delimiter).spanned(v.spans.0)); + for token in &v.children { FlatShape::from(token, source, shapes); } - shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1)); + 
shapes.push(FlatShape::CloseDelimiter(v.delimiter).spanned(v.spans.1)); } - TokenNode::Pipeline(pipeline) => { + Token::Pipeline(pipeline) => { for part in &pipeline.parts { if part.pipe.is_some() { shapes.push(FlatShape::Pipe.spanned(part.span())); } } } - TokenNode::Flag(Flag { + Token::Flag(Flag { kind: FlagKind::Longhand, - span, .. - }) => shapes.push(FlatShape::Flag.spanned(*span)), - TokenNode::Flag(Flag { + }) => shapes.push(FlatShape::Flag.spanned(span)), + Token::Flag(Flag { kind: FlagKind::Shorthand, - span, .. - }) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)), - TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())), - TokenNode::Separator(_) => shapes.push(FlatShape::Separator.spanned(token.span())), - TokenNode::Comment(_) => shapes.push(FlatShape::Comment.spanned(token.span())), - TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)), + }) => shapes.push(FlatShape::ShorthandFlag.spanned(span)), + Token::Garbage => shapes.push(FlatShape::Garbage.spanned(span)), + Token::Whitespace => shapes.push(FlatShape::Whitespace.spanned(span)), + Token::Separator => shapes.push(FlatShape::Separator.spanned(span)), + Token::Comment(_) => shapes.push(FlatShape::Comment.spanned(span)), } } } diff --git a/crates/nu-parser/src/hir/tokens_iterator.rs b/crates/nu-parser/src/hir/tokens_iterator.rs index dca99a5913..56661ba182 100644 --- a/crates/nu-parser/src/hir/tokens_iterator.rs +++ b/crates/nu-parser/src/hir/tokens_iterator.rs @@ -1,37 +1,34 @@ pub(crate) mod debug; +pub(crate) mod into_shapes; +pub(crate) mod pattern; +pub(crate) mod state; -use self::debug::{ColorTracer, ExpandTracer}; +use self::debug::ExpandTracer; +use self::into_shapes::IntoShapes; +use self::state::{Peeked, TokensIteratorState}; + +use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult}; +use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax, ExpressionListShape}; +use crate::hir::SpannedExpression; +use crate::parse::token_tree::{BlockType, DelimitedNode, SpannedToken, SquareType, TokenType}; -use crate::hir::syntax_shape::FlatShape; -use crate::hir::Expression; -use crate::TokenNode; use getset::{Getters, MutGetters}; -use nu_errors::{ParseError, ShellError}; +use nu_errors::ParseError; use nu_protocol::SpannedTypeName; -use nu_source::{HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Text}; +use nu_source::{ + HasFallibleSpan, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text, +}; +use std::borrow::Borrow; +use std::sync::Arc; -#[derive(Getters, Debug)] -pub struct TokensIteratorState<'content> { - tokens: &'content [TokenNode], - span: Span, - skip_ws: bool, - index: usize, - seen: indexmap::IndexSet, - #[get = "pub"] - shapes: Vec>, -} - -#[derive(Getters, MutGetters, Debug)] +#[derive(Getters, MutGetters, Clone, Debug)] pub struct TokensIterator<'content> { #[get = "pub"] #[get_mut = "pub"] state: TokensIteratorState<'content>, #[get = "pub"] #[get_mut = "pub"] - color_tracer: ColorTracer, - #[get = "pub"] - #[get_mut = "pub"] - expand_tracer: ExpandTracer, + expand_tracer: ExpandTracer, } #[derive(Debug)] @@ -63,217 +60,236 @@ impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> { } } -#[derive(Debug)] -pub struct Peeked<'content, 'me> { - pub(crate) node: Option<&'content TokenNode>, - iterator: &'me mut TokensIterator<'content>, - pub from: usize, - pub to: usize, -} +// For parse_command +impl<'content> TokensIterator<'content> { + pub fn sort_shapes(&mut self) { + // This is pretty 
dubious, but it works. We should look into a better algorithm that doesn't end up requiring + // this solution. -impl<'content, 'me> Peeked<'content, 'me> { - pub fn commit(&mut self) -> Option<&'content TokenNode> { - let Peeked { - node, - iterator, - from, - to, - } = self; - - let node = (*node)?; - iterator.commit(*from, *to); - Some(node) + self.state + .shapes + .sort_by(|a, b| a.span().start().cmp(&b.span().start())); } - pub fn not_eof(self, expected: &'static str) -> Result, ParseError> { - match self.node { - None => Err(ParseError::unexpected_eof( - expected, - self.iterator.eof_span(), - )), - Some(node) => Ok(PeekedNode { - node, - iterator: self.iterator, - from: self.from, - to: self.to, - }), + /// Run a block of code, retrieving the shapes that were created during the block. This is + /// used by `parse_command` to associate shapes with a particular flag. + pub fn shapes_for<'me, T>( + &'me mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> Result, + ) -> (Result, Vec) { + let index = self.state.index; + let mut shapes = vec![]; + let mut errors = self.state.errors.clone(); + + let seen = self.state.seen.clone(); + std::mem::swap(&mut self.state.shapes, &mut shapes); + std::mem::swap(&mut self.state.errors, &mut errors); + + let checkpoint = Checkpoint { + iterator: self, + index, + seen, + committed: false, + shape_start: 0, + }; + + let value = block(checkpoint.iterator); + + let value = match value { + Err(err) => { + drop(checkpoint); + std::mem::swap(&mut self.state.shapes, &mut shapes); + std::mem::swap(&mut self.state.errors, &mut errors); + return (Err(err), vec![]); + } + + Ok(value) => value, + }; + + checkpoint.commit(); + std::mem::swap(&mut self.state.shapes, &mut shapes); + + (Ok(value), shapes) + } + + pub fn extract(&mut self, f: impl Fn(&SpannedToken) -> Option) -> Option<(usize, T)> { + let state = &mut self.state; + + for (i, item) in state.tokens.iter().enumerate() { + if state.seen.contains(&i) { + continue; + } + + match f(item) { + None => { + continue; + } + Some(value) => { + state.seen.insert(i); + return Some((i, value)); + } + } } + + self.move_to(0); + + None } - pub fn type_error(&self, expected: &'static str) -> ParseError { - peek_error(self.node, self.iterator.eof_span(), expected) + pub fn remove(&mut self, position: usize) { + self.state.seen.insert(position); } } -#[derive(Debug)] -pub struct PeekedNode<'content, 'me> { - pub(crate) node: &'content TokenNode, - iterator: &'me mut TokensIterator<'content>, - from: usize, - to: usize, -} - -impl<'content, 'me> PeekedNode<'content, 'me> { - pub fn commit(self) -> &'content TokenNode { - let PeekedNode { - node, - iterator, - from, - to, - } = self; - - iterator.commit(from, to); - node +// Delimited +impl<'content> TokensIterator<'content> { + pub fn block(&mut self) -> Result>, ParseError> { + self.expand_token_with_token_nodes(BlockType, |node, token_nodes| { + token_nodes.delimited(node) + }) } - pub fn rollback(self) {} - - pub fn type_error(&self, expected: &'static str) -> ParseError { - peek_error(Some(self.node), self.iterator.eof_span(), expected) + pub fn square(&mut self) -> Result>, ParseError> { + self.expand_token_with_token_nodes(SquareType, |node, token_nodes| { + token_nodes.delimited(node) + }) } -} -pub fn peek_error(node: Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError { - match node { - None => ParseError::unexpected_eof(expected, eof_span), - Some(node) => ParseError::mismatch(expected, node.spanned_type_name()), + fn delimited( 
+ &mut self, + DelimitedNode { + delimiter, + spans, + children, + }: DelimitedNode, + ) -> Result<(Vec, Spanned>), ParseError> { + let span = spans.0.until(spans.1); + let (child_shapes, expr) = self.child(children[..].spanned(span), |token_nodes| { + token_nodes.expand_infallible(ExpressionListShape).exprs + }); + + let mut shapes = vec![ShapeResult::Success( + FlatShape::OpenDelimiter(delimiter).spanned(spans.0), + )]; + shapes.extend(child_shapes); + shapes.push(ShapeResult::Success( + FlatShape::CloseDelimiter(delimiter).spanned(spans.1), + )); + + Ok((shapes, expr)) } } impl<'content> TokensIterator<'content> { pub fn new( - items: &'content [TokenNode], + items: &'content [SpannedToken], + context: ExpandContext<'content>, span: Span, - source: Text, - skip_ws: bool, ) -> TokensIterator<'content> { + let source = context.source(); + TokensIterator { state: TokensIteratorState { tokens: items, span, - skip_ws, index: 0, seen: indexmap::IndexSet::new(), shapes: vec![], + errors: indexmap::IndexMap::new(), + context: Arc::new(context), }, - color_tracer: ColorTracer::new(source.clone()), - expand_tracer: ExpandTracer::new(source), + expand_tracer: ExpandTracer::new("Expand Trace", source.clone()), } } - pub fn all( - tokens: &'content [TokenNode], - source: Text, - span: Span, - ) -> TokensIterator<'content> { - TokensIterator::new(tokens, span, source, false) - } - pub fn len(&self) -> usize { self.state.tokens.len() } pub fn is_empty(&self) -> bool { - self.len() == 0 + self.state.tokens.is_empty() } - pub fn spanned( - &mut self, - block: impl FnOnce(&mut TokensIterator<'content>) -> T, - ) -> Spanned { - let start = self.span_at_cursor(); + pub fn source(&self) -> Text { + self.state.context.source().clone() + } - let result = block(self); + pub fn context(&self) -> &ExpandContext { + &self.state.context + } - let end = self.span_at_cursor(); - - result.spanned(start.until(end)) + pub fn color_result(&mut self, shape: ShapeResult) { + match shape { + ShapeResult::Success(shape) => self.color_shape(shape), + ShapeResult::Fallback { shape, allowed } => self.color_err(shape, allowed), + } } pub fn color_shape(&mut self, shape: Spanned) { - self.with_color_tracer(|_, tracer| tracer.add_shape(shape)); - self.state.shapes.push(shape); + self.with_tracer(|_, tracer| tracer.add_shape(shape.into_trace_shape(shape.span))); + self.state.shapes.push(ShapeResult::Success(shape)); } - pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec>)) { - let new_shapes: Vec> = { - let shapes = &mut self.state.shapes; - let len = shapes.len(); - block(shapes); - (len..(shapes.len())).map(|i| shapes[i]).collect() - }; - - self.with_color_tracer(|_, tracer| { - for shape in new_shapes { - tracer.add_shape(shape) - } + pub fn color_err(&mut self, shape: Spanned, valid_shapes: Vec) { + self.with_tracer(|_, tracer| tracer.add_err_shape(shape.into_trace_shape(shape.span))); + self.state.errors.insert(shape.span, valid_shapes.clone()); + self.state.shapes.push(ShapeResult::Fallback { + shape, + allowed: valid_shapes, }); } - pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec>)) { - let shapes = &mut self.state.shapes; - block(shapes); - } + pub fn color_shapes(&mut self, shapes: Vec>) { + self.with_tracer(|_, tracer| { + for shape in &shapes { + tracer.add_shape(shape.into_trace_shape(shape.span)) + } + }); - pub fn sort_shapes(&mut self) { - // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring - // this solution. 
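
For readers following the new coloring flow: the `color_shape`/`color_err` pair above is what makes expansion error-correcting, since a span that fails to expand still receives a shape, plus a record of which shapes would have been accepted there. The following is a standalone, simplified sketch of that bookkeeping; the types here are toy stand-ins, not the actual `FlatShape`/`ShapeResult` definitions from this patch.

```rust
// Simplified model of Success/Fallback shape bookkeeping (toy types, not nu-parser's).
use std::collections::BTreeMap;

type Span = (usize, usize);

#[derive(Debug, Clone, Copy)]
enum FlatShape {
    Word,
    Garbage,
}

#[derive(Debug, Clone)]
enum ShapeResult {
    Success { shape: FlatShape, span: Span },
    Fallback { shape: FlatShape, span: Span, allowed: Vec<String> },
}

#[derive(Default)]
struct Shapes {
    shapes: Vec<ShapeResult>,
    // span -> the shapes that would have been valid at that span
    errors: BTreeMap<Span, Vec<String>>,
}

impl Shapes {
    fn color_shape(&mut self, shape: FlatShape, span: Span) {
        self.shapes.push(ShapeResult::Success { shape, span });
    }

    // Even when expansion fails, the span still gets a (garbage) color,
    // and the expected shapes are remembered for that span.
    fn color_err(&mut self, span: Span, allowed: Vec<String>) {
        self.errors.insert(span, allowed.clone());
        self.shapes
            .push(ShapeResult::Fallback { shape: FlatShape::Garbage, span, allowed });
    }
}

fn main() {
    let mut shapes = Shapes::default();
    shapes.color_shape(FlatShape::Word, (0, 4));
    shapes.color_err((5, 8), vec!["dot".to_string(), "whitespace".to_string()]);
    // Every span is covered: one Success and one Fallback.
    assert_eq!(shapes.shapes.len(), 2);
    println!("{:#?}", shapes.shapes);
    println!("valid at (5, 8): {:?}", shapes.errors.get(&(5, 8)));
}
```

Because every failure is recorded against its span, a caller can later look up which shapes would have been valid at any position in the source.
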
- - self.state - .shapes - .sort_by(|a, b| a.span.start().cmp(&b.span.start())); + for shape in &shapes { + self.state.shapes.push(ShapeResult::Success(*shape)); + } } pub fn child<'me, T>( &'me mut self, - tokens: Spanned<&'me [TokenNode]>, - source: Text, + tokens: Spanned<&'me [SpannedToken]>, block: impl FnOnce(&mut TokensIterator<'me>) -> T, - ) -> T { + ) -> (Vec, T) { let mut shapes = vec![]; std::mem::swap(&mut shapes, &mut self.state.shapes); - let mut color_tracer = ColorTracer::new(source.clone()); - std::mem::swap(&mut color_tracer, &mut self.color_tracer); + let mut errors = self.state.errors.clone(); + std::mem::swap(&mut errors, &mut self.state.errors); - let mut expand_tracer = ExpandTracer::new(source); + let mut expand_tracer = ExpandTracer::new("Expand Trace", self.source()); std::mem::swap(&mut expand_tracer, &mut self.expand_tracer); let mut iterator = TokensIterator { state: TokensIteratorState { tokens: tokens.item, span: tokens.span, - skip_ws: false, index: 0, seen: indexmap::IndexSet::new(), shapes, + errors, + context: self.state.context.clone(), }, - color_tracer, expand_tracer, }; let result = block(&mut iterator); std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes); - std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer); + std::mem::swap(&mut iterator.state.errors, &mut self.state.errors); std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer); - result + (iterator.state.shapes, result) } - pub fn with_color_tracer( + fn with_tracer( &mut self, - block: impl FnOnce(&mut TokensIteratorState, &mut ColorTracer), - ) { - let state = &mut self.state; - let color_tracer = &mut self.color_tracer; - - block(state, color_tracer) - } - - pub fn with_expand_tracer( - &mut self, - block: impl FnOnce(&mut TokensIteratorState, &mut ExpandTracer), + block: impl FnOnce(&mut TokensIteratorState, &mut ExpandTracer), ) { let state = &mut self.state; let tracer = &mut self.expand_tracer; @@ -281,142 +297,10 @@ impl<'content> TokensIterator<'content> { block(state, tracer) } - pub fn color_frame( - &mut self, - desc: &'static str, - block: impl FnOnce(&mut TokensIterator) -> T, - ) -> T { - self.with_color_tracer(|_, tracer| tracer.start(desc)); - - let result = block(self); - - self.with_color_tracer(|_, tracer| { - tracer.success(); - }); - - result + pub fn finish_tracer(&mut self) { + self.with_tracer(|_, tracer| tracer.finish()) } - pub fn expand_frame( - &mut self, - desc: &'static str, - block: impl FnOnce(&mut TokensIterator<'content>) -> Result, - ) -> Result - where - T: std::fmt::Debug + Clone + HasFallibleSpan + 'static, - { - self.with_expand_tracer(|_, tracer| tracer.start(desc)); - - let result = block(self); - - self.with_expand_tracer(|_, tracer| match &result { - Ok(result) => { - tracer.add_result(result.clone()); - tracer.success(); - } - - Err(err) => tracer.failed(err), - }); - - result - } - - pub fn expand_expr_frame( - &mut self, - desc: &'static str, - block: impl FnOnce(&mut TokensIterator) -> Result, - ) -> Result { - self.with_expand_tracer(|_, tracer| tracer.start(desc)); - - let result = block(self); - - self.with_expand_tracer(|_, tracer| match &result { - Ok(expr) => { - tracer.add_expr(expr.clone()); - tracer.success() - } - - Err(err) => tracer.failed(err), - }); - - result - } - - pub fn color_fallible_frame( - &mut self, - desc: &'static str, - block: impl FnOnce(&mut TokensIterator) -> Result, - ) -> Result { - self.with_color_tracer(|_, tracer| tracer.start(desc)); - - if self.at_end() { - 
self.with_color_tracer(|_, tracer| tracer.eof_frame()); - return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); - } - - let result = block(self); - - self.with_color_tracer(|_, tracer| match &result { - Ok(_) => { - tracer.success(); - } - - Err(err) => tracer.failed(err), - }); - - result - } - - /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure - /// that you'll succeed. - pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { - let state = &mut self.state; - - let index = state.index; - - let shape_start = state.shapes.len(); - let seen = state.seen.clone(); - - Checkpoint { - iterator: self, - index, - seen, - committed: false, - - shape_start, - } - } - - /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure - /// that you'll succeed. - pub fn atomic<'me, T>( - &'me mut self, - block: impl FnOnce(&mut TokensIterator<'content>) -> Result, - ) -> Result { - let state = &mut self.state; - - let index = state.index; - - let shape_start = state.shapes.len(); - let seen = state.seen.clone(); - - let checkpoint = Checkpoint { - iterator: self, - index, - seen, - committed: false, - - shape_start, - }; - - let value = block(checkpoint.iterator)?; - - checkpoint.commit(); - Ok(value) - } - - /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure - /// that you'll succeed. pub fn atomic_parse<'me, T, E>( &'me mut self, block: impl FnOnce(&mut TokensIterator<'content>) -> Result, @@ -443,58 +327,12 @@ impl<'content> TokensIterator<'content> { Ok(value) } - /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure - /// that you'll succeed. - pub fn atomic_returning_shapes<'me, T>( - &'me mut self, - block: impl FnOnce(&mut TokensIterator<'content>) -> Result, - ) -> (Result, Vec>) { - let index = self.state.index; - let mut shapes = vec![]; - - let seen = self.state.seen.clone(); - std::mem::swap(&mut self.state.shapes, &mut shapes); - - let checkpoint = Checkpoint { - iterator: self, - index, - seen, - committed: false, - shape_start: 0, - }; - - let value = block(checkpoint.iterator); - - let value = match value { - Err(err) => { - drop(checkpoint); - std::mem::swap(&mut self.state.shapes, &mut shapes); - return (Err(err), vec![]); - } - - Ok(value) => value, - }; - - checkpoint.commit(); - std::mem::swap(&mut self.state.shapes, &mut shapes); - (Ok(value), shapes) - } - fn eof_span(&self) -> Span { Span::new(self.state.span.end(), self.state.span.end()) } - pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> { - let next = self.peek_any(); - - match next.node { - None => "end".spanned(self.eof_span()), - Some(node) => node.spanned_type_name(), - } - } - pub fn span_at_cursor(&mut self) -> Span { - let next = self.peek_any(); + let next = self.peek(); match next.node { None => self.eof_span(), @@ -502,101 +340,123 @@ impl<'content> TokensIterator<'content> { } } - pub fn remove(&mut self, position: usize) { - self.state.seen.insert(position); - } - pub fn at_end(&self) -> bool { - peek(self, self.state.skip_ws).is_none() - } - - pub fn at_end_possible_ws(&self) -> bool { - peek(self, true).is_none() - } - - pub fn advance(&mut self) { - self.state.seen.insert(self.state.index); - self.state.index += 1; - } - - pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { - let state = &mut self.state; - - for (i, item) in state.tokens.iter().enumerate() { - if state.seen.contains(&i) { - continue; - } 
- - match f(item) { - None => { - continue; - } - Some(value) => { - state.seen.insert(i); - return Some((i, value)); - } - } - } - - None + next_index(&self.state).is_none() } pub fn move_to(&mut self, pos: usize) { self.state.index = pos; } - pub fn restart(&mut self) { - self.state.index = 0; - } + /// Peek the next token in the token stream and return a `Peeked`. + /// + /// # Example + /// + /// ```ignore + /// let peeked = token_nodes.peek().not_eof(); + /// let node = peeked.node; + /// match node.unspanned() { + /// Token::Whitespace => { + /// let node = peeked.commit(); + /// return Ok(node.span) + /// } + /// other => return Err(ParseError::mismatch("whitespace", node.spanned_type_name())) + /// } + /// ``` + pub fn peek<'me>(&'me mut self) -> Peeked<'content, 'me> { + let state = self.state(); + let len = state.tokens.len(); + let from = state.index; - // pub fn clone(&self) -> TokensIterator<'content> { - // let state = &self.state; - // TokensIterator { - // state: TokensIteratorState { - // tokens: state.tokens, - // span: state.span, - // index: state.index, - // seen: state.seen.clone(), - // skip_ws: state.skip_ws, - // - // shapes: state.shapes.clone(), - // }, - // color_tracer: self.color_tracer.clone(), - // expand_tracer: self.expand_tracer.clone(), - // } - // } + let index = next_index(state); - // Peek the next token, not including whitespace - pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> { - start_next(self, true) - } + let (node, to) = match index { + None => (None, len), - // Peek the next token, including whitespace - pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> { - start_next(self, false) - } + Some(to) => (Some(&state.tokens[to]), to + 1), + }; - // Peek the next token, including whitespace, but not EOF - pub fn peek_any_token<'me, T>( - &'me mut self, - expected: &'static str, - block: impl FnOnce(&'content TokenNode) -> Result, - ) -> Result { - let peeked = start_next(self, false); - let peeked = peeked.not_eof(expected); - - match peeked { - Err(err) => Err(err), - Ok(peeked) => match block(peeked.node) { - Err(err) => Err(err), - Ok(val) => { - peeked.commit(); - Ok(val) - } - }, + Peeked { + node, + iterator: self, + from, + to, } } + /// Produce an error corresponding to the next token. + /// + /// If the next token is EOF, produce an `UnexpectedEof`. Otherwise, produce a `Mismatch`. 
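
The doc example above relies on the peek/commit discipline that `Peeked` provides: peeking never consumes a token, and only an explicit `commit()` marks it as seen. Below is a minimal, runnable model of that discipline with toy `Token`/`ParseError` types; it illustrates the pattern rather than the nu-parser implementation.

```rust
// Toy model of peek()/not_eof()/commit(): peek exposes the next token without
// consuming it, and only commit() advances the cursor.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Token {
    Whitespace,
    Word,
}

#[derive(Debug)]
struct ParseError(String);

struct Tokens {
    tokens: Vec<Token>,
    index: usize,
}

impl Tokens {
    // Peek without consuming; EOF is an error, mirroring `not_eof`.
    fn peek_not_eof(&self, expected: &str) -> Result<Token, ParseError> {
        self.tokens
            .get(self.index)
            .copied()
            .ok_or_else(|| ParseError(format!("unexpected EOF, expected {}", expected)))
    }

    // Consume the peeked token, mirroring `commit`.
    fn commit(&mut self) {
        self.index += 1;
    }

    fn expect_whitespace(&mut self) -> Result<(), ParseError> {
        match self.peek_not_eof("whitespace")? {
            Token::Whitespace => {
                self.commit();
                Ok(())
            }
            other => Err(ParseError(format!("expected whitespace, found {:?}", other))),
        }
    }
}

fn main() -> Result<(), ParseError> {
    let mut tokens = Tokens { tokens: vec![Token::Whitespace, Token::Word], index: 0 };
    tokens.expect_whitespace()?; // consumes the whitespace
    assert_eq!(tokens.peek_not_eof("word")?, Token::Word); // peeking does not consume
    assert_eq!(tokens.index, 1);
    Ok(())
}
```
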
+ pub fn err_next_token(&mut self, expected: &'static str) -> ParseError { + match next_index(&self.state) { + None => ParseError::unexpected_eof(expected, self.eof_span()), + Some(index) => { + ParseError::mismatch(expected, self.state.tokens[index].spanned_type_name()) + } + } + } + + fn expand_token_with_token_nodes< + 'me, + T: 'me, + U: IntoSpanned, + V: HasFallibleSpan, + F: IntoShapes, + >( + &'me mut self, + expected: impl TokenType, + block: impl FnOnce(T, &mut Self) -> Result<(F, U), ParseError>, + ) -> Result { + let desc = expected.desc(); + + let peeked = self.peek().not_eof(desc.borrow())?; + + let (shapes, val) = { + let node = peeked.node; + let type_name = node.spanned_type_name(); + + let func = Box::new(|| Err(ParseError::mismatch(desc.clone().into_owned(), type_name))); + + match expected.extract_token_value(node, &func) { + Err(err) => return Err(err), + Ok(value) => match block(value, peeked.iterator) { + Err(err) => return Err(err), + Ok((shape, val)) => { + let span = peeked.node.span(); + peeked.commit(); + (shape.into_shapes(span), val.into_spanned(span)) + } + }, + } + }; + + for shape in &shapes { + self.color_result(shape.clone()); + } + + Ok(val) + } + + /// Expand and color a single token. Takes an `impl TokenType` and produces + /// (() | FlatShape | Vec>, Output) (or an error). + /// + /// If a single FlatShape is produced, it is annotated with the span of the + /// original token. Otherwise, each FlatShape in the list must already be + /// annotated. + pub fn expand_token<'me, T, U, V, F>( + &'me mut self, + expected: impl TokenType, + block: impl FnOnce(T) -> Result<(F, U), ParseError>, + ) -> Result + where + T: 'me, + U: IntoSpanned, + V: HasFallibleSpan, + F: IntoShapes, + { + self.expand_token_with_token_nodes(expected, |value, _| block(value)) + } + fn commit(&mut self, from: usize, to: usize) { for index in from..to { self.state.seen.insert(index); @@ -605,33 +465,103 @@ impl<'content> TokensIterator<'content> { self.state.index = to; } - pub fn pos(&self, skip_ws: bool) -> Option { - peek_pos(self, skip_ws) + pub fn debug_remaining(&self) -> Vec { + let mut tokens: TokensIterator = self.clone(); + tokens.move_to(0); + tokens.cloned().collect() } - pub fn debug_remaining(&self) -> Vec { - // TODO: TODO: TODO: Clean up - vec![] - // let mut tokens = self.clone(); - // tokens.restart(); - // tokens.cloned().collect() + /// Expand an `ExpandSyntax` whose output is a `Result`, producing either the shape's output + /// or a `ParseError`. If the token stream is at EOF, this method produces a ParseError + /// (`UnexpectedEof`). + /// + /// You must use `expand_syntax` if the `Output` of the `ExpandSyntax` is a `Result`, but + /// it's difficult to model this in the Rust type system. + pub fn expand_syntax( + &mut self, + shape: impl ExpandSyntax>, + ) -> Result + where + U: std::fmt::Debug + HasFallibleSpan + PrettyDebugWithSource + Clone + 'static, + { + if self.at_end() { + self.with_tracer(|_, tracer| tracer.start(shape.name(), None)); + self.with_tracer(|_, tracer| tracer.eof_frame()); + return Err(ParseError::unexpected_eof(shape.name(), self.eof_span())); + } + + let (result, added_shapes) = self.expand(shape); + + match &result { + Ok(val) => self.finish_expand(val, added_shapes), + Err(err) => self.with_tracer(|_, tracer| tracer.failed(err)), + } + + result + } + + /// Expand an `impl ExpandSyntax` and produce its Output. Use `expand_infallible` if the + /// `ExpandSyntax` cannot produce a `Result`. 
You must also use `ExpandSyntax` if EOF + /// is an error. + /// + /// The purpose of `expand_infallible` is to clearly mark the infallible path through + /// and entire list of tokens that produces a fully colored version of the source. + /// + /// If the `ExpandSyntax` can poroduce a `Result`, make sure to use `expand_syntax`, + /// which will correctly show the error in the trace. + pub fn expand_infallible(&mut self, shape: impl ExpandSyntax) -> U + where + U: std::fmt::Debug + PrettyDebugWithSource + HasFallibleSpan + Clone + 'static, + { + let (result, added_shapes) = self.expand(shape); + + self.finish_expand(&result, added_shapes); + + result + } + + fn finish_expand(&mut self, val: &V, added_shapes: usize) + where + V: PrettyDebugWithSource + HasFallibleSpan + Clone, + { + self.with_tracer(|_, tracer| { + if val.maybe_span().is_some() || added_shapes > 0 { + tracer.add_result(val.clone()); + } + + tracer.success(); + }) + } + + fn expand(&mut self, shape: impl ExpandSyntax) -> (U, usize) + where + U: std::fmt::Debug + Clone + 'static, + { + let desc = shape.name(); + self.with_tracer(|state, tracer| { + tracer.start( + desc, + next_index(state).map(|index| state.tokens[index].clone()), + ) + }); + + let start_shapes = self.state.shapes.len(); + let result = shape.expand(self); + let added_shapes = self.state.shapes.len() - start_shapes; + + (result, added_shapes) } } impl<'content> Iterator for TokensIterator<'content> { - type Item = &'content TokenNode; + type Item = &'content SpannedToken; fn next(&mut self) -> Option { - next(self, self.state.skip_ws) + next(self) } } -fn peek<'content, 'me>( - iterator: &'me TokensIterator<'content>, - skip_ws: bool, -) -> Option<&'me TokenNode> { - let state = iterator.state(); - +fn next_index(state: &TokensIteratorState) -> Option { let mut to = state.index; loop { @@ -648,127 +578,25 @@ fn peek<'content, 'me>( return None; } - let node = &state.tokens[to]; - - match node { - TokenNode::Whitespace(_) if skip_ws => { - to += 1; - } - _ => { - return Some(node); - } - } - } -} - -fn peek_pos(iterator: &TokensIterator<'_>, skip_ws: bool) -> Option { - let state = iterator.state(); - - let mut to = state.index; - - loop { - if to >= state.tokens.len() { - return None; - } - - if state.seen.contains(&to) { - to += 1; - continue; - } - - if to >= state.tokens.len() { - return None; - } - - let node = &state.tokens[to]; - - match node { - TokenNode::Whitespace(_) if skip_ws => { - to += 1; - } - _ => return Some(to), - } - } -} - -fn start_next<'content, 'me>( - iterator: &'me mut TokensIterator<'content>, - skip_ws: bool, -) -> Peeked<'content, 'me> { - let state = iterator.state(); - - let from = state.index; - let mut to = state.index; - - loop { - if to >= state.tokens.len() { - return Peeked { - node: None, - iterator, - from, - to, - }; - } - - if state.seen.contains(&to) { - to += 1; - continue; - } - - if to >= state.tokens.len() { - return Peeked { - node: None, - iterator, - from, - to, - }; - } - - let node = &state.tokens[to]; - - match node { - TokenNode::Whitespace(_) if skip_ws => { - to += 1; - } - _ => { - to += 1; - return Peeked { - node: Some(node), - iterator, - from, - to, - }; - } - } + return Some(to); } } fn next<'me, 'content>( iterator: &'me mut TokensIterator<'content>, - skip_ws: bool, -) -> Option<&'content TokenNode> { - loop { - if iterator.state().index >= iterator.state().tokens.len() { - return None; +) -> Option<&'content SpannedToken> { + let next = next_index(&iterator.state); + let len = iterator.len(); + + 
match next { + None => { + iterator.move_to(len); + None } - if iterator.state().seen.contains(&iterator.state().index) { - iterator.advance(); - continue; - } - - if iterator.state().index >= iterator.state().tokens.len() { - return None; - } - - match &iterator.state().tokens[iterator.state().index] { - TokenNode::Whitespace(_) if skip_ws => { - iterator.advance(); - } - other => { - iterator.advance(); - return Some(other); - } + Some(index) => { + iterator.move_to(index + 1); + Some(&iterator.state.tokens[index]) } } } diff --git a/crates/nu-parser/src/hir/tokens_iterator/debug/color_trace.rs b/crates/nu-parser/src/hir/tokens_iterator/debug/color_trace.rs index 85d0389b66..5c843702a2 100644 --- a/crates/nu-parser/src/hir/tokens_iterator/debug/color_trace.rs +++ b/crates/nu-parser/src/hir/tokens_iterator/debug/color_trace.rs @@ -1,7 +1,7 @@ -use crate::hir::syntax_shape::FlatShape; +use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult}; use ansi_term::Color; use log::trace; -use nu_errors::ShellError; +use nu_errors::{ParseError, ShellError}; use nu_source::{Spanned, Text}; use ptree::*; use std::borrow::Cow; @@ -10,14 +10,24 @@ use std::io; #[derive(Debug, Clone)] pub enum FrameChild { #[allow(unused)] - Shape(Spanned), + Shape(ShapeResult), Frame(ColorFrame), } impl FrameChild { fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { match self { - FrameChild::Shape(shape) => write!( + FrameChild::Shape(ShapeResult::Success(shape)) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", shape.item)), + shape.span.slice(text) + ), + + FrameChild::Shape(ShapeResult::Fallback { shape, .. }) => write!( f, "{} {:?}", Color::White @@ -43,7 +53,7 @@ impl FrameChild { pub struct ColorFrame { description: &'static str, children: Vec, - error: Option, + error: Option, } impl ColorFrame { @@ -98,8 +108,7 @@ impl ColorFrame { .collect() } - #[allow(unused)] - fn add_shape(&mut self, shape: Spanned) { + fn add_shape(&mut self, shape: ShapeResult) { self.children.push(FrameChild::Shape(shape)) } @@ -107,10 +116,10 @@ impl ColorFrame { self.any_child_shape(|_| true) } - fn any_child_shape(&self, predicate: impl Fn(Spanned) -> bool) -> bool { + fn any_child_shape(&self, predicate: impl Fn(&ShapeResult) -> bool) -> bool { for item in &self.children { if let FrameChild::Shape(shape) = item { - if predicate(*shape) { + if predicate(shape) { return true; } } @@ -172,14 +181,24 @@ impl ColorFrame { #[derive(Debug, Clone)] pub enum TreeChild { - Shape(Spanned, Text), + Shape(ShapeResult, Text), Frame(ColorFrame, Text), } impl TreeChild { fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { match self { - TreeChild::Shape(shape, text) => write!( + TreeChild::Shape(ShapeResult::Success(shape), text) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", shape.item)), + shape.span.slice(text) + ), + + TreeChild::Shape(ShapeResult::Fallback { shape, .. 
}, text) => write!( f, "{} {:?}", Color::White @@ -290,8 +309,7 @@ impl ColorTracer { } } - #[allow(unused)] - pub fn add_shape(&mut self, shape: Spanned) { + pub fn add_shape(&mut self, shape: ShapeResult) { self.current_frame().add_shape(shape); } @@ -302,7 +320,7 @@ impl ColorTracer { .push(FrameChild::Frame(current)); } - pub fn failed(&mut self, error: &ShellError) { + pub fn failed(&mut self, error: &ParseError) { let mut current = self.pop_frame(); current.error = Some(error.clone()); self.current_frame() diff --git a/crates/nu-parser/src/hir/tokens_iterator/debug/expand_trace.rs b/crates/nu-parser/src/hir/tokens_iterator/debug/expand_trace.rs index 5330d97544..25a92faafe 100644 --- a/crates/nu-parser/src/hir/tokens_iterator/debug/expand_trace.rs +++ b/crates/nu-parser/src/hir/tokens_iterator/debug/expand_trace.rs @@ -1,26 +1,44 @@ -use crate::hir::Expression; +use crate::hir::syntax_shape::flat_shape::TraceShape; +use crate::hir::SpannedExpression; +use crate::parse::token_tree::SpannedToken; use ansi_term::Color; use log::trace; -use nu_errors::ParseError; -use nu_protocol::ShellTypeName; -use nu_source::{DebugDoc, PrettyDebug, PrettyDebugWithSource, Text}; +use nu_errors::{ParseError, ParseErrorReason}; +use nu_protocol::{ShellTypeName, SpannedTypeName}; +use nu_source::{DebugDoc, PrettyDebug, PrettyDebugWithSource, Span, Spanned, Text}; use ptree::*; use std::borrow::Cow; +use std::fmt::Debug; use std::io; -#[derive(Debug)] -pub enum FrameChild { - Expr(Expression), - Frame(ExprFrame), +#[derive(Debug, Clone)] +pub enum FrameChild { + Expr(T), + Shape(Result), + Frame(Box>), Result(DebugDoc), } -impl FrameChild { - fn get_error_leaf(&self) -> Option<&'static str> { +fn err_desc(error: &ParseError) -> &'static str { + match error.reason() { + ParseErrorReason::ExtraTokens { .. } => "extra tokens", + ParseErrorReason::Mismatch { .. } => "mismatch", + ParseErrorReason::ArgumentError { .. } => "argument error", + ParseErrorReason::Eof { .. } => "eof", + ParseErrorReason::InternalError { .. 
} => "internal error", + } +} + +impl FrameChild { + fn get_error_leaf(&self) -> Option<(&'static str, &'static str)> { match self { - FrameChild::Frame(frame) if frame.error.is_some() => { - if frame.children.is_empty() { - Some(frame.description) + FrameChild::Frame(frame) => { + if let Some(error) = &frame.error { + if frame.children.is_empty() { + Some((frame.description, err_desc(error))) + } else { + None + } } else { None } @@ -31,15 +49,34 @@ impl FrameChild { fn to_tree_child(&self, text: &Text) -> TreeChild { match self { - FrameChild::Expr(expr) => TreeChild::OkExpr(expr.clone(), text.clone()), + FrameChild::Expr(expr) => TreeChild::OkExpr { + source: expr.spanned_type_name().span, + desc: expr.spanned_type_name().item, + text: text.clone(), + }, + FrameChild::Shape(Ok(shape)) => TreeChild::OkShape { + source: shape.spanned_type_name().span, + desc: shape.spanned_type_name().item, + text: text.clone(), + fallback: false, + }, + FrameChild::Shape(Err(shape)) => TreeChild::OkShape { + source: shape.spanned_type_name().span, + desc: shape.spanned_type_name().item, + text: text.clone(), + fallback: true, + }, FrameChild::Result(result) => { let result = result.display(); TreeChild::OkNonExpr(result) } FrameChild::Frame(frame) => { - if frame.error.is_some() { + if let Some(err) = &frame.error { if frame.children.is_empty() { - TreeChild::ErrorLeaf(vec![frame.description]) + TreeChild::ErrorLeaf( + vec![(frame.description, err_desc(err))], + frame.token_desc(), + ) } else { TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone()) } @@ -51,14 +88,22 @@ impl FrameChild { } } -#[derive(Debug)] -pub struct ExprFrame { +#[derive(Debug, Clone)] +pub struct ExprFrame { description: &'static str, - children: Vec, + token: Option, + children: Vec>, error: Option, } -impl ExprFrame { +impl ExprFrame { + fn token_desc(&self) -> &'static str { + match &self.token { + None => "EOF", + Some(token) => token.type_name(), + } + } + fn to_tree_frame(&self, text: &Text) -> TreeFrame { let mut children = vec![]; let mut errors = vec![]; @@ -68,7 +113,7 @@ impl ExprFrame { errors.push(error_leaf); continue; } else if !errors.is_empty() { - children.push(TreeChild::ErrorLeaf(errors)); + children.push(TreeChild::ErrorLeaf(errors, self.token_desc())); errors = vec![]; } @@ -76,18 +121,27 @@ impl ExprFrame { } if !errors.is_empty() { - children.push(TreeChild::ErrorLeaf(errors)); + children.push(TreeChild::ErrorLeaf(errors, self.token_desc())); } TreeFrame { description: self.description, + token_desc: self.token_desc(), children, error: self.error.clone(), } } - fn add_expr(&mut self, expr: Expression) { - self.children.push(FrameChild::Expr(expr)) + fn add_return(&mut self, value: T) { + self.children.push(FrameChild::Expr(value)) + } + + fn add_shape(&mut self, shape: TraceShape) { + self.children.push(FrameChild::Shape(Ok(shape))) + } + + fn add_err_shape(&mut self, shape: TraceShape) { + self.children.push(FrameChild::Shape(Err(shape))) } fn add_result(&mut self, result: impl PrettyDebug) { @@ -98,6 +152,7 @@ impl ExprFrame { #[derive(Debug, Clone)] pub struct TreeFrame { description: &'static str, + token_desc: &'static str, children: Vec, error: Option, } @@ -113,29 +168,43 @@ impl TreeFrame { write!(f, "{}", Color::Yellow.bold().paint(self.description))?; } + write!( + f, + "{}", + Color::Black.bold().paint(&format!("({})", self.token_desc)) + )?; + write!(f, " -> ")?; self.children[0].leaf_description(f) - } else if self.error.is_some() { - if self.children.is_empty() { - write!( - f, - "{}", 
- Color::White.bold().on(Color::Red).paint(self.description) - ) - } else { - write!(f, "{}", Color::Red.normal().paint(self.description)) - } - } else if self.has_descendent_green() { - write!(f, "{}", Color::Green.normal().paint(self.description)) } else { - write!(f, "{}", Color::Yellow.bold().paint(self.description)) + if self.error.is_some() { + if self.children.is_empty() { + write!( + f, + "{}", + Color::White.bold().on(Color::Red).paint(self.description) + )? + } else { + write!(f, "{}", Color::Red.normal().paint(self.description))? + } + } else if self.has_descendent_green() { + write!(f, "{}", Color::Green.normal().paint(self.description))? + } else { + write!(f, "{}", Color::Yellow.bold().paint(self.description))? + } + + write!( + f, + "{}", + Color::Black.bold().paint(&format!("({})", self.token_desc)) + ) } } fn has_child_green(&self) -> bool { self.children.iter().any(|item| match item { TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false, - TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) => true, + TreeChild::OkExpr { .. } | TreeChild::OkShape { .. } | TreeChild::OkNonExpr(..) => true, }) } @@ -163,9 +232,10 @@ impl TreeFrame { if self.children.len() == 1 { let child: &TreeChild = &self.children[0]; match child { - TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => { - vec![] - } + TreeChild::OkExpr { .. } + | TreeChild::OkShape { .. } + | TreeChild::OkNonExpr(..) + | TreeChild::ErrorLeaf(..) => vec![], TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => { frame.children_for_formatting(text) } @@ -179,21 +249,44 @@ impl TreeFrame { #[derive(Debug, Clone)] pub enum TreeChild { OkNonExpr(String), - OkExpr(Expression, Text), + OkExpr { + source: Span, + desc: &'static str, + text: Text, + }, + OkShape { + source: Span, + desc: &'static str, + text: Text, + fallback: bool, + }, OkFrame(TreeFrame, Text), ErrorFrame(TreeFrame, Text), - ErrorLeaf(Vec<&'static str>), + ErrorLeaf(Vec<(&'static str, &'static str)>, &'static str), } impl TreeChild { fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { match self { - TreeChild::OkExpr(expr, text) => write!( + TreeChild::OkExpr { source, desc, text } => write!( f, "{} {} {}", Color::Cyan.normal().paint("returns"), - Color::White.bold().on(Color::Green).paint(expr.type_name()), - expr.span.slice(text) + Color::White.bold().on(Color::Green).paint(*desc), + source.slice(text) + ), + + TreeChild::OkShape { + source, + desc, + text, + fallback, + } => write!( + f, + "{} {} {}", + Color::Purple.normal().paint("paints"), + Color::White.bold().on(Color::Green).paint(*desc), + source.slice(text) ), TreeChild::OkNonExpr(result) => write!( @@ -206,17 +299,21 @@ impl TreeChild { .paint(result.to_string()) ), - TreeChild::ErrorLeaf(desc) => { + TreeChild::ErrorLeaf(desc, token_desc) => { let last = desc.len() - 1; - for (i, item) in desc.iter().enumerate() { - write!(f, "{}", Color::White.bold().on(Color::Red).paint(*item))?; + for (i, (desc, err_desc)) in desc.iter().enumerate() { + write!(f, "{}", Color::White.bold().on(Color::Red).paint(*desc))?; + + write!(f, " {}", Color::Black.bold().paint(*err_desc))?; if i != last { write!(f, "{}", Color::White.normal().paint(", "))?; } } + // write!(f, " {}", Color::Black.bold().paint(*token_desc))?; + Ok(()) } @@ -236,9 +333,10 @@ impl TreeItem for TreeChild { fn children(&self) -> Cow<[Self::Child]> { match self { - TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) 
=> { - Cow::Borrowed(&[]) - } + TreeChild::OkExpr { .. } + | TreeChild::OkShape { .. } + | TreeChild::OkNonExpr(..) + | TreeChild::ErrorLeaf(..) => Cow::Borrowed(&[]), TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => { Cow::Owned(frame.children_for_formatting(text)) } @@ -246,39 +344,46 @@ impl TreeItem for TreeChild { } } -#[derive(Debug)] -pub struct ExpandTracer { - frame_stack: Vec, +#[derive(Debug, Clone)] +pub struct ExpandTracer { + desc: &'static str, + frame_stack: Vec>, source: Text, } -impl ExpandTracer { +impl ExpandTracer { pub fn print(&self, source: Text) -> PrintTracer { let root = self.frame_stack[0].to_tree_frame(&source); - PrintTracer { root, source } + PrintTracer { + root, + desc: self.desc, + source, + } } - pub fn new(source: Text) -> ExpandTracer { + pub fn new(desc: &'static str, source: Text) -> ExpandTracer { let root = ExprFrame { description: "Trace", children: vec![], + token: None, error: None, }; ExpandTracer { + desc, frame_stack: vec![root], source, } } - fn current_frame(&mut self) -> &mut ExprFrame { + fn current_frame(&mut self) -> &mut ExprFrame { let frames = &mut self.frame_stack; let last = frames.len() - 1; &mut frames[last] } - fn pop_frame(&mut self) -> ExprFrame { + fn pop_frame(&mut self) -> ExprFrame { let result = self.frame_stack.pop().expect("Can't pop root tracer frame"); if self.frame_stack.is_empty() { @@ -290,10 +395,11 @@ impl ExpandTracer { result } - pub fn start(&mut self, description: &'static str) { + pub fn start(&mut self, description: &'static str, token: Option) { let frame = ExprFrame { description, children: vec![], + token, error: None, }; @@ -301,8 +407,36 @@ impl ExpandTracer { self.debug(); } - pub fn add_expr(&mut self, shape: Expression) { - self.current_frame().add_expr(shape); + pub fn add_return(&mut self, value: T) { + self.current_frame().add_return(value); + } + + pub fn add_shape(&mut self, shape: TraceShape) { + self.current_frame().add_shape(shape); + } + + pub fn add_err_shape(&mut self, shape: TraceShape) { + self.current_frame().add_err_shape(shape); + } + + pub fn finish(&mut self) { + loop { + if self.frame_stack.len() == 1 { + break; + } + + let frame = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(Box::new(frame))); + } + } + + pub fn eof_frame(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(Box::new(current))); } pub fn add_result(&mut self, result: impl PrettyDebugWithSource) { @@ -316,7 +450,7 @@ impl ExpandTracer { let current = self.pop_frame(); self.current_frame() .children - .push(FrameChild::Frame(current)); + .push(FrameChild::Frame(Box::new(current))); } pub fn failed(&mut self, error: &ParseError) { @@ -324,7 +458,7 @@ impl ExpandTracer { current.error = Some(error.clone()); self.current_frame() .children - .push(FrameChild::Frame(current)); + .push(FrameChild::Frame(Box::new(current))); } fn debug(&self) { @@ -342,6 +476,7 @@ impl ExpandTracer { #[derive(Debug, Clone)] pub struct PrintTracer { + desc: &'static str, root: TreeFrame, source: Text, } @@ -350,7 +485,7 @@ impl TreeItem for PrintTracer { type Child = TreeChild; fn write_self(&self, f: &mut W, style: &Style) -> io::Result<()> { - write!(f, "{}", style.paint("Expansion Trace")) + write!(f, "{}", style.paint(self.desc)) } fn children(&self) -> Cow<[Self::Child]> { diff --git a/crates/nu-parser/src/hir/tokens_iterator/into_shapes.rs b/crates/nu-parser/src/hir/tokens_iterator/into_shapes.rs new file mode 100644 index 
0000000000..450b641b82 --- /dev/null +++ b/crates/nu-parser/src/hir/tokens_iterator/into_shapes.rs @@ -0,0 +1,56 @@ +use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult}; +use nu_source::{Span, Spanned, SpannedItem}; + +pub struct FlatShapes { + shapes: Vec, +} + +impl<'a> IntoIterator for &'a FlatShapes { + type Item = &'a ShapeResult; + type IntoIter = std::slice::Iter<'a, ShapeResult>; + + fn into_iter(self) -> Self::IntoIter { + self.shapes.iter() + } +} + +pub trait IntoShapes: 'static { + fn into_shapes(self, span: Span) -> FlatShapes; +} + +impl IntoShapes for FlatShape { + fn into_shapes(self, span: Span) -> FlatShapes { + FlatShapes { + shapes: vec![ShapeResult::Success(self.spanned(span))], + } + } +} + +impl IntoShapes for Vec> { + fn into_shapes(self, _span: Span) -> FlatShapes { + FlatShapes { + shapes: self.into_iter().map(ShapeResult::Success).collect(), + } + } +} + +impl IntoShapes for Vec { + fn into_shapes(self, _span: Span) -> FlatShapes { + FlatShapes { shapes: self } + } +} + +impl IntoShapes for () { + fn into_shapes(self, _span: Span) -> FlatShapes { + FlatShapes { shapes: vec![] } + } +} + +impl IntoShapes for Option { + fn into_shapes(self, span: Span) -> FlatShapes { + match self { + Option::None => ().into_shapes(span), + Option::Some(shape) => shape.into_shapes(span), + } + } +} diff --git a/crates/nu-parser/src/hir/tokens_iterator/pattern.rs b/crates/nu-parser/src/hir/tokens_iterator/pattern.rs new file mode 100644 index 0000000000..1d06dbc292 --- /dev/null +++ b/crates/nu-parser/src/hir/tokens_iterator/pattern.rs @@ -0,0 +1,30 @@ +use crate::parse::token_tree::{ParseErrorFn, SpannedToken, TokenType}; +use nu_errors::ParseError; +use std::borrow::Cow; + +pub struct Pattern { + parts: Vec>>, +} + +impl TokenType for Pattern { + type Output = T; + + fn desc(&self) -> Cow<'static, str> { + Cow::Borrowed("pattern") + } + + fn extract_token_value( + &self, + token: &SpannedToken, + err: ParseErrorFn, + ) -> Result { + for part in &self.parts { + match part.extract_token_value(token, err) { + Err(_) => {} + Ok(result) => return Ok(result), + } + } + + err() + } +} diff --git a/crates/nu-parser/src/hir/tokens_iterator/state.rs b/crates/nu-parser/src/hir/tokens_iterator/state.rs new file mode 100644 index 0000000000..061d128ba4 --- /dev/null +++ b/crates/nu-parser/src/hir/tokens_iterator/state.rs @@ -0,0 +1,105 @@ +use crate::hir::syntax_shape::flat_shape::ShapeResult; +use crate::hir::syntax_shape::ExpandContext; +use crate::hir::tokens_iterator::TokensIterator; +use crate::parse::token_tree::SpannedToken; + +use getset::Getters; +use nu_errors::ParseError; +use nu_protocol::SpannedTypeName; +use nu_source::Span; +use std::sync::Arc; + +#[derive(Getters, Debug, Clone)] +pub struct TokensIteratorState<'content> { + pub(crate) tokens: &'content [SpannedToken], + pub(crate) span: Span, + pub(crate) index: usize, + pub(crate) seen: indexmap::IndexSet, + #[get = "pub"] + pub(crate) shapes: Vec, + pub(crate) errors: indexmap::IndexMap>, + pub(crate) context: Arc>, +} + +#[derive(Debug)] +pub struct Peeked<'content, 'me> { + pub(crate) node: Option<&'content SpannedToken>, + pub(crate) iterator: &'me mut TokensIterator<'content>, + pub(crate) from: usize, + pub(crate) to: usize, +} + +impl<'content, 'me> Peeked<'content, 'me> { + pub fn commit(&mut self) -> Option<&'content SpannedToken> { + let Peeked { + node, + iterator, + from, + to, + } = self; + + let node = (*node)?; + iterator.commit(*from, *to); + Some(node) + } + + pub fn rollback(self) {} + + pub fn 
not_eof(self, expected: &str) -> Result, ParseError> { + match self.node { + None => Err(ParseError::unexpected_eof( + expected.to_string(), + self.iterator.eof_span(), + )), + Some(node) => Ok(PeekedNode { + node, + iterator: self.iterator, + from: self.from, + to: self.to, + }), + } + } + + pub fn type_error(&self, expected: &'static str) -> ParseError { + peek_error(self.node, self.iterator.eof_span(), expected) + } +} + +#[derive(Debug)] +pub struct PeekedNode<'content, 'me> { + pub(crate) node: &'content SpannedToken, + pub(crate) iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> PeekedNode<'content, 'me> { + pub fn commit(self) -> &'content SpannedToken { + let PeekedNode { + node, + iterator, + from, + to, + } = self; + + iterator.commit(from, to); + node + } + + pub fn rollback(self) {} + + pub fn type_error(&self, expected: &'static str) -> ParseError { + peek_error(Some(self.node), self.iterator.eof_span(), expected) + } +} + +pub fn peek_error( + node: Option<&SpannedToken>, + eof_span: Span, + expected: &'static str, +) -> ParseError { + match node { + None => ParseError::unexpected_eof(expected, eof_span), + Some(node) => ParseError::mismatch(expected, node.spanned_type_name()), + } +} diff --git a/crates/nu-parser/src/hir/tokens_iterator/tests.rs b/crates/nu-parser/src/hir/tokens_iterator/tests.rs index 24569a9a4b..7d6d2a762f 100644 --- a/crates/nu-parser/src/hir/tokens_iterator/tests.rs +++ b/crates/nu-parser/src/hir/tokens_iterator/tests.rs @@ -3,12 +3,17 @@ use crate::parse::token_tree_builder::TokenTreeBuilder as b; use crate::Span; #[test] +<<<<<<< HEAD +fn supplies_tokens() { + let tokens = b::token_list(vec![b::it_var(), b::op("."), b::bare("cpu")]); +======= fn supplies_tokens() -> Result<(), Box> { let tokens = b::token_list(vec![b::var("it"), b::op("."), b::bare("cpu")]); +>>>>>>> master let (tokens, _) = b::build(tokens); let tokens = tokens.expect_list(); - let mut iterator = TokensIterator::all(tokens, Span::unknown()); + let mut iterator = TokensIterator::new(tokens, Span::unknown()); iterator.next()?.expect_var(); iterator.next()?.expect_dot(); diff --git a/crates/nu-parser/src/lib.rs b/crates/nu-parser/src/lib.rs index a783951efb..2fb447bb34 100644 --- a/crates/nu-parser/src/lib.rs +++ b/crates/nu-parser/src/lib.rs @@ -1,4 +1,5 @@ -#![allow(clippy::large_enum_variant, clippy::type_complexity)] +#[macro_use] +pub mod macros; pub mod commands; pub mod hir; @@ -8,23 +9,64 @@ pub mod parse_command; pub use crate::commands::classified::{ external::ExternalCommand, internal::InternalCommand, ClassifiedCommand, ClassifiedPipeline, }; -pub use crate::hir::syntax_shape::flat_shape::FlatShape; -pub use crate::hir::syntax_shape::{ - expand_syntax, ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry, -}; +pub use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult}; +pub use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry}; pub use crate::hir::tokens_iterator::TokensIterator; pub use crate::parse::files::Files; pub use crate::parse::flag::Flag; pub use crate::parse::operator::{CompareOperator, EvaluationOperator}; pub use crate::parse::parser::Number; pub use crate::parse::parser::{module, pipeline}; -pub use crate::parse::token_tree::{Delimiter, TokenNode}; +pub use crate::parse::token_tree::{Delimiter, SpannedToken, Token}; pub use crate::parse::token_tree_builder::TokenTreeBuilder; +use log::log_enabled; use nu_errors::ShellError; -use nu_source::nom_input; 
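
One design point worth calling out from the iterator changes above: `extract`/`remove` consume tokens out of order by recording their indices in the `seen` set, and `next_index` later skips anything already seen. Here is a dependency-free sketch of that behavior, with a `HashSet` standing in for the `indexmap::IndexSet` used by the real state; it shows why a flag pulled out early (as `parse_command` does) no longer appears during in-order traversal.

```rust
// Standalone model of the `seen` bookkeeping: out-of-order consumption via
// extract(), and in-order iteration that skips consumed indices.
use std::collections::HashSet;

struct Tokens<'a> {
    tokens: &'a [&'static str],
    seen: HashSet<usize>,
    index: usize,
}

impl<'a> Tokens<'a> {
    fn new(tokens: &'a [&'static str]) -> Self {
        Tokens { tokens, seen: HashSet::new(), index: 0 }
    }

    // Consume the first unseen token matching the predicate, wherever it is.
    fn extract(&mut self, f: impl Fn(&str) -> bool) -> Option<(usize, &'static str)> {
        for (i, tok) in self.tokens.iter().enumerate() {
            let tok = *tok;
            if self.seen.contains(&i) {
                continue;
            }
            if f(tok) {
                self.seen.insert(i);
                return Some((i, tok));
            }
        }
        None
    }

    // In-order traversal that skips everything already consumed by `extract`.
    fn next_token(&mut self) -> Option<&'static str> {
        while self.index < self.tokens.len() {
            let i = self.index;
            self.index += 1;
            if !self.seen.contains(&i) {
                return Some(self.tokens[i]);
            }
        }
        None
    }
}

fn main() {
    let mut tokens = Tokens::new(&["ls", "--all", "path"]);
    // A flag can be pulled out of order...
    assert_eq!(tokens.extract(|t| t.starts_with("--")), Some((1, "--all")));
    // ...and normal iteration no longer yields it.
    assert_eq!(tokens.next_token(), Some("ls"));
    assert_eq!(tokens.next_token(), Some("path"));
    assert_eq!(tokens.next_token(), None);
}
```
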
+use nu_protocol::{errln, outln}; +use nu_source::{nom_input, HasSpan, Text}; -pub fn parse(input: &str) -> Result { +pub fn pipeline_shapes(line: &str, expand_context: ExpandContext) -> Vec { + let tokens = parse_pipeline(line); + + match tokens { + Err(_) => vec![], + Ok(v) => { + let pipeline = match v.as_pipeline() { + Err(_) => return vec![], + Ok(v) => v, + }; + + let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())]; + let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span()); + + let shapes = { + // We just constructed a token list that only contains a pipeline, so it can't fail + let result = tokens.expand_infallible(PipelineShape); + + if let Some(failure) = result.failed { + errln!( + "BUG: PipelineShape didn't find a pipeline :: {:#?}", + failure + ); + } + + tokens.finish_tracer(); + + tokens.state().shapes() + }; + + if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) { + outln!(""); + let _ = ptree::print_tree(&tokens.expand_tracer().clone().print(Text::from(line))); + outln!(""); + } + + shapes.clone() + } + } +} + +pub fn parse_pipeline(input: &str) -> Result { let _ = pretty_env_logger::try_init(); match pipeline(nom_input(input)) { @@ -33,7 +75,9 @@ pub fn parse(input: &str) -> Result { } } -pub fn parse_script(input: &str) -> Result { +pub use parse_pipeline as parse; + +pub fn parse_script(input: &str) -> Result { let _ = pretty_env_logger::try_init(); match module(nom_input(input)) { diff --git a/crates/nu-parser/src/macros.rs b/crates/nu-parser/src/macros.rs new file mode 100644 index 0000000000..741476f83e --- /dev/null +++ b/crates/nu-parser/src/macros.rs @@ -0,0 +1,9 @@ +#[macro_export] +macro_rules! return_ok { + ($expr:expr) => { + match $expr { + Ok(val) => return Ok(val), + Err(_) => {} + } + }; +} diff --git a/crates/nu-parser/src/parse.rs b/crates/nu-parser/src/parse.rs index 7528b99983..99a9772eed 100644 --- a/crates/nu-parser/src/parse.rs +++ b/crates/nu-parser/src/parse.rs @@ -2,11 +2,11 @@ pub(crate) mod call_node; pub(crate) mod comment; pub(crate) mod files; pub(crate) mod flag; +pub(crate) mod number; pub(crate) mod operator; pub(crate) mod parser; pub(crate) mod pipeline; pub(crate) mod token_tree; pub(crate) mod token_tree_builder; -pub(crate) mod tokens; pub(crate) mod unit; pub(crate) mod util; diff --git a/crates/nu-parser/src/parse/call_node.rs b/crates/nu-parser/src/parse/call_node.rs index 989db440e7..568a801661 100644 --- a/crates/nu-parser/src/parse/call_node.rs +++ b/crates/nu-parser/src/parse/call_node.rs @@ -1,13 +1,13 @@ -use crate::TokenNode; +use crate::parse::token_tree::SpannedToken; use getset::Getters; use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource}; #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)] pub struct CallNode { #[get = "pub(crate)"] - head: Box, + head: Box, #[get = "pub(crate)"] - children: Option>, + children: Option>, } impl PrettyDebugWithSource for CallNode { @@ -29,7 +29,7 @@ impl PrettyDebugWithSource for CallNode { } impl CallNode { - pub fn new(head: Box, children: Vec) -> CallNode { + pub fn new(head: Box, children: Vec) -> CallNode { if children.is_empty() { CallNode { head, diff --git a/crates/nu-parser/src/parse/comment.rs b/crates/nu-parser/src/parse/comment.rs index 57d26f338c..dcf0db2e34 100644 --- a/crates/nu-parser/src/parse/comment.rs +++ b/crates/nu-parser/src/parse/comment.rs @@ -1,6 +1,6 @@ use derive_new::new; use getset::Getters; -use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span}; +use nu_source::{b, 
DebugDocBuilder, PrettyDebugWithSource, Span}; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] @@ -12,15 +12,13 @@ pub enum CommentKind { pub struct Comment { pub(crate) kind: CommentKind, pub(crate) text: Span, - pub(crate) span: Span, } impl Comment { - pub fn line(text: impl Into, outer: impl Into) -> Comment { + pub fn line(text: impl Into) -> Comment { Comment { kind: CommentKind::Line, text: text.into(), - span: outer.into(), } } } @@ -34,9 +32,3 @@ impl PrettyDebugWithSource for Comment { prefix + b::description(self.text.slice(source)) } } - -impl HasSpan for Comment { - fn span(&self) -> Span { - self.span - } -} diff --git a/crates/nu-parser/src/parse/flag.rs b/crates/nu-parser/src/parse/flag.rs index 5ee7ff02b5..1a8734c1ba 100644 --- a/crates/nu-parser/src/parse/flag.rs +++ b/crates/nu-parser/src/parse/flag.rs @@ -15,7 +15,6 @@ pub enum FlagKind { pub struct Flag { pub(crate) kind: FlagKind, pub(crate) name: Span, - pub(crate) span: Span, } impl PrettyDebugWithSource for Flag { @@ -30,10 +29,10 @@ impl PrettyDebugWithSource for Flag { } impl Flag { - pub fn color(&self) -> Spanned { + pub fn color(&self, span: impl Into) -> Spanned { match self.kind { - FlagKind::Longhand => FlatShape::Flag.spanned(self.span), - FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span), + FlagKind::Longhand => FlatShape::Flag.spanned(span.into()), + FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(span.into()), } } } diff --git a/crates/nu-parser/src/parse/number.rs b/crates/nu-parser/src/parse/number.rs new file mode 100644 index 0000000000..0ee60f4177 --- /dev/null +++ b/crates/nu-parser/src/parse/number.rs @@ -0,0 +1,70 @@ +use crate::hir::syntax_shape::FlatShape; +use crate::parse::parser::Number; +use bigdecimal::BigDecimal; +use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Text}; +use num_bigint::BigInt; +use std::str::FromStr; + +#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub enum RawNumber { + Int(Span), + Decimal(Span), +} + +impl HasSpan for RawNumber { + fn span(&self) -> Span { + match self { + RawNumber::Int(span) => *span, + RawNumber::Decimal(span) => *span, + } + } +} + +impl PrettyDebugWithSource for RawNumber { + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { + match self { + RawNumber::Int(span) => b::primitive(span.slice(source)), + RawNumber::Decimal(span) => b::primitive(span.slice(source)), + } + } +} + +impl RawNumber { + pub fn as_flat_shape(&self) -> FlatShape { + match self { + RawNumber::Int(_) => FlatShape::Int, + RawNumber::Decimal(_) => FlatShape::Decimal, + } + } + + pub fn int(span: impl Into) -> RawNumber { + let span = span.into(); + + RawNumber::Int(span) + } + + pub fn decimal(span: impl Into) -> RawNumber { + let span = span.into(); + + RawNumber::Decimal(span) + } + + pub(crate) fn to_number(self, source: &Text) -> Number { + match self { + RawNumber::Int(tag) => { + if let Ok(big_int) = BigInt::from_str(tag.slice(source)) { + Number::Int(big_int) + } else { + unreachable!("Internal error: could not parse text as BigInt as expected") + } + } + RawNumber::Decimal(tag) => { + if let Ok(big_decimal) = BigDecimal::from_str(tag.slice(source)) { + Number::Decimal(big_decimal) + } else { + unreachable!("Internal error: could not parse text as BigDecimal as expected") + } + } + } + } +} diff --git a/crates/nu-parser/src/parse/parser.rs b/crates/nu-parser/src/parse/parser.rs index f67d354fd7..c2445ca45a 100644 
--- a/crates/nu-parser/src/parse/parser.rs +++ b/crates/nu-parser/src/parse/parser.rs @@ -1,8 +1,8 @@ #![allow(unused)] use crate::parse::{ - call_node::*, flag::*, operator::*, pipeline::*, token_tree::*, token_tree_builder::*, - tokens::*, unit::*, + call_node::*, flag::*, number::*, operator::*, pipeline::*, token_tree::*, + token_tree_builder::*, unit::*, }; use nom; use nom::branch::*; @@ -36,7 +36,7 @@ use std::str::FromStr; macro_rules! cmp_operator { ($name:tt : $token:tt ) => { #[tracable_parser] - pub fn $name(input: NomSpan) -> IResult { + pub fn $name(input: NomSpan) -> IResult { let start = input.offset; let (input, tag) = tag($token)(input)?; let end = input.offset; @@ -52,7 +52,7 @@ macro_rules! cmp_operator { macro_rules! eval_operator { ($name:tt : $token:tt ) => { #[tracable_parser] - pub fn $name(input: NomSpan) -> IResult { + pub fn $name(input: NomSpan) -> IResult { let start = input.offset; let (input, tag) = tag($token)(input)?; let end = input.offset; @@ -209,7 +209,7 @@ impl Into for BigInt { } #[tracable_parser] -pub fn number(input: NomSpan) -> IResult { +pub fn number(input: NomSpan) -> IResult { let (input, number) = raw_number(input)?; Ok(( @@ -218,12 +218,36 @@ pub fn number(input: NomSpan) -> IResult { )) } +#[tracable_parser] +pub fn int_member(input: NomSpan) -> IResult { + let start = input.offset; + let (input, head) = digit1(input)?; + + match input.fragment.chars().next() { + None | Some('.') => Ok(( + input, + Token::Number(RawNumber::int((start, input.offset))) + .into_spanned((start, input.offset)), + )), + other if is_boundary(other) => Ok(( + input, + Token::Number(RawNumber::int((start, input.offset))) + .into_spanned((start, input.offset)), + )), + _ => Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))), + } +} + #[tracable_parser] pub fn raw_number(input: NomSpan) -> IResult { let anchoral = input; let start = input.offset; let (input, neg) = opt(tag("-"))(input)?; let (input, head) = digit1(input)?; + let after_int_head = input; match input.fragment.chars().next() { None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), @@ -255,7 +279,17 @@ pub fn raw_number(input: NomSpan) -> IResult { Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), }; - let (input, tail) = digit1(input)?; + let tail_digits_result: IResult = digit1(input); + + let (input, tail) = match tail_digits_result { + Ok((input, tail)) => (input, tail), + Err(_) => { + return Ok(( + after_int_head, + RawNumber::int((start, after_int_head.offset)), + )) + } + }; let end = input.offset; @@ -272,14 +306,14 @@ pub fn raw_number(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn operator(input: NomSpan) -> IResult { +pub fn operator(input: NomSpan) -> IResult { let (input, operator) = alt((gte, lte, neq, gt, lt, eq, cont, ncont))(input)?; Ok((input, operator)) } #[tracable_parser] -pub fn dq_string(input: NomSpan) -> IResult { +pub fn dq_string(input: NomSpan) -> IResult { let start = input.offset; let (input, _) = char('"')(input)?; let start1 = input.offset; @@ -294,7 +328,7 @@ pub fn dq_string(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn sq_string(input: NomSpan) -> IResult { +pub fn sq_string(input: NomSpan) -> IResult { let start = input.offset; let (input, _) = char('\'')(input)?; let start1 = input.offset; @@ -310,12 +344,12 @@ pub fn sq_string(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn string(input: NomSpan) -> IResult { +pub fn string(input: NomSpan) -> 
IResult { alt((sq_string, dq_string))(input) } #[tracable_parser] -pub fn external(input: NomSpan) -> IResult { +pub fn external(input: NomSpan) -> IResult { let start = input.offset; let (input, _) = tag("^")(input)?; let (input, bare) = take_while(is_file_char)(input)?; @@ -373,7 +407,7 @@ pub fn matches(cond: fn(char) -> bool) -> impl Fn(NomSpan) -> IResult IResult { +pub fn pattern(input: NomSpan) -> IResult { word( start_pattern, matches(is_glob_char), @@ -387,7 +421,7 @@ pub fn start_pattern(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn filename(input: NomSpan) -> IResult { +pub fn filename(input: NomSpan) -> IResult { let start_pos = input.offset; let (mut input, mut saw_special) = match start_file_char(input) { @@ -495,7 +529,7 @@ pub fn start_filename(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn member(input: NomSpan) -> IResult { +pub fn bare_member(input: NomSpan) -> IResult { word( matches(is_start_member_char), matches(is_member_char), @@ -503,13 +537,22 @@ pub fn member(input: NomSpan) -> IResult { )(input) } +#[tracable_parser] +pub fn garbage_member(input: NomSpan) -> IResult { + word( + matches(is_garbage_member_char), + matches(is_garbage_member_char), + TokenTreeBuilder::spanned_garbage, + )(input) +} + #[tracable_parser] pub fn ident(input: NomSpan) -> IResult { word(matches(is_id_start), matches(is_id_continue), Tag::from)(input) } #[tracable_parser] -pub fn external_word(input: NomSpan) -> IResult { +pub fn external_word(input: NomSpan) -> IResult { let start = input.offset; let (input, _) = take_while1(is_external_word_char)(input)?; let end = input.offset; @@ -517,22 +560,48 @@ pub fn external_word(input: NomSpan) -> IResult { Ok((input, TokenTreeBuilder::spanned_external_word((start, end)))) } +enum OneOf { + First(T), + Second(U), +} + +trait SubParser<'a, T>: Sized + Fn(NomSpan<'a>) -> IResult, T> {} + +impl<'a, T, U> SubParser<'a, U> for T where T: Fn(NomSpan<'a>) -> IResult, U> {} + +fn one_of<'a, T, U>( + first: impl SubParser<'a, T>, + second: impl SubParser<'a, U>, +) -> impl SubParser<'a, OneOf> { + move |input: NomSpan<'a>| -> IResult> { + let first_result = first(input); + + match first_result { + Ok((input, val)) => Ok((input, OneOf::First(val))), + Err(_) => { + let (input, val) = second(input)?; + Ok((input, OneOf::Second(val))) + } + } + } +} + #[tracable_parser] -pub fn var(input: NomSpan) -> IResult { +pub fn var(input: NomSpan) -> IResult { let start = input.offset; let (input, _) = tag("$")(input)?; - let (input, bare) = ident(input)?; + let (input, name) = one_of(tag("it"), ident)(input)?; let end = input.offset; - Ok(( - input, - TokenTreeBuilder::spanned_var(bare, Span::new(start, end)), - )) + match name { + OneOf::First(it) => Ok((input, TokenTreeBuilder::spanned_it_var(it, (start, end)))), + OneOf::Second(name) => Ok((input, TokenTreeBuilder::spanned_var(name, (start, end)))), + } } fn tight<'a>( - parser: impl Fn(NomSpan<'a>) -> IResult, Vec>, -) -> impl Fn(NomSpan<'a>) -> IResult, Vec> { + parser: impl Fn(NomSpan<'a>) -> IResult, Vec>, +) -> impl Fn(NomSpan<'a>) -> IResult, Vec> { move |input: NomSpan| { let mut result = vec![]; let (input, head) = parser(input)?; @@ -560,7 +629,7 @@ fn tight<'a>( } #[tracable_parser] -pub fn flag(input: NomSpan) -> IResult { +pub fn flag(input: NomSpan) -> IResult { let start = input.offset; let (input, _) = tag("--")(input)?; let (input, bare) = filename(input)?; @@ -573,7 +642,7 @@ pub fn flag(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn shorthand(input: NomSpan) -> 
IResult { +pub fn shorthand(input: NomSpan) -> IResult { let start = input.offset; let (input, _) = tag("-")(input)?; let (input, bare) = filename(input)?; @@ -586,14 +655,14 @@ pub fn shorthand(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn leaf(input: NomSpan) -> IResult { +pub fn leaf(input: NomSpan) -> IResult { let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?; Ok((input, node)) } #[tracable_parser] -pub fn token_list(input: NomSpan) -> IResult>> { +pub fn token_list(input: NomSpan) -> IResult>> { let start = input.offset; let mut node_list = vec![]; @@ -658,7 +727,7 @@ pub fn token_list(input: NomSpan) -> IResult>> { } #[tracable_parser] -pub fn spaced_token_list(input: NomSpan) -> IResult>> { +pub fn spaced_token_list(input: NomSpan) -> IResult>> { let start = input.offset; let (input, pre_ws) = opt(any_space)(input)?; let (input, items) = token_list(input)?; @@ -679,10 +748,10 @@ pub fn spaced_token_list(input: NomSpan) -> IResult, - list: Vec<(Vec, Vec)>, - sp_right: Option, -) -> Vec { + first: Vec, + list: Vec<(Vec, Vec)>, + sp_right: Option, +) -> Vec { let mut nodes = vec![]; nodes.extend(first); @@ -700,7 +769,7 @@ fn make_token_list( } #[tracable_parser] -pub fn separator(input: NomSpan) -> IResult { +pub fn separator(input: NomSpan) -> IResult { let left = input.offset; let (input, ws1) = alt((tag(";"), tag("\n")))(input)?; let right = input.offset; @@ -709,7 +778,7 @@ pub fn separator(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn whitespace(input: NomSpan) -> IResult { +pub fn whitespace(input: NomSpan) -> IResult { let left = input.offset; let (input, ws1) = space1(input)?; let right = input.offset; @@ -718,7 +787,7 @@ pub fn whitespace(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn any_space(input: NomSpan) -> IResult> { +pub fn any_space(input: NomSpan) -> IResult> { let left = input.offset; let (input, tokens) = many1(alt((whitespace, separator, comment)))(input)?; let right = input.offset; @@ -727,7 +796,7 @@ pub fn any_space(input: NomSpan) -> IResult> { } #[tracable_parser] -pub fn comment(input: NomSpan) -> IResult { +pub fn comment(input: NomSpan) -> IResult { let left = input.offset; let (input, start) = tag("#")(input)?; let (input, rest) = not_line_ending(input)?; @@ -744,7 +813,7 @@ pub fn comment(input: NomSpan) -> IResult { pub fn delimited( input: NomSpan, delimiter: Delimiter, -) -> IResult>)> { +) -> IResult>)> { let left = input.offset; let (input, open_span) = tag(delimiter.open())(input)?; let (input, inner_items) = opt(spaced_token_list)(input)?; @@ -768,7 +837,7 @@ pub fn delimited( } #[tracable_parser] -pub fn delimited_paren(input: NomSpan) -> IResult { +pub fn delimited_paren(input: NomSpan) -> IResult { let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?; Ok(( @@ -778,7 +847,7 @@ pub fn delimited_paren(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn delimited_square(input: NomSpan) -> IResult { +pub fn delimited_square(input: NomSpan) -> IResult { let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?; Ok(( @@ -788,7 +857,7 @@ pub fn delimited_square(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn delimited_brace(input: NomSpan) -> IResult { +pub fn delimited_brace(input: NomSpan) -> IResult { let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?; Ok(( @@ -810,7 +879,7 @@ pub fn raw_call(input: NomSpan) -> IResult> { } #[tracable_parser] -pub fn range_continuation(input: NomSpan) -> IResult> { 
+pub fn range_continuation(input: NomSpan) -> IResult> { let original = input; let mut result = vec![]; @@ -824,7 +893,7 @@ pub fn range_continuation(input: NomSpan) -> IResult> { } #[tracable_parser] -pub fn dot_member(input: NomSpan) -> IResult> { +pub fn dot_member(input: NomSpan) -> IResult> { let (input, dot_result) = dot(input)?; let (input, member_result) = any_member(input)?; @@ -832,12 +901,12 @@ pub fn dot_member(input: NomSpan) -> IResult> { } #[tracable_parser] -pub fn any_member(input: NomSpan) -> IResult { - alt((number, string, member))(input) +pub fn any_member(input: NomSpan) -> IResult { + alt((int_member, string, bare_member, garbage_member))(input) } #[tracable_parser] -pub fn tight_node(input: NomSpan) -> IResult> { +pub fn tight_node(input: NomSpan) -> IResult> { alt(( tight(to_list(leaf)), tight(to_list(filename)), @@ -851,8 +920,8 @@ pub fn tight_node(input: NomSpan) -> IResult> { } fn to_list( - parser: impl Fn(NomSpan) -> IResult, -) -> impl Fn(NomSpan) -> IResult> { + parser: impl Fn(NomSpan) -> IResult, +) -> impl Fn(NomSpan) -> IResult> { move |input| { let (input, next) = parser(input)?; @@ -861,17 +930,18 @@ fn to_list( } #[tracable_parser] -pub fn nodes(input: NomSpan) -> IResult { +pub fn nodes(input: NomSpan) -> IResult { let (input, tokens) = token_list(input)?; + let span = tokens.span; Ok(( input, - TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span), + TokenTreeBuilder::spanned_pipeline(vec![PipelineElement::new(None, tokens)], span), )) } #[tracable_parser] -pub fn pipeline(input: NomSpan) -> IResult { +pub fn pipeline(input: NomSpan) -> IResult { let start = input.offset; let (input, head) = spaced_token_list(input)?; let (input, items) = many0(tuple((tag("|"), spaced_token_list)))(input)?; @@ -900,7 +970,7 @@ pub fn pipeline(input: NomSpan) -> IResult { } #[tracable_parser] -pub fn module(input: NomSpan) -> IResult { +pub fn module(input: NomSpan) -> IResult { let (input, tokens) = spaced_token_list(input)?; if input.input_len() != 0 { @@ -999,9 +1069,17 @@ fn is_file_char(c: char) -> bool { } } +fn is_garbage_member_char(c: char) -> bool { + match c { + c if c.is_whitespace() => false, + '.' => false, + _ => true, + } +} + fn is_start_member_char(c: char) -> bool { match c { - _ if c.is_alphanumeric() => true, + _ if c.is_alphabetic() => true, '_' => true, '-' => true, _ => false, @@ -1263,7 +1341,7 @@ mod tests { fn test_variable() { equal_tokens! { - "$it" -> b::token_list(vec![b::var("it")]) + "$it" -> b::token_list(vec![b::it_var()]) } equal_tokens! { @@ -1354,12 +1432,33 @@ mod tests { equal_tokens! { - "$it.print" -> b::token_list(vec![b::var("it"), b::dot(), b::bare("print")]) + "$it.print" -> b::token_list(vec![b::it_var(), b::dot(), b::bare("print")]) } equal_tokens! { - "$it.0" -> b::token_list(vec![b::var("it"), b::dot(), b::int(0)]) + r#"nu.0xATYKARNU.baz"# -> b::token_list(vec![ + b::bare("nu"), + b::dot(), + b::garbage("0xATYKARNU"), + b::dot(), + b::bare("baz") + ]) + } + + equal_tokens! { + + "1.b" -> b::token_list(vec![b::int(1), b::dot(), b::bare("b")]) + } + + equal_tokens! { + + "$it.0" -> b::token_list(vec![b::it_var(), b::dot(), b::int(0)]) + } + + equal_tokens! { + + "fortune_tellers.2.name" -> b::token_list(vec![b::bare("fortune_tellers"), b::dot(), b::int(2), b::dot(), b::bare("name")]) } equal_tokens! 
{ @@ -1386,7 +1485,7 @@ mod tests { vec![ b::parens(vec![ b::sp(), - b::var("it"), + b::it_var(), b::dot(), b::bare("is"), b::dot(), @@ -1407,7 +1506,7 @@ mod tests { r#"$it."are PAS".0"# -> b::token_list( vec![ - b::var("it"), + b::it_var(), b::dot(), b::string("are PAS"), b::dot(), @@ -1445,7 +1544,7 @@ mod tests { fn test_smoke_single_command_it() { equal_tokens! { - "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")]) + "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::it_var()]) } } @@ -1533,6 +1632,17 @@ mod tests { ] ]) } + + equal_tokens! { + "^echo 1 | ^cat" -> b::pipeline(vec![ + vec![ + b::external_command("echo"), b::sp(), b::int(1), b::sp() + ], + vec![ + b::sp(), b::external_command("cat") + ] + ]) + } } #[test] @@ -1631,7 +1741,7 @@ mod tests { // b::bare("where"), // vec![ // b::sp(), - // b::var("it"), + // b::it_var(), // b::sp(), // b::op("!="), // b::sp(), @@ -1654,7 +1764,7 @@ mod tests { // vec![ // b::sp(), // b::braced(vec![ - // b::path(b::var("it"), vec![b::member("size")]), + // b::path(b::it_var(), vec![b::member("size")]), // b::sp(), // b::op(">"), // b::sp(), @@ -1669,10 +1779,13 @@ mod tests { // } fn apply( - f: impl Fn(NomSpan) -> Result<(NomSpan, TokenNode), nom::Err<(NomSpan, nom::error::ErrorKind)>>, + f: impl Fn( + NomSpan, + ) + -> Result<(NomSpan, SpannedToken), nom::Err<(NomSpan, nom::error::ErrorKind)>>, desc: &str, string: &str, - ) -> TokenNode { + ) -> SpannedToken { let result = f(nom_input(string)); match result { @@ -1693,20 +1806,15 @@ mod tests { fn delimited( delimiter: Spanned, - children: Vec, + children: Vec, left: usize, right: usize, - ) -> TokenNode { + ) -> SpannedToken { let start = Span::for_char(left); let end = Span::for_char(right); let node = DelimitedNode::new(delimiter.item, (start, end), children); - let spanned = node.spanned(Span::new(left, right)); - TokenNode::Delimited(spanned) - } - - fn token(token: UnspannedToken, left: usize, right: usize) -> TokenNode { - TokenNode::Token(token.into_token(Span::new(left, right))) + Token::Delimited(node).into_spanned((left, right)) } fn build(block: CurriedNode) -> T { @@ -1714,7 +1822,7 @@ mod tests { block(&mut builder) } - fn build_token(block: CurriedToken) -> TokenNode { + fn build_token(block: CurriedToken) -> SpannedToken { TokenTreeBuilder::build(block).0 } } diff --git a/crates/nu-parser/src/parse/pipeline.rs b/crates/nu-parser/src/parse/pipeline.rs index 9752ce6117..40e1d487f7 100644 --- a/crates/nu-parser/src/parse/pipeline.rs +++ b/crates/nu-parser/src/parse/pipeline.rs @@ -1,23 +1,32 @@ -use crate::TokenNode; +use crate::{SpannedToken, Token}; use derive_new::new; use getset::Getters; -use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned}; +use nu_source::{ + b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem, +}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)] pub struct Pipeline { #[get = "pub"] pub(crate) parts: Vec, - pub(crate) span: Span, +} + +impl IntoSpanned for Pipeline { + type Output = Spanned; + + fn into_spanned(self, span: impl Into) -> Self::Output { + self.spanned(span.into()) + } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] pub struct Tokens { - pub(crate) tokens: Vec, + pub(crate) tokens: Vec, pub(crate) span: Span, } impl Tokens { - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl Iterator { self.tokens.iter() } } @@ -38,7 +47,7 @@ impl HasSpan for PipelineElement { } impl 
PipelineElement { - pub fn new(pipe: Option, tokens: Spanned>) -> PipelineElement { + pub fn new(pipe: Option, tokens: Spanned>) -> PipelineElement { PipelineElement { pipe, tokens: Tokens { @@ -48,7 +57,7 @@ impl PipelineElement { } } - pub fn tokens(&self) -> &[TokenNode] { + pub fn tokens(&self) -> &[SpannedToken] { &self.tokens.tokens } } @@ -65,9 +74,9 @@ impl PrettyDebugWithSource for Pipeline { impl PrettyDebugWithSource for PipelineElement { fn pretty_debug(&self, source: &str) -> DebugDocBuilder { b::intersperse( - self.tokens.iter().map(|token| match token { - TokenNode::Whitespace(_) => b::blank(), - token => token.pretty_debug(source), + self.tokens.iter().map(|token| match token.unspanned() { + Token::Whitespace => b::blank(), + _ => token.pretty_debug(source), }), b::space(), ) diff --git a/crates/nu-parser/src/parse/token_tree.rs b/crates/nu-parser/src/parse/token_tree.rs index e917120ca1..7166629f87 100644 --- a/crates/nu-parser/src/parse/token_tree.rs +++ b/crates/nu-parser/src/parse/token_tree.rs @@ -1,162 +1,275 @@ -use crate::parse::{call_node::*, comment::*, flag::*, operator::*, pipeline::*, tokens::*}; +#![allow(clippy::type_complexity)] +use crate::parse::{call_node::*, comment::*, flag::*, number::*, operator::*, pipeline::*}; use derive_new::new; use getset::Getters; use nu_errors::{ParseError, ShellError}; -use nu_protocol::ShellTypeName; +use nu_protocol::{ShellTypeName, SpannedTypeName}; use nu_source::{ - b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Tagged, - TaggedItem, Text, + b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text, }; -use std::fmt; +use std::borrow::Cow; +use std::ops::Deref; #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)] -pub enum TokenNode { - Token(Token), +pub enum Token { + Number(RawNumber), + CompareOperator(CompareOperator), + EvaluationOperator(EvaluationOperator), + String(Span), + Variable(Span), + ItVariable(Span), + ExternalCommand(Span), + ExternalWord, + GlobPattern, + Bare, + Garbage, - Call(Spanned), - Nodes(Spanned>), - Delimited(Spanned), + Call(CallNode), + Delimited(DelimitedNode), Pipeline(Pipeline), Flag(Flag), Comment(Comment), - Whitespace(Span), - Separator(Span), - - Error(Spanned), + Whitespace, + Separator, } -impl PrettyDebugWithSource for TokenNode { +macro_rules! 
token_type { + (struct $name:tt (desc: $desc:tt) -> $out:ty { |$span:ident, $pat:pat| => $do:expr }) => { + pub struct $name; + + impl TokenType for $name { + type Output = $out; + + fn desc(&self) -> Cow<'static, str> { + Cow::Borrowed($desc) + } + + fn extract_token_value( + &self, + token: &SpannedToken, + err: ParseErrorFn<$out>, + ) -> Result<$out, ParseError> { + let $span = token.span(); + + match *token.unspanned() { + $pat => Ok($do), + _ => err(), + } + } + } + }; + + (struct $name:tt (desc: $desc:tt) -> $out:ty { $pat:pat => $do:expr }) => { + pub struct $name; + + impl TokenType for $name { + type Output = $out; + + fn desc(&self) -> Cow<'static, str> { + Cow::Borrowed($desc) + } + + fn extract_token_value( + &self, + token: &SpannedToken, + err: ParseErrorFn<$out>, + ) -> Result<$out, ParseError> { + match token.unspanned().clone() { + $pat => Ok($do), + _ => err(), + } + } + } + }; +} + +pub type ParseErrorFn<'a, T> = &'a dyn Fn() -> Result; + +token_type!(struct IntType (desc: "integer") -> RawNumber { + Token::Number(number @ RawNumber::Int(_)) => number +}); + +token_type!(struct DecimalType (desc: "decimal") -> RawNumber { + Token::Number(number @ RawNumber::Decimal(_)) => number +}); + +token_type!(struct StringType (desc: "string") -> (Span, Span) { + |outer, Token::String(inner)| => (inner, outer) +}); + +token_type!(struct BareType (desc: "word") -> Span { + |span, Token::Bare| => span +}); + +token_type!(struct DotType (desc: "dot") -> Span { + |span, Token::EvaluationOperator(EvaluationOperator::Dot)| => span +}); + +token_type!(struct DotDotType (desc: "dotdot") -> Span { + |span, Token::EvaluationOperator(EvaluationOperator::DotDot)| => span +}); + +token_type!(struct CompareOperatorType (desc: "compare operator") -> (Span, CompareOperator) { + |span, Token::CompareOperator(operator)| => (span, operator) +}); + +token_type!(struct ExternalWordType (desc: "external word") -> Span { + |span, Token::ExternalWord| => span +}); + +token_type!(struct ExternalCommandType (desc: "external command") -> (Span, Span) { + |outer, Token::ExternalCommand(inner)| => (inner, outer) +}); + +token_type!(struct CommentType (desc: "comment") -> (Comment, Span) { + |outer, Token::Comment(comment)| => (comment, outer) +}); + +token_type!(struct SeparatorType (desc: "separator") -> Span { + |span, Token::Separator| => span +}); + +token_type!(struct WhitespaceType (desc: "whitespace") -> Span { + |span, Token::Whitespace| => span +}); + +token_type!(struct WordType (desc: "word") -> Span { + |span, Token::Bare| => span +}); + +token_type!(struct ItVarType (desc: "$it") -> (Span, Span) { + |outer, Token::ItVariable(inner)| => (inner, outer) +}); + +token_type!(struct VarType (desc: "variable") -> (Span, Span) { + |outer, Token::Variable(inner)| => (inner, outer) +}); + +token_type!(struct PipelineType (desc: "pipeline") -> Pipeline { + Token::Pipeline(pipeline) => pipeline +}); + +token_type!(struct BlockType (desc: "block") -> DelimitedNode { + Token::Delimited(block @ DelimitedNode { delimiter: Delimiter::Brace, .. }) => block +}); + +token_type!(struct SquareType (desc: "square") -> DelimitedNode { + Token::Delimited(square @ DelimitedNode { delimiter: Delimiter::Square, .. 
}) => square +}); + +pub trait TokenType { + type Output; + + fn desc(&self) -> Cow<'static, str>; + + fn extract_token_value( + &self, + token: &SpannedToken, + err: ParseErrorFn, + ) -> Result; +} + +impl Token { + pub fn into_spanned(self, span: impl Into) -> SpannedToken { + SpannedToken { + unspanned: self, + span: span.into(), + } + } +} + +#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)] +pub struct SpannedToken { + #[get = "pub"] + unspanned: Token, + span: Span, +} + +impl Deref for SpannedToken { + type Target = Token; + fn deref(&self) -> &Self::Target { + &self.unspanned + } +} + +impl HasSpan for SpannedToken { + fn span(&self) -> Span { + self.span + } +} + +impl ShellTypeName for SpannedToken { + fn type_name(&self) -> &'static str { + self.unspanned.type_name() + } +} + +impl PrettyDebugWithSource for SpannedToken { fn pretty_debug(&self, source: &str) -> DebugDocBuilder { - match self { - TokenNode::Token(token) => token.pretty_debug(source), - TokenNode::Call(call) => call.pretty_debug(source), - TokenNode::Nodes(nodes) => b::intersperse( - nodes.iter().map(|node| node.pretty_debug(source)), - b::space(), - ), - TokenNode::Delimited(delimited) => delimited.pretty_debug(source), - TokenNode::Pipeline(pipeline) => pipeline.pretty_debug(source), - TokenNode::Flag(flag) => flag.pretty_debug(source), - TokenNode::Whitespace(space) => b::typed( + match self.unspanned() { + Token::Number(number) => number.pretty_debug(source), + Token::CompareOperator(operator) => operator.pretty_debug(source), + Token::EvaluationOperator(operator) => operator.pretty_debug(source), + Token::String(_) | Token::GlobPattern | Token::Bare => { + b::primitive(self.span.slice(source)) + } + Token::Variable(_) => b::var(self.span.slice(source)), + Token::ItVariable(_) => b::keyword(self.span.slice(source)), + Token::ExternalCommand(_) => b::description(self.span.slice(source)), + Token::ExternalWord => b::description(self.span.slice(source)), + Token::Call(call) => call.pretty_debug(source), + Token::Delimited(delimited) => delimited.pretty_debug(source), + Token::Pipeline(pipeline) => pipeline.pretty_debug(source), + Token::Flag(flag) => flag.pretty_debug(source), + Token::Garbage => b::error(self.span.slice(source)), + Token::Whitespace => b::typed( "whitespace", - b::description(format!("{:?}", space.slice(source))), + b::description(format!("{:?}", self.span.slice(source))), ), - TokenNode::Separator(span) => b::typed( + Token::Separator => b::typed( "separator", - b::description(format!("{:?}", span.slice(source))), + b::description(format!("{:?}", self.span.slice(source))), ), - TokenNode::Comment(comment) => { + Token::Comment(comment) => { b::typed("comment", b::description(comment.text.slice(source))) } - TokenNode::Error(_) => b::error("error"), } } } -impl ShellTypeName for TokenNode { +impl ShellTypeName for Token { fn type_name(&self) -> &'static str { match self { - TokenNode::Token(t) => t.type_name(), - TokenNode::Nodes(_) => "nodes", - TokenNode::Call(_) => "command", - TokenNode::Delimited(d) => d.type_name(), - TokenNode::Pipeline(_) => "pipeline", - TokenNode::Flag(_) => "flag", - TokenNode::Whitespace(_) => "whitespace", - TokenNode::Separator(_) => "separator", - TokenNode::Comment(_) => "comment", - TokenNode::Error(_) => "error", + Token::Number(_) => "number", + Token::CompareOperator(_) => "comparison operator", + Token::EvaluationOperator(EvaluationOperator::Dot) => "dot", + Token::EvaluationOperator(EvaluationOperator::DotDot) => "dot dot", + Token::String(_) 
=> "string", + Token::Variable(_) => "variable", + Token::ItVariable(_) => "it variable", + Token::ExternalCommand(_) => "external command", + Token::ExternalWord => "external word", + Token::GlobPattern => "glob pattern", + Token::Bare => "word", + Token::Call(_) => "command", + Token::Delimited(d) => d.type_name(), + Token::Pipeline(_) => "pipeline", + Token::Flag(_) => "flag", + Token::Garbage => "garbage", + Token::Whitespace => "whitespace", + Token::Separator => "separator", + Token::Comment(_) => "comment", } } } -pub struct DebugTokenNode<'a> { - node: &'a TokenNode, - source: &'a Text, -} - -impl fmt::Debug for DebugTokenNode<'_> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.node { - TokenNode::Token(t) => write!(f, "{:?}", t.debug(self.source)), - TokenNode::Call(s) => { - write!(f, "(")?; - - write!(f, "{}", s.head().debug(self.source))?; - - if let Some(children) = s.children() { - for child in children { - write!(f, "{}", child.debug(self.source))?; - } - } - - write!(f, ")") - } - - TokenNode::Delimited(d) => { - write!( - f, - "{}", - match d.delimiter { - Delimiter::Brace => "{", - Delimiter::Paren => "(", - Delimiter::Square => "[", - } - )?; - - for child in d.children() { - write!(f, "{:?}", child.old_debug(self.source))?; - } - - write!( - f, - "{}", - match d.delimiter { - Delimiter::Brace => "}", - Delimiter::Paren => ")", - Delimiter::Square => "]", - } - ) - } - TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)), - TokenNode::Error(_) => write!(f, ""), - rest => write!(f, "{}", rest.span().slice(self.source)), - } +impl From<&SpannedToken> for Span { + fn from(token: &SpannedToken) -> Span { + token.span } } -impl From<&TokenNode> for Span { - fn from(token: &TokenNode) -> Span { - token.span() - } -} - -impl HasSpan for TokenNode { - fn span(&self) -> Span { - match self { - TokenNode::Token(t) => t.span, - TokenNode::Nodes(t) => t.span, - TokenNode::Call(s) => s.span, - TokenNode::Delimited(s) => s.span, - TokenNode::Pipeline(s) => s.span, - TokenNode::Flag(s) => s.span, - TokenNode::Whitespace(s) => *s, - TokenNode::Separator(s) => *s, - TokenNode::Comment(c) => c.span(), - TokenNode::Error(s) => s.span, - } - } -} - -impl TokenNode { - pub fn tagged_type_name(&self) -> Tagged<&'static str> { - self.type_name().tagged(self.span()) - } - - pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { - DebugTokenNode { node: self, source } - } - +impl SpannedToken { pub fn as_external_arg(&self, source: &Text) -> String { self.span().slice(source).to_string() } @@ -166,145 +279,105 @@ impl TokenNode { } pub fn get_variable(&self) -> Result<(Span, Span), ShellError> { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::Variable(inner_span), - span: outer_span, - }) => Ok((*outer_span, *inner_span)), - _ => Err(ShellError::type_error( - "variable", - self.type_name().spanned(self.span()), - )), + match self.unspanned() { + Token::Variable(inner_span) => Ok((self.span(), *inner_span)), + _ => Err(ShellError::type_error("variable", self.spanned_type_name())), } } pub fn is_bare(&self) -> bool { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - .. - }) => true, + match self.unspanned() { + Token::Bare => true, _ => false, } } pub fn is_string(&self) -> bool { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::String(_), - .. 
- }) => true, + match self.unspanned() { + Token::String(_) => true, _ => false, } } pub fn is_number(&self) -> bool { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::Number(_), - .. - }) => true, + match self.unspanned() { + Token::Number(_) => true, _ => false, } } pub fn as_string(&self) -> Option<(Span, Span)> { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::String(inner_span), - span: outer_span, - }) => Some((*outer_span, *inner_span)), + match self.unspanned() { + Token::String(inner_span) => Some((self.span(), *inner_span)), _ => None, } } pub fn is_pattern(&self) -> bool { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::GlobPattern, - .. - }) => true, + match self.unspanned() { + Token::GlobPattern => true, _ => false, } } pub fn is_word(&self) -> bool { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - .. - }) => true, + match self.unspanned() { + Token::Bare => true, _ => false, } } pub fn is_int(&self) -> bool { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::Number(RawNumber::Int(_)), - .. - }) => true, + match self.unspanned() { + Token::Number(RawNumber::Int(_)) => true, _ => false, } } pub fn is_dot(&self) -> bool { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot), - .. - }) => true, + match self.unspanned() { + Token::EvaluationOperator(EvaluationOperator::Dot) => true, _ => false, } } - pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> { - match self { - TokenNode::Delimited(Spanned { - item: - DelimitedNode { - delimiter, - children, - spans, - }, - span, - }) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)), - _ => None, - } - } - - pub fn is_external(&self) -> bool { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::ExternalCommand(..), - .. - }) => true, - _ => false, - } - } - - pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option { - match self { - TokenNode::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => { - Some(*flag) + pub fn as_block(&self) -> Option<(Spanned<&[SpannedToken]>, (Span, Span))> { + match self.unspanned() { + Token::Delimited(DelimitedNode { + delimiter, + children, + spans, + }) if *delimiter == Delimiter::Brace => { + Some(((&children[..]).spanned(self.span()), *spans)) } _ => None, } } + pub fn is_external(&self) -> bool { + match self.unspanned() { + Token::ExternalCommand(..) => true, + _ => false, + } + } + + pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option { + match self.unspanned() { + Token::Flag(flag @ Flag { .. 
}) if value == flag.name().slice(source) => Some(*flag), + _ => None, + } + } + pub fn as_pipeline(&self) -> Result { - match self { - TokenNode::Pipeline(pipeline) => Ok(pipeline.clone()), - other => Err(ParseError::mismatch( - "pipeline", - other.type_name().spanned(other.span()), - )), + match self.unspanned() { + Token::Pipeline(pipeline) => Ok(pipeline.clone()), + _ => Err(ParseError::mismatch("pipeline", self.spanned_type_name())), } } pub fn is_whitespace(&self) -> bool { - match self { - TokenNode::Whitespace(_) => true, + match self.unspanned() { + Token::Whitespace => true, _ => false, } } @@ -315,7 +388,13 @@ impl TokenNode { pub struct DelimitedNode { pub(crate) delimiter: Delimiter, pub(crate) spans: (Span, Span), - pub(crate) children: Vec, + pub(crate) children: Vec, +} + +impl HasSpan for DelimitedNode { + fn span(&self) -> Span { + self.spans.0.until(self.spans.1) + } } impl PrettyDebugWithSource for DelimitedNode { @@ -369,79 +448,68 @@ impl Delimiter { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct PathNode { - head: Box, - tail: Vec, + head: Box, + tail: Vec, } #[cfg(test)] -impl TokenNode { +impl SpannedToken { pub fn expect_external(&self) -> Span { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::ExternalCommand(span), - .. - }) => *span, - other => panic!( + match self.unspanned() { + Token::ExternalCommand(span) => *span, + _ => panic!( "Only call expect_external if you checked is_external first, found {:?}", - other + self ), } } pub fn expect_string(&self) -> (Span, Span) { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::String(inner_span), - span: outer_span, - }) => (*outer_span, *inner_span), + match self.unspanned() { + Token::String(inner_span) => (self.span(), *inner_span), other => panic!("Expected string, found {:?}", other), } } - pub fn expect_list(&self) -> Spanned<&[TokenNode]> { - match self { - TokenNode::Nodes(token_nodes) => token_nodes[..].spanned(token_nodes.span), - other => panic!("Expected list, found {:?}", other), + pub fn expect_list(&self) -> Spanned> { + match self.unspanned() { + Token::Pipeline(pipeline) => pipeline + .parts() + .iter() + .flat_map(|part| part.tokens()) + .cloned() + .collect::>() + .spanned(self.span()), + _ => panic!("Expected list, found {:?}", self), } } pub fn expect_pattern(&self) -> Span { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::GlobPattern, - span: outer_span, - }) => *outer_span, - other => panic!("Expected pattern, found {:?}", other), + match self.unspanned() { + Token::GlobPattern => self.span(), + _ => panic!("Expected pattern, found {:?}", self), } } pub fn expect_var(&self) -> (Span, Span) { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::Variable(inner_span), - span: outer_span, - }) => (*outer_span, *inner_span), + match self.unspanned() { + Token::Variable(inner_span) => (self.span(), *inner_span), + Token::ItVariable(inner_span) => (self.span(), *inner_span), other => panic!("Expected var, found {:?}", other), } } pub fn expect_dot(&self) -> Span { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot), - span, - }) => *span, + match self.unspanned() { + Token::EvaluationOperator(EvaluationOperator::Dot) => self.span(), other => panic!("Expected dot, found {:?}", other), } } pub fn expect_bare(&self) -> Span { - match self { - TokenNode::Token(Token { - unspanned: UnspannedToken::Bare, - 
span, - }) => *span, - other => panic!("Expected bare, found {:?}", other), + match self.unspanned() { + Token::Bare => self.span(), + _ => panic!("Expected bare, found {:?}", self), } } } diff --git a/crates/nu-parser/src/parse/token_tree_builder.rs b/crates/nu-parser/src/parse/token_tree_builder.rs index 131dbd5c81..3634c2a603 100644 --- a/crates/nu-parser/src/parse/token_tree_builder.rs +++ b/crates/nu-parser/src/parse/token_tree_builder.rs @@ -1,10 +1,10 @@ use crate::parse::call_node::CallNode; use crate::parse::comment::Comment; use crate::parse::flag::{Flag, FlagKind}; +use crate::parse::number::RawNumber; use crate::parse::operator::{CompareOperator, EvaluationOperator}; use crate::parse::pipeline::{Pipeline, PipelineElement}; -use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; -use crate::parse::tokens::{RawNumber, UnspannedToken}; +use crate::parse::token_tree::{DelimitedNode, Delimiter, SpannedToken, Token}; use bigdecimal::BigDecimal; use nu_source::{Span, Spanned, SpannedItem}; use num_bigint::BigInt; @@ -21,11 +21,11 @@ impl TokenTreeBuilder { } } -pub type CurriedToken = Box TokenNode + 'static>; +pub type CurriedToken = Box SpannedToken + 'static>; pub type CurriedCall = Box Spanned + 'static>; impl TokenTreeBuilder { - pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { + pub fn build(block: impl FnOnce(&mut Self) -> SpannedToken) -> (SpannedToken, String) { let mut builder = TokenTreeBuilder::new(); let node = block(&mut builder); (node, builder.output) @@ -77,8 +77,8 @@ impl TokenTreeBuilder { }) } - pub fn spanned_pipeline(input: Vec, span: impl Into) -> TokenNode { - TokenNode::Pipeline(Pipeline::new(input, span.into())) + pub fn spanned_pipeline(input: Vec, span: impl Into) -> SpannedToken { + Token::Pipeline(Pipeline::new(input)).into_spanned(span) } pub fn token_list(input: Vec) -> CurriedToken { @@ -91,8 +91,28 @@ impl TokenTreeBuilder { }) } - pub fn spanned_token_list(input: Vec, span: impl Into) -> TokenNode { - TokenNode::Nodes(input.spanned(span.into())) + pub fn spanned_token_list(input: Vec, span: impl Into) -> SpannedToken { + let span = span.into(); + Token::Pipeline(Pipeline::new(vec![PipelineElement::new( + None, + input.spanned(span), + )])) + .into_spanned(span) + } + + pub fn garbage(input: impl Into) -> CurriedToken { + let input = input.into(); + + Box::new(move |b| { + let (start, end) = b.consume(&input); + b.pos = end; + + TokenTreeBuilder::spanned_garbage(Span::new(start, end)) + }) + } + + pub fn spanned_garbage(span: impl Into) -> SpannedToken { + Token::Garbage.into_spanned(span) } pub fn op(input: impl Into) -> CurriedToken { @@ -107,8 +127,11 @@ impl TokenTreeBuilder { }) } - pub fn spanned_cmp_op(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Token(UnspannedToken::CompareOperator(input.into()).into_token(span)) + pub fn spanned_cmp_op( + input: impl Into, + span: impl Into, + ) -> SpannedToken { + Token::CompareOperator(input.into()).into_spanned(span) } pub fn dot() -> CurriedToken { @@ -134,8 +157,8 @@ impl TokenTreeBuilder { pub fn spanned_eval_op( input: impl Into, span: impl Into, - ) -> TokenNode { - TokenNode::Token(UnspannedToken::EvaluationOperator(input.into()).into_token(span)) + ) -> SpannedToken { + Token::EvaluationOperator(input.into()).into_spanned(span) } pub fn string(input: impl Into) -> CurriedToken { @@ -154,8 +177,8 @@ impl TokenTreeBuilder { }) } - pub fn spanned_string(input: impl Into, span: impl Into) -> TokenNode { - 
TokenNode::Token(UnspannedToken::String(input.into()).into_token(span)) + pub fn spanned_string(input: impl Into, span: impl Into) -> SpannedToken { + Token::String(input.into()).into_spanned(span) } pub fn bare(input: impl Into) -> CurriedToken { @@ -169,8 +192,8 @@ impl TokenTreeBuilder { }) } - pub fn spanned_bare(span: impl Into) -> TokenNode { - TokenNode::Token(UnspannedToken::Bare.into_token(span)) + pub fn spanned_bare(span: impl Into) -> SpannedToken { + Token::Bare.into_spanned(span) } pub fn pattern(input: impl Into) -> CurriedToken { @@ -184,8 +207,8 @@ impl TokenTreeBuilder { }) } - pub fn spanned_pattern(input: impl Into) -> TokenNode { - TokenNode::Token(UnspannedToken::GlobPattern.into_token(input)) + pub fn spanned_pattern(input: impl Into) -> SpannedToken { + Token::GlobPattern.into_spanned(input) } pub fn external_word(input: impl Into) -> CurriedToken { @@ -199,8 +222,8 @@ impl TokenTreeBuilder { }) } - pub fn spanned_external_word(input: impl Into) -> TokenNode { - TokenNode::Token(UnspannedToken::ExternalWord.into_token(input)) + pub fn spanned_external_word(input: impl Into) -> SpannedToken { + Token::ExternalWord.into_spanned(input) } pub fn external_command(input: impl Into) -> CurriedToken { @@ -218,8 +241,11 @@ impl TokenTreeBuilder { }) } - pub fn spanned_external_command(inner: impl Into, outer: impl Into) -> TokenNode { - TokenNode::Token(UnspannedToken::ExternalCommand(inner.into()).into_token(outer)) + pub fn spanned_external_command( + inner: impl Into, + outer: impl Into, + ) -> SpannedToken { + Token::ExternalCommand(inner.into()).into_spanned(outer) } pub fn int(input: impl Into) -> CurriedToken { @@ -250,8 +276,8 @@ impl TokenTreeBuilder { }) } - pub fn spanned_number(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Token(UnspannedToken::Number(input.into()).into_token(span)) + pub fn spanned_number(input: impl Into, span: impl Into) -> SpannedToken { + Token::Number(input.into()).into_spanned(span) } pub fn var(input: impl Into) -> CurriedToken { @@ -265,8 +291,21 @@ impl TokenTreeBuilder { }) } - pub fn spanned_var(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Token(UnspannedToken::Variable(input.into()).into_token(span)) + pub fn spanned_var(input: impl Into, span: impl Into) -> SpannedToken { + Token::Variable(input.into()).into_spanned(span) + } + + pub fn it_var() -> CurriedToken { + Box::new(move |b| { + let (start, _) = b.consume("$"); + let (inner_start, end) = b.consume("it"); + + TokenTreeBuilder::spanned_it_var(Span::new(inner_start, end), Span::new(start, end)) + }) + } + + pub fn spanned_it_var(input: impl Into, span: impl Into) -> SpannedToken { + Token::ItVariable(input.into()).into_spanned(span) } pub fn flag(input: impl Into) -> CurriedToken { @@ -280,8 +319,9 @@ impl TokenTreeBuilder { }) } - pub fn spanned_flag(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into(), span.into())) + pub fn spanned_flag(input: impl Into, span: impl Into) -> SpannedToken { + let span = span.into(); + Token::Flag(Flag::new(FlagKind::Longhand, input.into())).into_spanned(span) } pub fn shorthand(input: impl Into) -> CurriedToken { @@ -295,8 +335,10 @@ impl TokenTreeBuilder { }) } - pub fn spanned_shorthand(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into(), span.into())) + pub fn spanned_shorthand(input: impl Into, span: impl Into) -> SpannedToken { + let span = span.into(); + + 
Token::Flag(Flag::new(FlagKind::Shorthand, input.into())).into_spanned(span) } pub fn call(head: CurriedToken, input: Vec) -> CurriedCall { @@ -316,7 +358,7 @@ impl TokenTreeBuilder { }) } - pub fn spanned_call(input: Vec, span: impl Into) -> Spanned { + pub fn spanned_call(input: Vec, span: impl Into) -> Spanned { if input.is_empty() { panic!("BUG: spanned call (TODO)") } @@ -337,7 +379,7 @@ impl TokenTreeBuilder { input: Vec, _open: &str, _close: &str, - ) -> (Span, Span, Span, Vec) { + ) -> (Span, Span, Span, Vec) { let (start_open_paren, end_open_paren) = self.consume("("); let mut output = vec![]; for item in input { @@ -362,13 +404,12 @@ impl TokenTreeBuilder { } pub fn spanned_parens( - input: impl Into>, + input: impl Into>, spans: (Span, Span), span: impl Into, - ) -> TokenNode { - TokenNode::Delimited( - DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()), - ) + ) -> SpannedToken { + Token::Delimited(DelimitedNode::new(Delimiter::Paren, spans, input.into())) + .into_spanned(span.into()) } pub fn square(input: Vec) -> CurriedToken { @@ -380,13 +421,12 @@ impl TokenTreeBuilder { } pub fn spanned_square( - input: impl Into>, + input: impl Into>, spans: (Span, Span), span: impl Into, - ) -> TokenNode { - TokenNode::Delimited( - DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()), - ) + ) -> SpannedToken { + Token::Delimited(DelimitedNode::new(Delimiter::Square, spans, input.into())) + .into_spanned(span) } pub fn braced(input: Vec) -> CurriedToken { @@ -398,19 +438,18 @@ impl TokenTreeBuilder { } pub fn spanned_brace( - input: impl Into>, + input: impl Into>, spans: (Span, Span), span: impl Into, - ) -> TokenNode { - TokenNode::Delimited( - DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()), - ) + ) -> SpannedToken { + Token::Delimited(DelimitedNode::new(Delimiter::Brace, spans, input.into())) + .into_spanned(span) } pub fn sp() -> CurriedToken { Box::new(|b| { let (start, end) = b.consume(" "); - TokenNode::Whitespace(Span::new(start, end)) + Token::Whitespace.into_spanned((start, end)) }) } @@ -423,8 +462,8 @@ impl TokenTreeBuilder { }) } - pub fn spanned_ws(span: impl Into) -> TokenNode { - TokenNode::Whitespace(span.into()) + pub fn spanned_ws(span: impl Into) -> SpannedToken { + Token::Whitespace.into_spanned(span) } pub fn sep(input: impl Into) -> CurriedToken { @@ -436,8 +475,8 @@ impl TokenTreeBuilder { }) } - pub fn spanned_sep(span: impl Into) -> TokenNode { - TokenNode::Separator(span.into()) + pub fn spanned_sep(span: impl Into) -> SpannedToken { + Token::Separator.into_spanned(span) } pub fn comment(input: impl Into) -> CurriedToken { @@ -453,8 +492,10 @@ impl TokenTreeBuilder { }) } - pub fn spanned_comment(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Comment(Comment::line(input, span)) + pub fn spanned_comment(input: impl Into, span: impl Into) -> SpannedToken { + let span = span.into(); + + Token::Comment(Comment::line(input)).into_spanned(span) } fn consume(&mut self, input: &str) -> (usize, usize) { diff --git a/crates/nu-parser/src/parse_command.rs b/crates/nu-parser/src/parse_command.rs index 0f13aea7a9..76159593ac 100644 --- a/crates/nu-parser/src/parse_command.rs +++ b/crates/nu-parser/src/parse_command.rs @@ -1,89 +1,109 @@ use crate::hir::syntax_shape::{ - color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced, - BackoffColoringMode, ColorSyntax, MaybeSpaceShape, + BackoffColoringMode, ExpandSyntax, MaybeSpaceShape, MaybeWhitespaceEof, 
}; +use crate::hir::SpannedExpression; use crate::TokensIterator; use crate::{ - hir::{self, ExpandContext, NamedArguments}, + hir::{self, NamedArguments}, Flag, }; use log::trace; -use nu_source::{PrettyDebugWithSource, Span, Spanned, SpannedItem, Text}; - use nu_errors::{ArgumentError, ParseError}; -use nu_protocol::{NamedType, PositionalType, Signature}; +use nu_protocol::{NamedType, PositionalType, Signature, SyntaxShape}; +use nu_source::{HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem}; + +type OptionalHeadTail = (Option>, Option); pub fn parse_command_tail( config: &Signature, - context: &ExpandContext, tail: &mut TokensIterator, command_span: Span, -) -> Result>, Option)>, ParseError> { +) -> Result, ParseError> { let mut named = NamedArguments::new(); - trace_remaining("nodes", &tail, context.source()); + let mut found_error: Option = None; + let mut rest_signature = config.clone(); + + trace!(target: "nu::parse::trace_remaining", ""); + + trace_remaining("nodes", &tail); for (name, kind) in &config.named { - trace!(target: "nu::parse", "looking for {} : {:?}", name, kind); + trace!(target: "nu::parse::trace_remaining", "looking for {} : {:?}", name, kind); + + tail.move_to(0); match &kind.0 { NamedType::Switch => { - let flag = extract_switch(name, tail, context.source()); + let switch = extract_switch(name, tail); - named.insert_switch(name, flag); + match switch { + None => named.insert_switch(name, None), + Some((_, flag)) => { + named.insert_switch(name, Some(*flag)); + rest_signature.remove_named(name); + tail.color_shape(flag.color(flag.span)); + } + } } NamedType::Help => { - let flag = extract_switch(name, tail, context.source()); + let switch = extract_switch(name, tail); - named.insert_switch(name, flag); - if flag.is_some() { - return Ok(Some((None, Some(named)))); + match switch { + None => named.insert_switch(name, None), + Some((_, flag)) => { + named.insert_switch(name, Some(*flag)); + return Ok(Some((None, Some(named)))); + } } } NamedType::Mandatory(syntax_type) => { - match extract_mandatory(config, name, tail, context.source(), command_span) { - Err(err) => return Err(err), // produce a correct diagnostic + match extract_mandatory(config, name, tail, command_span) { + Err(err) => { + // remember this error, but continue coloring + found_error = Some(err); + } Ok((pos, flag)) => { - tail.move_to(pos); + let result = expand_flag(tail, *syntax_type, flag, pos); - if tail.at_end() { - return Err(ParseError::argument_error( - config.name.clone().spanned(flag.span), - ArgumentError::MissingValueForName(name.to_string()), - )); + match result { + Ok(expr) => { + named.insert_mandatory(name, expr); + rest_signature.remove_named(name); + } + Err(_) => { + found_error = Some(ParseError::argument_error( + config.name.clone().spanned(flag.span), + ArgumentError::MissingValueForName(name.to_string()), + )) + } } - - let expr = expand_expr(&spaced(*syntax_type), tail, context)?; - - tail.restart(); - named.insert_mandatory(name, expr); } } } NamedType::Optional(syntax_type) => { - match extract_optional(name, tail, context.source()) { - Err(err) => return Err(err), // produce a correct diagnostic + match extract_optional(name, tail) { + Err(err) => { + // remember this error, but continue coloring + found_error = Some(err); + } Ok(Some((pos, flag))) => { - tail.move_to(pos); + let result = expand_flag(tail, *syntax_type, flag, pos); - if tail.at_end() { - return Err(ParseError::argument_error( - config.name.clone().spanned(flag.span), - 
ArgumentError::MissingValueForName(name.to_string()), - )); + match result { + Ok(expr) => { + named.insert_optional(name, Some(expr)); + rest_signature.remove_named(name); + } + Err(_) => { + found_error = Some(ParseError::argument_error( + config.name.clone().spanned(flag.span), + ArgumentError::MissingValueForName(name.to_string()), + )) + } } - - let expr = expand_expr(&spaced(*syntax_type), tail, context); - - match expr { - Err(_) => named.insert_optional(name, None), - Ok(expr) => named.insert_optional(name, Some(expr)), - } - - tail.restart(); } Ok(None) => { - tail.restart(); named.insert_optional(name, None); } } @@ -91,56 +111,88 @@ pub fn parse_command_tail( }; } - trace_remaining("after named", &tail, context.source()); + trace_remaining("after named", &tail); let mut positional = vec![]; for arg in &config.positional { - trace!(target: "nu::parse", "Processing positional {:?}", arg); + trace!(target: "nu::parse::trace_remaining", "Processing positional {:?}", arg); - match &arg.0 { - PositionalType::Mandatory(..) => { - if tail.at_end_possible_ws() { - return Err(ParseError::argument_error( - config.name.clone().spanned(command_span), - ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()), - )); - } - } + tail.move_to(0); + + let result = expand_spaced_expr(arg.0.syntax_type(), tail); + + match result { + Err(_) => match &arg.0 { + PositionalType::Mandatory(..) => { + if found_error.is_none() { + found_error = Some(ParseError::argument_error( + config.name.clone().spanned(command_span), + ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()), + )); + } - PositionalType::Optional(..) => { - if tail.at_end_possible_ws() { break; } + + PositionalType::Optional(..) => { + if tail.expand_syntax(MaybeWhitespaceEof).is_ok() { + break; + } + } + }, + Ok(result) => { + rest_signature.shift_positional(); + positional.push(result); } } - - let result = expand_expr(&spaced(arg.0.syntax_type()), tail, context)?; - - positional.push(result); } - trace_remaining("after positional", &tail, context.source()); + trace_remaining("after positional", &tail); if let Some((syntax_type, _)) = config.rest_positional { let mut out = vec![]; loop { - if tail.at_end_possible_ws() { + if found_error.is_some() { break; } - let next = expand_expr(&spaced(syntax_type), tail, context)?; + tail.move_to(0); - out.push(next); + trace_remaining("start rest", &tail); + eat_any_whitespace(tail); + trace_remaining("after whitespace", &tail); + + if tail.at_end() { + break; + } + + match tail.expand_syntax(syntax_type) { + Err(err) => found_error = Some(err), + Ok(next) => out.push(next), + }; } positional.extend(out); } - trace_remaining("after rest", &tail, context.source()); + eat_any_whitespace(tail); - trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named); + // Consume any remaining tokens with backoff coloring mode + tail.expand_infallible(BackoffColoringMode::new(rest_signature.allowed())); + + // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring + // this solution. 
+ tail.sort_shapes(); + + if let Some(err) = found_error { + return Err(err); + } + + trace_remaining("after rest", &tail); + + trace!(target: "nu::parse::trace_remaining", "Constructed positional={:?} named={:?}", positional, named); let positional = if positional.is_empty() { None @@ -156,237 +208,72 @@ pub fn parse_command_tail( Some(named) }; - trace!(target: "nu::parse", "Normalized positional={:?} named={:?}", positional, named); + trace!(target: "nu::parse::trace_remaining", "Normalized positional={:?} named={:?}", positional, named); Ok(Some((positional, named))) } -#[derive(Debug)] -struct ColoringArgs { - vec: Vec>>>, -} - -impl ColoringArgs { - fn new(len: usize) -> ColoringArgs { - let vec = vec![None; len]; - ColoringArgs { vec } - } - - fn insert(&mut self, pos: usize, shapes: Vec>) { - self.vec[pos] = Some(shapes); - } - - fn spread_shapes(self, shapes: &mut Vec>) { - for item in self.vec { - match item { - None => {} - Some(vec) => { - shapes.extend(vec); - } - } +fn eat_any_whitespace(tail: &mut TokensIterator) { + loop { + match tail.expand_infallible(MaybeSpaceShape) { + None => break, + Some(_) => continue, } } } -#[derive(Debug, Copy, Clone)] -pub struct CommandTailShape; +fn expand_flag( + token_nodes: &mut TokensIterator, + syntax_type: SyntaxShape, + flag: Spanned, + pos: usize, +) -> Result { + token_nodes.color_shape(flag.color(flag.span)); -impl ColorSyntax for CommandTailShape { - type Info = (); - type Input = Signature; + let result = token_nodes.atomic_parse(|token_nodes| { + token_nodes.move_to(pos); - fn name(&self) -> &'static str { - "CommandTailShape" - } - - fn color_syntax<'a, 'b>( - &self, - signature: &Signature, - token_nodes: &'b mut TokensIterator<'a>, - context: &ExpandContext, - ) -> Self::Info { - use nu_protocol::SyntaxShape; - - let mut args = ColoringArgs::new(token_nodes.len()); - trace_remaining("nodes", &token_nodes, context.source()); - - fn insert_flag( - token_nodes: &mut TokensIterator, - syntax_type: SyntaxShape, - args: &mut ColoringArgs, - flag: Flag, - pos: usize, - context: &ExpandContext, - ) { - let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { - token_nodes.color_shape(flag.color()); - token_nodes.move_to(pos); - - if token_nodes.at_end() { - return Ok(()); - } - - // We still want to color the flag even if the following tokens don't match, so don't - // propagate the error to the parent atomic block if it fails - let _ = token_nodes.atomic(|token_nodes| { - // We can live with unmatched syntax after a mandatory flag - color_syntax(&MaybeSpaceShape, token_nodes, context); - - // If the part after a mandatory flag isn't present, that's ok, but we - // should roll back any whitespace we chomped - color_fallible_syntax(&syntax_type, token_nodes, context)?; - - Ok(()) - }); - - Ok(()) - }); - - args.insert(pos, shapes); - token_nodes.restart(); + if token_nodes.at_end() { + return Err(ParseError::unexpected_eof("flag", Span::unknown())); } - for (name, kind) in &signature.named { - trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); + let expr = expand_spaced_expr(syntax_type, token_nodes)?; - match &kind.0 { - NamedType::Switch | NamedType::Help => { - if let Some((pos, flag)) = - token_nodes.extract(|t| t.as_flag(name, context.source())) - { - args.insert(pos, vec![flag.color()]) - } - } - NamedType::Mandatory(syntax_type) => { - match extract_mandatory( - signature, - name, - token_nodes, - context.source(), - Span::unknown(), - ) { - Err(_) => { - // The mandatory flag didn't exist at 
all, so there's nothing to color - } - Ok((pos, flag)) => { - insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context) - } - } - } - NamedType::Optional(syntax_type) => { - match extract_optional(name, token_nodes, context.source()) { - Err(_) => { - // The optional flag didn't exist at all, so there's nothing to color - } - Ok(Some((pos, flag))) => { - insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context) - } + Ok(expr) + }); - Ok(None) => { - token_nodes.restart(); - } - } - } - }; - } - - trace_remaining("after named", &token_nodes, context.source()); - - for arg in &signature.positional { - trace!("Processing positional {:?}", arg); - - match &arg.0 { - PositionalType::Mandatory(..) => { - if token_nodes.at_end() { - break; - } - } - - PositionalType::Optional(..) => { - if token_nodes.at_end() { - break; - } - } - } - - let pos = token_nodes.pos(false); - - match pos { - None => break, - Some(pos) => { - // We can live with an unmatched positional argument. Hopefully it will be - // matched by a future token - let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { - color_syntax(&MaybeSpaceShape, token_nodes, context); - - // If no match, we should roll back any whitespace we chomped - color_fallible_syntax(&arg.0.syntax_type(), token_nodes, context)?; - - Ok(()) - }); - - args.insert(pos, shapes); - } - } - } - - trace_remaining("after positional", &token_nodes, context.source()); - - if let Some((syntax_type, _)) = signature.rest_positional { - loop { - if token_nodes.at_end_possible_ws() { - break; - } - - let pos = token_nodes.pos(false); - - match pos { - None => break, - Some(pos) => { - // If any arguments don't match, we'll fall back to backoff coloring mode - let (result, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { - color_syntax(&MaybeSpaceShape, token_nodes, context); - - // If no match, we should roll back any whitespace we chomped - color_fallible_syntax(&syntax_type, token_nodes, context)?; - - Ok(()) - }); - - args.insert(pos, shapes); - - match result { - Err(_) => break, - Ok(_) => continue, - } - } - } - } - } - - token_nodes.silently_mutate_shapes(|shapes| args.spread_shapes(shapes)); - - // Consume any remaining tokens with backoff coloring mode - color_syntax(&BackoffColoringMode, token_nodes, context); - - // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring - // this solution. 
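Both the removed atomic/atomic_returning_shapes blocks above and the new atomic_parse calls rely on the same checkpoint-and-rollback idea: run a sub-parse against the cursor, and restore the cursor position if it fails so any chomped whitespace or partially consumed tokens are rolled back. A rough standalone sketch of that idea follows, with a hypothetical Cursor type standing in for TokensIterator; none of these names come from the patch itself.

// Hypothetical sketch of the checkpoint/rollback idea behind atomic parsing.
struct Cursor<'a> {
    tokens: &'a [&'a str],
    pos: usize,
}

impl<'a> Cursor<'a> {
    fn atomic_parse<T, E>(
        &mut self,
        block: impl FnOnce(&mut Cursor<'a>) -> Result<T, E>,
    ) -> Result<T, E> {
        let checkpoint = self.pos;
        match block(self) {
            Ok(value) => Ok(value), // commit: keep the advanced position
            Err(err) => {
                self.pos = checkpoint; // roll back: undo any consumption
                Err(err)
            }
        }
    }

    fn expect(&mut self, expected: &str) -> Result<(), String> {
        match self.tokens.get(self.pos) {
            Some(tok) if *tok == expected => {
                self.pos += 1;
                Ok(())
            }
            other => Err(format!("expected {:?}, found {:?}", expected, other)),
        }
    }
}

fn main() {
    let tokens = ["--flag", "value"];
    let mut cursor = Cursor { tokens: &tokens, pos: 0 };

    // Fails halfway through, so the cursor is restored to where it started.
    let _ = cursor.atomic_parse(|c| {
        c.expect("--flag")?;
        c.expect("something-else")
    });
    assert_eq!(cursor.pos, 0);

    // Succeeds, so both tokens stay consumed.
    assert!(cursor
        .atomic_parse(|c| { c.expect("--flag")?; c.expect("value") })
        .is_ok());
    assert_eq!(cursor.pos, 2);
}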
- token_nodes.sort_shapes() - } + let expr = result.map_err(|_| ())?; + Ok(expr) } -fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option { - tokens.extract(|t| t.as_flag(name, source)).map(|f| f.1) +fn expand_spaced_expr< + T: HasFallibleSpan + PrettyDebugWithSource + Clone + std::fmt::Debug + 'static, +>( + syntax: impl ExpandSyntax>, + token_nodes: &mut TokensIterator, +) -> Result { + token_nodes.atomic_parse(|token_nodes| { + token_nodes.expand_infallible(MaybeSpaceShape); + token_nodes.expand_syntax(syntax) + }) +} + +fn extract_switch( + name: &str, + tokens: &mut hir::TokensIterator<'_>, +) -> Option<(usize, Spanned)> { + let source = tokens.source(); + tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span()))) } fn extract_mandatory( config: &Signature, name: &str, tokens: &mut hir::TokensIterator<'_>, - source: &Text, span: Span, -) -> Result<(usize, Flag), ParseError> { - let flag = tokens.extract(|t| t.as_flag(name, source)); +) -> Result<(usize, Spanned), ParseError> { + let source = tokens.source(); + let flag = tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span()))); match flag { None => Err(ParseError::argument_error( @@ -404,9 +291,9 @@ fn extract_mandatory( fn extract_optional( name: &str, tokens: &mut hir::TokensIterator<'_>, - source: &Text, -) -> Result, ParseError> { - let flag = tokens.extract(|t| t.as_flag(name, source)); +) -> Result)>, ParseError> { + let source = tokens.source(); + let flag = tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span()))); match flag { None => Ok(None), @@ -417,15 +304,24 @@ fn extract_optional( } } -pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>, source: &Text) { +pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>) { + let offset = tail.clone().span_at_cursor(); + let source = tail.source(); + trace!( - target: "nu::parse", - "{} = {:?}", + target: "nu::parse::trace_remaining", + "{} = {}", desc, itertools::join( tail.debug_remaining() .iter() - .map(|i| format!("%{}%", i.debug(source))), + .map(|val| { + if val.span().start() == offset.start() { + format!("<|> %{}%", val.debug(&source)) + } else { + format!("%{}%", val.debug(&source)) + } + }), " " ) ); diff --git a/crates/nu-protocol/src/signature.rs b/crates/nu-protocol/src/signature.rs index 117c89be83..b92510d4e3 100644 --- a/crates/nu-protocol/src/signature.rs +++ b/crates/nu-protocol/src/signature.rs @@ -109,6 +109,35 @@ pub struct Signature { pub is_filter: bool, } +impl Signature { + pub fn shift_positional(&mut self) { + self.positional = Vec::from(&self.positional[1..]); + } + + pub fn remove_named(&mut self, name: &str) { + self.named.remove(name); + } + + pub fn allowed(&self) -> Vec { + let mut allowed = indexmap::IndexSet::new(); + + for (name, _) in &self.named { + allowed.insert(format!("--{}", name)); + } + + for (ty, _) in &self.positional { + let shape = ty.syntax_type(); + allowed.insert(shape.display()); + } + + if let Some((shape, _)) = &self.rest_positional { + allowed.insert(shape.display()); + } + + allowed.into_iter().collect() + } +} + impl PrettyDebugWithSource for Signature { /// Prepare a Signature for pretty-printing fn pretty_debug(&self, source: &str) -> DebugDocBuilder { diff --git a/crates/nu-protocol/src/syntax_shape.rs b/crates/nu-protocol/src/syntax_shape.rs index 6ddfe3fc40..d5f8e800dc 100644 --- a/crates/nu-protocol/src/syntax_shape.rs +++ b/crates/nu-protocol/src/syntax_shape.rs @@ -30,16 
+30,16 @@ impl PrettyDebug for SyntaxShape { /// Prepare SyntaxShape for pretty-printing fn pretty(&self) -> DebugDocBuilder { b::kind(match self { - SyntaxShape::Any => "any shape", - SyntaxShape::String => "string shape", - SyntaxShape::Member => "member shape", - SyntaxShape::ColumnPath => "column path shape", - SyntaxShape::Number => "number shape", - SyntaxShape::Range => "range shape", - SyntaxShape::Int => "integer shape", - SyntaxShape::Path => "file path shape", - SyntaxShape::Pattern => "pattern shape", - SyntaxShape::Block => "block shape", + SyntaxShape::Any => "any", + SyntaxShape::String => "string", + SyntaxShape::Member => "member", + SyntaxShape::ColumnPath => "column path", + SyntaxShape::Number => "number", + SyntaxShape::Range => "range", + SyntaxShape::Int => "integer", + SyntaxShape::Path => "file path", + SyntaxShape::Pattern => "pattern", + SyntaxShape::Block => "block", }) } } diff --git a/crates/nu-source/src/lib.rs b/crates/nu-source/src/lib.rs index 524e53b8c7..216d7ed35f 100644 --- a/crates/nu-source/src/lib.rs +++ b/crates/nu-source/src/lib.rs @@ -6,10 +6,11 @@ mod tracable; pub use self::meta::{ span_for_spanned_list, tag_for_tagged_list, AnchorLocation, HasFallibleSpan, HasSpan, HasTag, - Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem, + IntoSpanned, Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem, }; pub use self::pretty::{ - b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource, ShellAnnotation, + b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugRefineKind, PrettyDebugWithSource, + ShellAnnotation, }; pub use self::term_colored::TermColored; pub use self::text::Text; diff --git a/crates/nu-source/src/meta.rs b/crates/nu-source/src/meta.rs index bfd262acbe..57e41525e7 100644 --- a/crates/nu-source/src/meta.rs +++ b/crates/nu-source/src/meta.rs @@ -490,6 +490,10 @@ impl Span { } } + pub fn contains(&self, pos: usize) -> bool { + self.start <= pos && self.end >= pos + } + pub fn since(&self, other: impl Into) -> Span { let other = other.into(); @@ -568,29 +572,66 @@ impl language_reporting::ReportingSpan for Span { } } -pub trait HasSpan: PrettyDebugWithSource { - fn span(&self) -> Span; +pub trait IntoSpanned { + type Output: HasFallibleSpan; + + fn into_spanned(self, span: impl Into) -> Self::Output; } -pub trait HasFallibleSpan: PrettyDebugWithSource { - fn maybe_span(&self) -> Option; -} - -impl HasFallibleSpan for T { - fn maybe_span(&self) -> Option { - Some(HasSpan::span(self)) +impl IntoSpanned for T { + type Output = T; + fn into_spanned(self, _span: impl Into) -> Self::Output { + self } } -impl HasSpan for Spanned +pub trait HasSpan { + fn span(&self) -> Span; +} + +impl HasSpan for Result where - Spanned: PrettyDebugWithSource, + T: HasSpan, { + fn span(&self) -> Span { + match self { + Result::Ok(val) => val.span(), + Result::Err(_) => Span::unknown(), + } + } +} + +impl HasSpan for Spanned { fn span(&self) -> Span { self.span } } +pub trait HasFallibleSpan { + fn maybe_span(&self) -> Option; +} + +impl HasFallibleSpan for bool { + fn maybe_span(&self) -> Option { + None + } +} + +impl HasFallibleSpan for () { + fn maybe_span(&self) -> Option { + None + } +} + +impl HasFallibleSpan for T +where + T: HasSpan, +{ + fn maybe_span(&self) -> Option { + Some(HasSpan::span(self)) + } +} + impl PrettyDebugWithSource for Option { fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match self { @@ -609,8 +650,8 @@ impl HasFallibleSpan for Option { impl PrettyDebugWithSource for Span { fn pretty_debug(&self, source: &str) -> 
DebugDocBuilder { b::typed( - "spanned", - b::keyword("for") + b::space() + b::description(format!("{:?}", source)), + "span", + b::keyword("for") + b::space() + b::description(format!("{:?}", self.slice(source))), ) } } @@ -628,15 +669,12 @@ where fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match self { None => b::description("nothing"), - Some(v) => v.pretty_debug(source), + Some(v) => v.pretty_debug(v.span.slice(source)), } } } -impl HasFallibleSpan for Option> -where - Spanned: PrettyDebugWithSource, -{ +impl HasFallibleSpan for Option> { fn maybe_span(&self) -> Option { match self { None => None, @@ -657,10 +695,7 @@ where } } -impl HasFallibleSpan for Option> -where - Tagged: PrettyDebugWithSource, -{ +impl HasFallibleSpan for Option> { fn maybe_span(&self) -> Option { match self { None => None, @@ -669,10 +704,7 @@ where } } -impl HasSpan for Tagged -where - Tagged: PrettyDebugWithSource, -{ +impl HasSpan for Tagged { fn span(&self) -> Span { self.tag.span } diff --git a/crates/nu-source/src/pretty.rs b/crates/nu-source/src/pretty.rs index 6ecfe4cb48..38b607e174 100644 --- a/crates/nu-source/src/pretty.rs +++ b/crates/nu-source/src/pretty.rs @@ -1,3 +1,4 @@ +use crate::meta::Spanned; use crate::term_colored::TermColored; use crate::text::Text; use derive_new::new; @@ -98,6 +99,21 @@ pub struct DebugDocBuilder { pub inner: PrettyDebugDocBuilder, } +impl PrettyDebug for bool { + fn pretty(&self) -> DebugDocBuilder { + match self { + true => b::primitive("true"), + false => b::primitive("false"), + } + } +} + +impl PrettyDebug for () { + fn pretty(&self) -> DebugDocBuilder { + b::primitive("nothing") + } +} + impl PrettyDebug for DebugDocBuilder { fn pretty(&self) -> DebugDocBuilder { self.clone() @@ -156,7 +172,7 @@ impl DebugDocBuilder { } pub fn typed(kind: &str, value: DebugDocBuilder) -> DebugDocBuilder { - b::delimit("(", b::kind(kind) + b::space() + value.group(), ")").group() + b::kind(kind) + b::delimit("[", value.group(), "]") } pub fn subtyped( @@ -340,9 +356,23 @@ pub struct DebugDoc { pub inner: PrettyDebugDoc, } +#[derive(Debug, Copy, Clone)] +pub enum PrettyDebugRefineKind { + ContextFree, + WithContext, +} + pub trait PrettyDebugWithSource: Sized { fn pretty_debug(&self, source: &str) -> DebugDocBuilder; + fn refined_pretty_debug( + &self, + _refine: PrettyDebugRefineKind, + source: &str, + ) -> DebugDocBuilder { + self.pretty_debug(source) + } + // This is a transitional convenience method fn debug(&self, source: impl Into) -> String where @@ -359,12 +389,27 @@ pub trait PrettyDebugWithSource: Sized { } } +impl PrettyDebug for Spanned { + fn pretty(&self) -> DebugDocBuilder { + self.item.pretty() + } +} + impl PrettyDebugWithSource for T { fn pretty_debug(&self, _source: &str) -> DebugDocBuilder { self.pretty() } } +impl PrettyDebugWithSource for Result { + fn pretty_debug(&self, source: &str) -> DebugDocBuilder { + match self { + Err(_) => b::error("error"), + Ok(val) => val.pretty_debug(source), + } + } +} + pub struct DebuggableWithSource { inner: T, source: Text, diff --git a/crates/nu-test-support/src/macros.rs b/crates/nu-test-support/src/macros.rs index 00bc59ba6c..c1063d721c 100644 --- a/crates/nu-test-support/src/macros.rs +++ b/crates/nu-test-support/src/macros.rs @@ -38,7 +38,7 @@ macro_rules! nu { }); let mut process = match Command::new($crate::fs::executable_path()) - .env_clear() + // .env_clear() .env("PATH", dummies) .stdin(Stdio::piped()) .stdout(Stdio::piped()) @@ -53,19 +53,26 @@ macro_rules! 
nu { .write_all(commands.as_bytes()) .expect("couldn't write to stdin"); - let output = process .wait_with_output() .expect("couldn't read from stdout"); - let out = String::from_utf8_lossy(&output.stdout); - let out = out.lines().skip(1).collect::>().join("\n"); - let out = out.replace("\r\n", ""); - let out = out.replace("\n", ""); + let out = $crate::macros::read_std(&output.stdout); + let err = $crate::macros::read_std(&output.stderr); + + println!("=== stderr\n{}", err); + out }}; } +pub fn read_std(std: &[u8]) -> String { + let out = String::from_utf8_lossy(std); + let out = out.lines().skip(1).collect::>().join("\n"); + let out = out.replace("\r\n", ""); + out.replace("\n", "") +} + #[macro_export] macro_rules! nu_error { (cwd: $cwd:expr, $path:expr, $($part:expr),*) => {{ @@ -106,7 +113,7 @@ macro_rules! nu_error { }); let mut process = Command::new($crate::fs::executable_path()) - .env_clear() + // .env_clear() .env("PATH", dummies) .stdout(Stdio::piped()) .stdin(Stdio::piped()) diff --git a/src/cli.rs b/src/cli.rs index c84c39af19..ffafd74e43 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -8,9 +8,10 @@ use crate::data::config; use crate::git::current_branch; use crate::prelude::*; use nu_errors::ShellError; +use nu_parser::hir::Expression; use nu_parser::{ - expand_syntax, hir, ClassifiedCommand, ClassifiedPipeline, InternalCommand, PipelineShape, - TokenNode, TokensIterator, + hir, ClassifiedCommand, ClassifiedPipeline, InternalCommand, PipelineShape, SpannedToken, + TokensIterator, }; use nu_protocol::{Signature, UntaggedValue, Value}; @@ -60,16 +61,16 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel let name = params.name.clone(); let fname = fname.to_string(); - if context.get_command(&name)?.is_some() { + if context.get_command(&name).is_some() { trace!("plugin {:?} already loaded.", &name); } else if params.is_filter { - context.add_commands(vec![whole_stream_command( - PluginCommand::new(name, fname, params), - )])?; + context.add_commands(vec![whole_stream_command(PluginCommand::new( + name, fname, params, + ))]); } else { context.add_commands(vec![whole_stream_command(PluginSink::new( name, fname, params, - ))])?; + ))]); } Ok(()) } @@ -346,7 +347,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(FromXML), whole_stream_command(FromYAML), whole_stream_command(FromYML), - ])?; + ]); cfg_if::cfg_if! 
{ if #[cfg(data_processing_primitives)] { @@ -355,7 +356,7 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(EvaluateBy), whole_stream_command(TSortBy), whole_stream_command(MapMaxBy), - ])?; + ]); } } @@ -363,7 +364,7 @@ pub async fn cli() -> Result<(), Box> { { context.add_commands(vec![whole_stream_command( crate::commands::clip::clipboard::Clip, - )])?; + )]); } } @@ -402,7 +403,7 @@ pub async fn cli() -> Result<(), Box> { continue; } - let cwd = context.shell_manager.path()?; + let cwd = context.shell_manager.path(); rl.set_helper(Some(crate::shell::Helper::new(context.clone()))); @@ -479,7 +480,7 @@ pub async fn cli() -> Result<(), Box> { context.with_host(|host| { print_err(err, host, &Text::from(line.clone())); - })?; + }); context.maybe_print_errors(Text::from(line.clone())); } @@ -501,7 +502,7 @@ pub async fn cli() -> Result<(), Box> { let _ = rl.save_history(&History::path()); std::process::exit(0); } else { - context.with_host(|host| host.stdout("CTRL-C pressed (again to quit)"))?; + context.with_host(|host| host.stdout("CTRL-C pressed (again to quit)")); ctrlcbreak = true; continue; } @@ -606,26 +607,33 @@ async fn process_line(readline: Result, ctx: &mut Context debug!("=== Parsed ==="); debug!("{:#?}", result); - let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) { - Ok(pipeline) => pipeline, - Err(err) => return LineResult::Error(line.to_string(), err), + let mut pipeline = classify_pipeline(&result, ctx, &Text::from(line)); + + if let Some(failure) = pipeline.failed { + return LineResult::Error(line.to_string(), failure.into()); + } + + let should_push = match pipeline.commands.list.last() { + Some(ClassifiedCommand::External(_)) => false, + _ => true, }; - match pipeline.commands.list.last() { - Some(ClassifiedCommand::External(_)) => {} - _ => pipeline + if should_push { + pipeline .commands .list .push(ClassifiedCommand::Internal(InternalCommand { name: "autoview".to_string(), name_tag: Tag::unknown(), args: hir::Call::new( - Box::new(hir::Expression::synthetic_string("autoview")), + Box::new( + Expression::synthetic_string("autoview").into_expr(Span::unknown()), + ), None, None, Span::unknown(), ), - })), + })); } // Check the config to see if we need to update the path @@ -650,19 +658,15 @@ async fn process_line(readline: Result, ctx: &mut Context } pub fn classify_pipeline( - pipeline: &TokenNode, + pipeline: &SpannedToken, context: &Context, source: &Text, -) -> Result { +) -> ClassifiedPipeline { let pipeline_list = vec![pipeline.clone()]; - let mut iterator = TokensIterator::all(&pipeline_list, source.clone(), pipeline.span()); + let expand_context = context.expand_context(source); + let mut iterator = TokensIterator::new(&pipeline_list, expand_context, pipeline.span()); - let result = expand_syntax( - &PipelineShape, - &mut iterator, - &context.expand_context(source)?, - ) - .map_err(|err| err.into()); + let result = iterator.expand_infallible(PipelineShape); if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) { outln!(""); diff --git a/src/commands/autoview.rs b/src/commands/autoview.rs index cfcf61ea6e..f71660735a 100644 --- a/src/commands/autoview.rs +++ b/src/commands/autoview.rs @@ -70,7 +70,7 @@ pub fn autoview( } } }; - if let Some(table) = table? 
{ + if let Some(table) = table { let mut new_output_stream: OutputStream = stream.to_output_stream(); let mut finished = false; let mut current_idx = 0; @@ -100,7 +100,7 @@ pub fn autoview( let first = &input[0]; let mut host = context.host.clone(); - let mut host = host.lock(); + let host = host.lock(); crate::cli::print_err(first.value.expect_error(), &*host, &context.source); return; @@ -108,13 +108,12 @@ pub fn autoview( let mut command_args = raw.with_input(input); let mut named_args = NamedArguments::new(); - named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown()))); + named_args.insert_optional("start_number", Some(Expression::number(current_idx).into_expr(Span::unknown()))); command_args.call_info.args.named = Some(named_args); let result = table.run(command_args, &context.commands); result.collect::>().await; - if finished { break; } else { @@ -130,7 +129,7 @@ pub fn autoview( value: UntaggedValue::Primitive(Primitive::String(ref s)), tag: Tag { anchor, span }, } if anchor.is_some() => { - if let Some(text) = text? { + if let Some(text) = text { let mut stream = VecDeque::new(); stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span })); let result = text.run(raw.with_input(stream.into()), &context.commands); @@ -149,7 +148,7 @@ pub fn autoview( value: UntaggedValue::Primitive(Primitive::Line(ref s)), tag: Tag { anchor, span }, } if anchor.is_some() => { - if let Some(text) = text? { + if let Some(text) = text { let mut stream = VecDeque::new(); stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span })); let result = text.run(raw.with_input(stream.into()), &context.commands); @@ -184,7 +183,7 @@ pub fn autoview( } Value { value: UntaggedValue::Primitive(Primitive::Binary(ref b)), .. } => { - if let Some(binary) = binary? { + if let Some(binary) = binary { let mut stream = VecDeque::new(); stream.push_back(x); let result = binary.run(raw.with_input(stream.into()), &context.commands); @@ -199,7 +198,7 @@ pub fn autoview( yield Err(e); } Value { value: ref item, .. } => { - if let Some(table) = table? 
{ + if let Some(table) = table { let mut stream = VecDeque::new(); stream.push_back(x); let result = table.run(raw.with_input(stream.into()), &context.commands); diff --git a/src/commands/classified/external.rs b/src/commands/classified/external.rs index b40392975b..1445fcde7b 100644 --- a/src/commands/classified/external.rs +++ b/src/commands/classified/external.rs @@ -102,7 +102,7 @@ async fn run_with_iterator_arg( input: Option, is_last: bool, ) -> Result, ShellError> { - let path = context.shell_manager.path()?; + let path = context.shell_manager.path(); let mut inputs: InputStream = if let Some(input) = input { trace_stream!(target: "nu::trace_stream::external::it", "input" = input) @@ -180,7 +180,7 @@ async fn run_with_stdin( input: Option, is_last: bool, ) -> Result, ShellError> { - let path = context.shell_manager.path()?; + let path = context.shell_manager.path(); let mut inputs: InputStream = if let Some(input) = input { trace_stream!(target: "nu::trace_stream::external::stdin", "input" = input) diff --git a/src/commands/classified/internal.rs b/src/commands/classified/internal.rs index 0f781ff244..5e777ce5b7 100644 --- a/src/commands/classified/internal.rs +++ b/src/commands/classified/internal.rs @@ -47,18 +47,18 @@ pub(crate) async fn run_internal_command( match item { Ok(ReturnSuccess::Action(action)) => match action { CommandAction::ChangePath(path) => { - context.shell_manager.set_path(path)?; + context.shell_manager.set_path(path); } CommandAction::Exit => std::process::exit(0), // TODO: save history.txt CommandAction::Error(err) => { - context.error(err)?; + context.error(err); break; } CommandAction::AutoConvert(tagged_contents, extension) => { let contents_tag = tagged_contents.tag.clone(); let command_name = format!("from-{}", extension); let command = command.clone(); - if let Some(converter) = context.registry.get_command(&command_name)? { + if let Some(converter) = context.registry.get_command(&command_name) { let new_args = RawCommandArgs { host: context.host.clone(), ctrl_c: context.ctrl_c.clone(), @@ -100,43 +100,39 @@ pub(crate) async fn run_internal_command( value: UntaggedValue::Primitive(Primitive::String(cmd)), tag, } => { - let result = context.shell_manager.insert_at_current(Box::new( + context.shell_manager.insert_at_current(Box::new( HelpShell::for_command( UntaggedValue::string(cmd).into_value(tag), &context.registry(), )?, )); - - result? } _ => { - let result = context.shell_manager.insert_at_current(Box::new( + context.shell_manager.insert_at_current(Box::new( HelpShell::index(&context.registry())?, )); - - result? } } } CommandAction::EnterValueShell(value) => { context .shell_manager - .insert_at_current(Box::new(ValueShell::new(value)))?; + .insert_at_current(Box::new(ValueShell::new(value))); } CommandAction::EnterShell(location) => { context.shell_manager.insert_at_current(Box::new( FilesystemShell::with_location(location, context.registry().clone()), - ))?; + )); } CommandAction::PreviousShell => { - context.shell_manager.prev()?; + context.shell_manager.prev(); } CommandAction::NextShell => { - context.shell_manager.next()?; + context.shell_manager.next(); } CommandAction::LeaveShell => { - context.shell_manager.remove_at_current()?; - if context.shell_manager.is_empty()? 
{ + context.shell_manager.remove_at_current(); + if context.shell_manager.is_empty() { std::process::exit(0); // TODO: save history.txt } } @@ -154,7 +150,7 @@ pub(crate) async fn run_internal_command( let mut buffer = termcolor::Buffer::ansi(); let _ = doc.render_raw( - context.with_host(|host| host.width() - 5)?, + context.with_host(|host| host.width() - 5), &mut nu_source::TermColored::new(&mut buffer), ); @@ -164,7 +160,7 @@ pub(crate) async fn run_internal_command( } Err(err) => { - context.error(err)?; + context.error(err); break; } } diff --git a/src/commands/classified/pipeline.rs b/src/commands/classified/pipeline.rs index 645e46a1e3..294cf8d21a 100644 --- a/src/commands/classified/pipeline.rs +++ b/src/commands/classified/pipeline.rs @@ -29,6 +29,9 @@ pub(crate) async fn run_pipeline( return Err(ShellError::unimplemented("Expression-only commands")) } + (Some(ClassifiedCommand::Error(err)), _) => return Err(err.into()), + (_, Some(ClassifiedCommand::Error(err))) => return Err(err.clone().into()), + (Some(ClassifiedCommand::Internal(left)), _) => { run_internal_command(left, ctx, input, Text::from(line)).await? } diff --git a/src/commands/command.rs b/src/commands/command.rs index dfcca6671d..4e2c2159af 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -236,7 +236,7 @@ pub struct RunnableContext { } impl RunnableContext { - pub fn get_command(&self, name: &str) -> Result>, ShellError> { + pub fn get_command(&self, name: &str) -> Option> { self.commands.get_command(name) } } diff --git a/src/commands/enter.rs b/src/commands/enter.rs index cbb9d1f4f5..b8fa2dd933 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -51,7 +51,7 @@ impl PerItemCommand for Enter { if spec.len() == 2 { let (_, command) = (spec[0], spec[1]); - if registry.has(command)? { + if registry.has(command) { return Ok(vec![Ok(ReturnSuccess::Action( CommandAction::EnterHelpShell( UntaggedValue::string(command).into_value(Tag::unknown()), @@ -74,7 +74,7 @@ impl PerItemCommand for Enter { // If it's a file, attempt to open the file as a value and enter it let cwd = raw_args.shell_manager.path(); - let full_path = std::path::PathBuf::from(cwd?); + let full_path = std::path::PathBuf::from(cwd); let (file_extension, contents, contents_tag) = crate::commands::open::fetch( @@ -90,7 +90,7 @@ impl PerItemCommand for Enter { if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = - registry.get_command(&command_name)? 
+ registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, diff --git a/src/commands/from_sqlite.rs b/src/commands/from_sqlite.rs index 34c2ef182b..2067ed91d0 100644 --- a/src/commands/from_sqlite.rs +++ b/src/commands/from_sqlite.rs @@ -153,7 +153,8 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result yield ReturnSuccess::value(x), } - Err(_) => { + Err(err) => { + println!("{:?}", err); yield Err(ShellError::labeled_error_with_secondary( "Could not parse as SQLite", "input cannot be parsed as SQLite", diff --git a/src/commands/help.rs b/src/commands/help.rs index 6897ac798b..9299b3a6aa 100644 --- a/src/commands/help.rs +++ b/src/commands/help.rs @@ -41,12 +41,12 @@ impl PerItemCommand for Help { }) => { let mut help = VecDeque::new(); if document == "commands" { - let mut sorted_names = registry.names()?; + let mut sorted_names = registry.names(); sorted_names.sort(); for cmd in sorted_names { let mut short_desc = TaggedDictBuilder::new(tag.clone()); let value = command_dict( - registry.get_command(&cmd)?.ok_or_else(|| { + registry.get_command(&cmd).ok_or_else(|| { ShellError::labeled_error( format!("Could not load {}", cmd), "could not load command", @@ -72,7 +72,7 @@ impl PerItemCommand for Help { help.push_back(ReturnSuccess::value(short_desc.into_value())); } - } else if let Some(command) = registry.get_command(document)? { + } else if let Some(command) = registry.get_command(document) { return Ok( get_help(&command.name(), &command.usage(), command.signature()).into(), ); diff --git a/src/commands/open.rs b/src/commands/open.rs index 1ac00d8324..91a5aa031e 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -40,7 +40,7 @@ impl PerItemCommand for Open { fn run(call_info: &CallInfo, raw_args: &RawCommandArgs) -> Result { let shell_manager = &raw_args.shell_manager; - let cwd = PathBuf::from(shell_manager.path()?); + let cwd = PathBuf::from(shell_manager.path()); let full_path = cwd; let path = call_info.args.nth(0).ok_or_else(|| { diff --git a/src/commands/save.rs b/src/commands/save.rs index 144a8cc83e..1c1d06cae1 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -130,7 +130,7 @@ fn save( }: RunnableContext, raw_args: RawCommandArgs, ) -> Result { - let mut full_path = PathBuf::from(shell_manager.path()?); + let mut full_path = PathBuf::from(shell_manager.path()); let name_tag = name.clone(); let stream = async_stream! { @@ -179,7 +179,7 @@ fn save( break if !save_raw { if let Some(extension) = full_path.extension() { let command_name = format!("to-{}", extension.to_string_lossy()); - if let Some(converter) = registry.get_command(&command_name)? { + if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host, ctrl_c, diff --git a/src/commands/shells.rs b/src/commands/shells.rs index 5e9f73cb85..cee7cbe933 100644 --- a/src/commands/shells.rs +++ b/src/commands/shells.rs @@ -32,16 +32,7 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result &str { - "what?" 
+ "describe" } fn signature(&self) -> Signature { - Signature::build("what?") + Signature::build("describe") } fn usage(&self) -> &str { @@ -43,7 +42,7 @@ pub fn what( pin_mut!(values); while let Some(row) = values.next().await { - let name = value::format_leaf(&row).plain_string(100000); + let name = value::format_type(&row, 100); yield ReturnSuccess::value(UntaggedValue::string(name).into_value(Tag::unknown_anchor(row.tag.span))); } }; diff --git a/src/commands/which_.rs b/src/commands/which_.rs index dc8b809788..012b52a8f9 100644 --- a/src/commands/which_.rs +++ b/src/commands/which_.rs @@ -95,7 +95,7 @@ fn which( } } - let builtin = commands.has(&item)?; + let builtin = commands.has(&item); if builtin { yield ReturnSuccess::value(entry_builtin!(item, application.tag.clone())); } @@ -128,7 +128,7 @@ fn which( if let Ok(path) = ichwh::which(&item).await { yield ReturnSuccess::value(entry_path!(item, path.into(), application.tag.clone())); } - } else if commands.has(&item)? { + } else if commands.has(&item) { yield ReturnSuccess::value(entry_builtin!(item, application.tag.clone())); } else if let Ok(path) = ichwh::which(&item).await { yield ReturnSuccess::value(entry_path!(item, path.into(), application.tag.clone())); diff --git a/src/context.rs b/src/context.rs index 806c31fd12..d2d9bbfdbf 100644 --- a/src/context.rs +++ b/src/context.rs @@ -5,37 +5,29 @@ use crate::stream::{InputStream, OutputStream}; use indexmap::IndexMap; use nu_errors::ShellError; use nu_parser::{hir, hir::syntax_shape::ExpandContext, hir::syntax_shape::SignatureRegistry}; -use nu_protocol::{errln, Signature}; +use nu_protocol::Signature; use nu_source::{Tag, Text}; +use parking_lot::Mutex; use std::error::Error; use std::sync::atomic::AtomicBool; -use std::sync::{Arc, Mutex}; +use std::sync::Arc; -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct CommandRegistry { registry: Arc>>>, } impl SignatureRegistry for CommandRegistry { - fn has(&self, name: &str) -> Result { - if let Ok(registry) = self.registry.lock() { - Ok(registry.contains_key(name)) - } else { - Err(ShellError::untagged_runtime_error(format!( - "Could not load from registry: {}", - name - ))) - } + fn has(&self, name: &str) -> bool { + let registry = self.registry.lock(); + registry.contains_key(name) } - fn get(&self, name: &str) -> Result, ShellError> { - if let Ok(registry) = self.registry.lock() { - Ok(registry.get(name).map(|command| command.signature())) - } else { - Err(ShellError::untagged_runtime_error(format!( - "Could not get from registry: {}", - name - ))) - } + fn get(&self, name: &str) -> Option { + let registry = self.registry.lock(); + registry.get(name).map(|command| command.signature()) + } + fn clone_box(&self) -> Box { + Box::new(self.clone()) } } @@ -54,53 +46,32 @@ impl CommandRegistry { } } - pub(crate) fn get_command(&self, name: &str) -> Result>, ShellError> { - let registry = self.registry.lock().map_err(|_| { - ShellError::untagged_runtime_error("Internal error: get_command could not get mutex") - })?; + pub(crate) fn get_command(&self, name: &str) -> Option> { + let registry = self.registry.lock(); - Ok(registry.get(name).cloned()) + registry.get(name).cloned() } pub(crate) fn expect_command(&self, name: &str) -> Result, ShellError> { - self.get_command(name)?.ok_or_else(|| { + self.get_command(name).ok_or_else(|| { ShellError::untagged_runtime_error(format!("Could not load command: {}", name)) }) } - pub(crate) fn has(&self, name: &str) -> Result { - let registry = self.registry.lock().map_err(|_| { - 
ShellError::untagged_runtime_error("Internal error: has could not get mutex") - })?; + pub(crate) fn has(&self, name: &str) -> bool { + let registry = self.registry.lock(); - Ok(registry.contains_key(name)) + registry.contains_key(name) } - pub(crate) fn insert( - &mut self, - name: impl Into, - command: Arc, - ) -> Result<(), ShellError> { - let mut registry = self.registry.lock().map_err(|_| { - ShellError::untagged_runtime_error("Internal error: insert could not get mutex") - })?; - + pub(crate) fn insert(&mut self, name: impl Into, command: Arc) { + let mut registry = self.registry.lock(); registry.insert(name.into(), command); - Ok(()) } - pub(crate) fn names(&self) -> Result, ShellError> { - let registry = self.registry.lock().map_err(|_| { - ShellError::untagged_runtime_error("Internal error: names could not get mutex") - })?; - Ok(registry.keys().cloned().collect()) - } - - pub(crate) fn snapshot(&self) -> Result>, ShellError> { - let registry = self.registry.lock().map_err(|_| { - ShellError::untagged_runtime_error("Internal error: names could not get mutex") - })?; - Ok(registry.clone()) + pub(crate) fn names(&self) -> Vec { + let registry = self.registry.lock(); + registry.keys().cloned().collect() } } @@ -121,12 +92,12 @@ impl Context { pub(crate) fn expand_context<'context>( &'context self, source: &'context Text, - ) -> Result, ShellError> { - Ok(ExpandContext::new( + ) -> ExpandContext { + ExpandContext::new( Box::new(self.registry.clone()), source, - self.shell_manager.homedir()?, - )) + self.shell_manager.homedir(), + ) } pub(crate) fn basic() -> Result> { @@ -142,73 +113,47 @@ impl Context { }) } - pub(crate) fn error(&mut self, error: ShellError) -> Result<(), ShellError> { + pub(crate) fn error(&mut self, error: ShellError) { self.with_errors(|errors| errors.push(error)) } pub(crate) fn maybe_print_errors(&mut self, source: Text) -> bool { let errors = self.current_errors.clone(); - let errors = errors.lock(); + let mut errors = errors.lock(); let host = self.host.clone(); let host = host.lock(); - let result: bool; + if errors.len() > 0 { + let error = errors[0].clone(); + *errors = vec![]; - match (errors, host) { - (Err(err), _) => { - errln!( - "Unexpected error attempting to acquire the lock of the current errors: {:?}", - err - ); - result = false; - } - (Ok(mut errors), host) => { - if errors.len() > 0 { - let error = errors[0].clone(); - *errors = vec![]; - - crate::cli::print_err(error, &*host, &source); - result = true; - } else { - result = false; - } - } - }; - - result - } - - pub(crate) fn with_host( - &mut self, - block: impl FnOnce(&mut dyn Host) -> T, - ) -> Result { - let mut host = self.host.lock(); - Ok(block(&mut *host)) - } - - pub(crate) fn with_errors( - &mut self, - block: impl FnOnce(&mut Vec) -> T, - ) -> Result { - if let Ok(mut errors) = self.current_errors.lock() { - Ok(block(&mut *errors)) + crate::cli::print_err(error, &*host, &source); + true } else { - Err(ShellError::untagged_runtime_error( - "Internal error: could not lock host in with_errors", - )) + false } } - pub fn add_commands(&mut self, commands: Vec>) -> Result<(), ShellError> { + pub(crate) fn with_host(&mut self, block: impl FnOnce(&mut dyn Host) -> T) -> T { + let mut host = self.host.lock(); + + block(&mut *host) + } + + pub(crate) fn with_errors(&mut self, block: impl FnOnce(&mut Vec) -> T) -> T { + let mut errors = self.current_errors.lock(); + + block(&mut *errors) + } + + pub fn add_commands(&mut self, commands: Vec>) { for command in commands { - 
self.registry.insert(command.name().to_string(), command)?; + self.registry.insert(command.name().to_string(), command); } - - Ok(()) } - pub(crate) fn get_command(&self, name: &str) -> Result>, ShellError> { + pub(crate) fn get_command(&self, name: &str) -> Option> { self.registry.get_command(name) } diff --git a/src/data/base.rs b/src/data/base.rs index a957aa45bd..799708e2e8 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -29,7 +29,7 @@ pub struct Operation { #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, Hash, Serialize, Deserialize, new)] pub struct Block { - pub(crate) expressions: Vec, + pub(crate) expressions: Vec, pub(crate) source: Text, pub(crate) tag: Tag, } diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index d68c8c1c63..186127b881 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -4,7 +4,7 @@ use crate::evaluate::operator::apply_operator; use crate::prelude::*; use log::trace; use nu_errors::{ArgumentError, ShellError}; -use nu_parser::hir::{self, Expression, RawExpression}; +use nu_parser::hir::{self, Expression, SpannedExpression}; use nu_protocol::{ ColumnPath, Evaluate, Primitive, RangeInclusion, Scope, TaggedDictBuilder, UnspannedPathMember, UntaggedValue, Value, @@ -12,7 +12,7 @@ use nu_protocol::{ use nu_source::Text; pub(crate) fn evaluate_baseline_expr( - expr: &Expression, + expr: &SpannedExpression, registry: &CommandRegistry, scope: &Scope, source: &Text, @@ -22,19 +22,19 @@ pub(crate) fn evaluate_baseline_expr( anchor: None, }; match &expr.expr { - RawExpression::Literal(literal) => Ok(evaluate_literal(literal, source)), - RawExpression::ExternalWord => Err(ShellError::argument_error( + Expression::Literal(literal) => Ok(evaluate_literal(literal, expr.span, source)), + Expression::ExternalWord => Err(ShellError::argument_error( "Invalid external word".spanned(tag.span), ArgumentError::InvalidExternalWord, )), - RawExpression::FilePath(path) => Ok(UntaggedValue::path(path.clone()).into_value(tag)), - RawExpression::Synthetic(hir::Synthetic::String(s)) => { + Expression::FilePath(path) => Ok(UntaggedValue::path(path.clone()).into_value(tag)), + Expression::Synthetic(hir::Synthetic::String(s)) => { Ok(UntaggedValue::string(s).into_untagged_value()) } - RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag), - RawExpression::Command(_) => evaluate_command(tag, scope, source), - RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source), - RawExpression::Binary(binary) => { + Expression::Variable(var) => evaluate_reference(var, scope, source, tag), + Expression::Command(_) => evaluate_command(tag, scope, source), + Expression::ExternalCommand(external) => evaluate_external(external, scope, source), + Expression::Binary(binary) => { let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?; let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?; @@ -48,7 +48,7 @@ pub(crate) fn evaluate_baseline_expr( )), } } - RawExpression::Range(range) => { + Expression::Range(range) => { let left = range.left(); let right = range.right(); @@ -68,7 +68,7 @@ pub(crate) fn evaluate_baseline_expr( Ok(UntaggedValue::range(left, right).into_value(tag)) } - RawExpression::List(list) => { + Expression::List(list) => { let mut exprs = vec![]; for expr in list { @@ -78,13 +78,13 @@ pub(crate) fn evaluate_baseline_expr( Ok(UntaggedValue::Table(exprs).into_value(tag)) } - RawExpression::Block(block) => 
Ok(UntaggedValue::Block(Evaluate::new(Block::new( + Expression::Block(block) => Ok(UntaggedValue::Block(Evaluate::new(Block::new( block.clone(), source.clone(), tag.clone(), ))) .into_value(&tag)), - RawExpression::Path(path) => { + Expression::Path(path) => { let value = evaluate_baseline_expr(path.head(), registry, scope, source)?; let mut item = value; @@ -122,37 +122,29 @@ pub(crate) fn evaluate_baseline_expr( Ok(item.value.into_value(tag)) } - RawExpression::Boolean(_boolean) => unimplemented!(), + Expression::Boolean(_boolean) => unimplemented!(), } } -fn evaluate_literal(literal: &hir::Literal, source: &Text) -> Value { - match &literal.literal { - hir::RawLiteral::ColumnPath(path) => { +fn evaluate_literal(literal: &hir::Literal, span: Span, source: &Text) -> Value { + match &literal { + hir::Literal::ColumnPath(path) => { let members = path .iter() .map(|member| member.to_path_member(source)) .collect(); UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(members))) - .into_value(&literal.span) + .into_value(span) } - hir::RawLiteral::Number(int) => match int { - nu_parser::Number::Int(i) => UntaggedValue::int(i.clone()).into_value(literal.span), - nu_parser::Number::Decimal(d) => { - UntaggedValue::decimal(d.clone()).into_value(literal.span) - } + hir::Literal::Number(int) => match int { + nu_parser::Number::Int(i) => UntaggedValue::int(i.clone()).into_value(span), + nu_parser::Number::Decimal(d) => UntaggedValue::decimal(d.clone()).into_value(span), }, - hir::RawLiteral::Size(int, unit) => unit.compute(&int).into_value(literal.span), - hir::RawLiteral::String(tag) => { - UntaggedValue::string(tag.slice(source)).into_value(literal.span) - } - hir::RawLiteral::GlobPattern(pattern) => { - UntaggedValue::pattern(pattern).into_value(literal.span) - } - hir::RawLiteral::Bare => { - UntaggedValue::string(literal.span.slice(source)).into_value(literal.span) - } + hir::Literal::Size(int, unit) => unit.compute(&int).into_value(span), + hir::Literal::String(tag) => UntaggedValue::string(tag.slice(source)).into_value(span), + hir::Literal::GlobPattern(pattern) => UntaggedValue::pattern(pattern).into_value(span), + hir::Literal::Bare => UntaggedValue::string(span.slice(source)).into_value(span), } } diff --git a/src/prelude.rs b/src/prelude.rs index ef1ad5d8bd..e442a283cb 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -69,6 +69,7 @@ macro_rules! 
trace_out_stream { } pub(crate) use nu_protocol::{errln, outln}; +use nu_source::HasFallibleSpan; pub(crate) use crate::commands::command::{ CallInfoExt, CommandArgs, PerItemCommand, RawCommandArgs, RunnableContext, @@ -131,12 +132,12 @@ where fn to_input_stream(self) -> InputStream { InputStream { values: self - .map(|item| { - if let Ok(result) = item.into() { - result - } else { - unreachable!("Internal errors: to_input_stream in inconsistent state") - } + .map(|item| match item.into() { + Ok(result) => result, + Err(err) => match HasFallibleSpan::maybe_span(&err) { + Some(span) => nu_protocol::UntaggedValue::Error(err).into_value(span), + None => nu_protocol::UntaggedValue::Error(err).into_untagged_value(), + }, }) .boxed(), } diff --git a/src/shell/completer.rs b/src/shell/completer.rs index a4bed043f5..63440059ab 100644 --- a/src/shell/completer.rs +++ b/src/shell/completer.rs @@ -1,13 +1,16 @@ use crate::context::CommandRegistry; use derive_new::new; +use nu_parser::ExpandContext; use nu_source::{HasSpan, Text}; use rustyline::completion::{Completer, FilenameCompleter}; +use std::path::PathBuf; #[derive(new)] pub(crate) struct NuCompleter { pub file_completer: FilenameCompleter, pub commands: CommandRegistry, + pub homedir: Option, } impl NuCompleter { @@ -17,7 +20,15 @@ impl NuCompleter { pos: usize, context: &rustyline::Context, ) -> rustyline::Result<(usize, Vec)> { - let commands: Vec = self.commands.names().unwrap_or_else(|_| vec![]); + let text = Text::from(line); + let expand_context = + ExpandContext::new(Box::new(self.commands.clone()), &text, self.homedir.clone()); + + #[allow(unused)] + // smarter completions + let shapes = nu_parser::pipeline_shapes(line, expand_context); + + let commands: Vec = self.commands.names(); let line_chars: Vec<_> = line[..pos].chars().collect(); @@ -100,8 +111,6 @@ impl NuCompleter { if let Ok(val) = nu_parser::parse(&line_copy) { let source = Text::from(line); let pipeline_list = vec![val.clone()]; - let mut iterator = - nu_parser::TokensIterator::all(&pipeline_list, source.clone(), val.span()); let expand_context = nu_parser::ExpandContext { homedir: None, @@ -109,10 +118,12 @@ impl NuCompleter { source: &source, }; - let result = - nu_parser::expand_syntax(&nu_parser::PipelineShape, &mut iterator, &expand_context); + let mut iterator = + nu_parser::TokensIterator::new(&pipeline_list, expand_context, val.span()); - if let Ok(result) = result { + let result = iterator.expand_infallible(nu_parser::PipelineShape); + + if result.failed.is_none() { for command in result.commands.list { if let nu_parser::ClassifiedCommand::Internal(nu_parser::InternalCommand { args, diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index 5b1101b5f0..90cb590a20 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -10,6 +10,7 @@ use crate::shell::completer::NuCompleter; use crate::shell::shell::Shell; use crate::utils::FileStructure; use nu_errors::ShellError; +use nu_parser::ExpandContext; use nu_protocol::{Primitive, ReturnSuccess, UntaggedValue}; use rustyline::completion::FilenameCompleter; use rustyline::hint::{Hinter, HistoryHinter}; @@ -38,6 +39,7 @@ impl Clone for FilesystemShell { completer: NuCompleter { file_completer: FilenameCompleter::new(), commands: self.completer.commands.clone(), + homedir: self.homedir(), }, hinter: HistoryHinter {}, } @@ -54,6 +56,7 @@ impl FilesystemShell { completer: NuCompleter { file_completer: FilenameCompleter::new(), commands, + homedir: dirs::home_dir(), }, hinter: 
HistoryHinter {}, }) @@ -67,6 +70,7 @@ impl FilesystemShell { completer: NuCompleter { file_completer: FilenameCompleter::new(), commands, + homedir: dirs::home_dir(), }, hinter: HistoryHinter {}, } @@ -1131,7 +1135,13 @@ impl Shell for FilesystemShell { self.completer.complete(line, pos, ctx) } - fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { + fn hint( + &self, + line: &str, + pos: usize, + ctx: &rustyline::Context<'_>, + _expand_context: ExpandContext, + ) -> Option { self.hinter.hint(line, pos, ctx) } } diff --git a/src/shell/help_shell.rs b/src/shell/help_shell.rs index d25cbdd113..09566d1da1 100644 --- a/src/shell/help_shell.rs +++ b/src/shell/help_shell.rs @@ -8,6 +8,7 @@ use crate::data::command_dict; use crate::prelude::*; use crate::shell::shell::Shell; use nu_errors::ShellError; +use nu_parser::ExpandContext; use nu_protocol::{ Primitive, ReturnSuccess, ShellTypeName, TaggedDictBuilder, UntaggedValue, Value, }; @@ -25,25 +26,28 @@ impl HelpShell { let mut cmds = TaggedDictBuilder::new(Tag::unknown()); let mut specs = Vec::new(); - let snapshot = registry.snapshot()?; + for cmd in registry.names() { + if let Some(cmd_value) = registry.get_command(&cmd) { + let mut spec = TaggedDictBuilder::new(Tag::unknown()); + let value = command_dict(cmd_value, Tag::unknown()); - for (name, cmd) in snapshot.iter() { - let mut spec = TaggedDictBuilder::new(Tag::unknown()); - let value = command_dict(cmd.clone(), Tag::unknown()); + spec.insert_untagged("name", cmd); + spec.insert_untagged( + "description", + value + .get_data_by_key("usage".spanned_unknown()) + .ok_or_else(|| { + ShellError::untagged_runtime_error( + "Internal error: expected to find usage", + ) + })? + .as_string()?, + ); + spec.insert_value("details", value); - spec.insert_untagged("name", name.to_string()); - spec.insert_untagged( - "description", - value - .get_data_by_key("usage".spanned_unknown()) - .ok_or_else(|| { - ShellError::untagged_runtime_error("Internal error: expected to find usage") - })? 
- .as_string()?, - ); - spec.insert_value("details", value); - - specs.push(spec.into_value()); + specs.push(spec.into_value()); + } else { + } } cmds.insert_untagged("help", UntaggedValue::Table(specs)); @@ -240,7 +244,13 @@ impl Shell for HelpShell { Ok((replace_pos, completions)) } - fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option { + fn hint( + &self, + _line: &str, + _pos: usize, + _ctx: &rustyline::Context<'_>, + _context: ExpandContext, + ) -> Option { None } } diff --git a/src/shell/helper.rs b/src/shell/helper.rs index 407991e903..eac5f03c0e 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -1,10 +1,9 @@ use crate::context::Context; -use ansi_term::Color; -use log::{log_enabled, trace}; -use nu_parser::hir::syntax_shape::color_fallible_syntax; -use nu_parser::{FlatShape, PipelineShape, TokenNode, TokensIterator}; -use nu_protocol::outln; -use nu_source::{nom_input, HasSpan, Spanned, Tag, Tagged, Text}; +use ansi_term::{Color, Style}; +use log::log_enabled; +use nu_parser::{FlatShape, PipelineShape, ShapeResult, Token, TokensIterator}; +use nu_protocol::{errln, outln}; +use nu_source::{nom_input, HasSpan, Tag, Tagged, Text}; use rustyline::completion::Completer; use rustyline::error::ReadlineError; use rustyline::highlight::Highlighter; @@ -39,10 +38,10 @@ impl Completer for Helper { impl Hinter for Helper { fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - match self.context.shell_manager.hint(line, pos, ctx) { - Ok(output) => output, - Err(e) => Some(format!("{}", e)), - } + let text = Text::from(line); + self.context + .shell_manager + .hint(line, pos, ctx, self.context.expand_context(&text)) } } @@ -71,53 +70,47 @@ impl Highlighter for Helper { match tokens { Err(_) => Cow::Borrowed(line), Ok((_rest, v)) => { - let mut out = String::new(); let pipeline = match v.as_pipeline() { Err(_) => return Cow::Borrowed(line), Ok(v) => v, }; - let tokens = vec![TokenNode::Pipeline(pipeline)]; - let mut tokens = TokensIterator::all(&tokens[..], Text::from(line), v.span()); - let text = Text::from(line); - match self.context.expand_context(&text) { - Ok(expand_context) => { - let shapes = { - // We just constructed a token list that only contains a pipeline, so it can't fail - if let Err(err) = - color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context) - { - let error_msg = format!("{}", err); - return Cow::Owned(error_msg); - } - tokens.with_color_tracer(|_, tracer| tracer.finish()); + let expand_context = self.context.expand_context(&text); - tokens.state().shapes() - }; + let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())]; + let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span()); - trace!(target: "nu::color_syntax", "{:#?}", tokens.color_tracer()); + let shapes = { + // We just constructed a token list that only contains a pipeline, so it can't fail + let result = tokens.expand_infallible(PipelineShape); - if log_enabled!(target: "nu::color_syntax", log::Level::Debug) { - outln!(""); - let _ = ptree::print_tree( - &tokens.color_tracer().clone().print(Text::from(line)), - ); - outln!(""); - } - - for shape in shapes { - let styled = paint_flat_shape(&shape, line); - out.push_str(&styled); - } - - Cow::Owned(out) - } - Err(err) => { - let error_msg = format!("{}", err); - Cow::Owned(error_msg) + if let Some(failure) = result.failed { + errln!( + "BUG: PipelineShape didn't find a pipeline :: {:#?}", + failure + ); } + + tokens.finish_tracer(); + + 
tokens.state().shapes() + }; + + if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) { + outln!(""); + let _ = + ptree::print_tree(&tokens.expand_tracer().clone().print(Text::from(line))); + outln!(""); } + + let mut painter = Painter::new(); + + for shape in shapes { + painter.paint_shape(&shape, line); + } + + Cow::Owned(painter.into_string()) } } } @@ -139,45 +132,75 @@ fn vec_tag(input: Vec>) -> Option { }) } -fn paint_flat_shape(flat_shape: &Spanned, line: &str) -> String { - let style = match &flat_shape.item { - FlatShape::OpenDelimiter(_) => Color::White.normal(), - FlatShape::CloseDelimiter(_) => Color::White.normal(), - FlatShape::ItVariable => Color::Purple.bold(), - FlatShape::Variable => Color::Purple.normal(), - FlatShape::CompareOperator => Color::Yellow.normal(), - FlatShape::DotDot => Color::Yellow.bold(), - FlatShape::Dot => Color::White.normal(), - FlatShape::InternalCommand => Color::Cyan.bold(), - FlatShape::ExternalCommand => Color::Cyan.normal(), - FlatShape::ExternalWord => Color::Green.bold(), - FlatShape::BareMember => Color::Yellow.bold(), - FlatShape::StringMember => Color::Yellow.bold(), - FlatShape::String => Color::Green.normal(), - FlatShape::Path => Color::Cyan.normal(), - FlatShape::GlobPattern => Color::Cyan.bold(), - FlatShape::Word => Color::Green.normal(), - FlatShape::Pipe => Color::Purple.bold(), - FlatShape::Flag => Color::Blue.bold(), - FlatShape::ShorthandFlag => Color::Blue.bold(), - FlatShape::Int => Color::Purple.bold(), - FlatShape::Decimal => Color::Purple.bold(), - FlatShape::Whitespace | FlatShape::Separator => Color::White.normal(), - FlatShape::Comment => Color::Green.bold(), - FlatShape::Error => Color::Red.bold(), - FlatShape::Size { number, unit } => { - let number = number.slice(line); - let unit = unit.slice(line); - return format!( - "{}{}", - Color::Purple.bold().paint(number), - Color::Cyan.bold().paint(unit) - ); - } - }; +struct Painter { + current: Style, + buffer: String, +} - let body = flat_shape.span.slice(line); - style.paint(body).to_string() +impl Painter { + fn new() -> Painter { + Painter { + current: Style::default(), + buffer: String::new(), + } + } + + fn into_string(self) -> String { + self.buffer + } + + fn paint_shape(&mut self, shape: &ShapeResult, line: &str) { + let style = match &shape { + ShapeResult::Success(shape) => match shape.item { + FlatShape::OpenDelimiter(_) => Color::White.normal(), + FlatShape::CloseDelimiter(_) => Color::White.normal(), + FlatShape::ItVariable | FlatShape::Keyword => Color::Purple.bold(), + FlatShape::Variable | FlatShape::Identifier => Color::Purple.normal(), + FlatShape::Type => Color::Blue.bold(), + FlatShape::CompareOperator => Color::Yellow.normal(), + FlatShape::DotDot => Color::Yellow.bold(), + FlatShape::Dot => Style::new().fg(Color::White).on(Color::Black), + FlatShape::InternalCommand => Color::Cyan.bold(), + FlatShape::ExternalCommand => Color::Cyan.normal(), + FlatShape::ExternalWord => Color::Green.bold(), + FlatShape::BareMember => Color::Yellow.bold(), + FlatShape::StringMember => Color::Yellow.bold(), + FlatShape::String => Color::Green.normal(), + FlatShape::Path => Color::Cyan.normal(), + FlatShape::GlobPattern => Color::Cyan.bold(), + FlatShape::Word => Color::Green.normal(), + FlatShape::Pipe => Color::Purple.bold(), + FlatShape::Flag => Color::Blue.bold(), + FlatShape::ShorthandFlag => Color::Blue.bold(), + FlatShape::Int => Color::Purple.bold(), + FlatShape::Decimal => Color::Purple.bold(), + FlatShape::Whitespace | FlatShape::Separator => 
diff --git a/src/shell/shell.rs b/src/shell/shell.rs
index 79c6fb78fa..35a359a1e5 100644
--- a/src/shell/shell.rs
+++ b/src/shell/shell.rs
@@ -7,6 +7,7 @@ use crate::commands::rm::RemoveArgs;
 use crate::prelude::*;
 use crate::stream::OutputStream;
 use nu_errors::ShellError;
+use nu_parser::ExpandContext;
 use std::path::PathBuf;
 
 pub trait Shell: std::fmt::Debug {
@@ -34,5 +35,11 @@ pub trait Shell: std::fmt::Debug {
         ctx: &rustyline::Context<'_>,
     ) -> Result<(usize, Vec<rustyline::completion::Pair>), rustyline::error::ReadlineError>;
 
-    fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String>;
+    fn hint(
+        &self,
+        _line: &str,
+        _pos: usize,
+        _ctx: &rustyline::Context<'_>,
+        _context: ExpandContext,
+    ) -> Option<String>;
 }
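Most of the deletions in src/shell/shell_manager.rs below fall out of replacing `std::sync::Mutex` with `parking_lot::Mutex`: `parking_lot` has no lock poisoning, so `lock()` hands back the guard directly rather than a `Result`, and every "could not lock shells ring buffer" branch disappears. A small standalone comparison, assuming the `parking_lot` crate is available:

    use std::sync::Arc;

    fn main() {
        // std::sync::Mutex: lock() returns a Result because a panicking holder poisons the lock.
        let std_shells = Arc::new(std::sync::Mutex::new(vec!["filesystem"]));
        if let Ok(shells) = std_shells.lock() {
            println!("std count: {}", shells.len());
        }

        // parking_lot::Mutex: no poisoning; lock() returns the guard immediately.
        let pl_shells = Arc::new(parking_lot::Mutex::new(vec!["filesystem"]));
        println!("parking_lot count: {}", pl_shells.lock().len());
    }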
diff --git a/src/shell/shell_manager.rs b/src/shell/shell_manager.rs
index dde248117a..16ec79aef4 100644
--- a/src/shell/shell_manager.rs
+++ b/src/shell/shell_manager.rs
@@ -9,10 +9,12 @@ use crate::shell::filesystem_shell::FilesystemShell;
 use crate::shell::shell::Shell;
 use crate::stream::OutputStream;
 use nu_errors::ShellError;
+use nu_parser::ExpandContext;
+use parking_lot::Mutex;
 use std::error::Error;
 use std::path::PathBuf;
 use std::sync::atomic::{AtomicUsize, Ordering};
-use std::sync::{Arc, Mutex};
+use std::sync::Arc;
 
 #[derive(Clone, Debug)]
 pub struct ShellManager {
@@ -30,95 +32,53 @@ impl ShellManager {
         })
     }
 
-    pub fn insert_at_current(&mut self, shell: Box<dyn Shell>) -> Result<(), ShellError> {
-        if let Ok(mut shells) = self.shells.lock() {
-            shells.push(shell);
-        } else {
-            return Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer",
-            ));
-        }
-
-        let shells_len = if let Ok(shells) = self.shells.lock() {
-            shells.len()
-        } else {
-            return Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer",
-            ));
-        };
-
-        self.current_shell.store(shells_len - 1, Ordering::SeqCst);
-        self.set_path(self.path()?)
+    pub fn insert_at_current(&mut self, shell: Box<dyn Shell>) {
+        self.shells.lock().push(shell);
+        self.current_shell
+            .store(self.shells.lock().len() - 1, Ordering::SeqCst);
+        self.set_path(self.path());
     }
 
     pub fn current_shell(&self) -> usize {
         self.current_shell.load(Ordering::SeqCst)
     }
 
-    pub fn remove_at_current(&mut self) -> Result<(), ShellError> {
+    pub fn remove_at_current(&mut self) {
         {
-            if let Ok(mut shells) = self.shells.lock() {
-                if shells.len() > 0 {
-                    if self.current_shell() == shells.len() - 1 {
-                        shells.pop();
-                        let new_len = shells.len();
-                        if new_len > 0 {
-                            self.current_shell.store(new_len - 1, Ordering::SeqCst);
-                        } else {
-                            return Ok(());
-                        }
+            let mut shells = self.shells.lock();
+            if shells.len() > 0 {
+                if self.current_shell() == shells.len() - 1 {
+                    shells.pop();
+                    let new_len = shells.len();
+                    if new_len > 0 {
+                        self.current_shell.store(new_len - 1, Ordering::SeqCst);
                     } else {
-                        shells.remove(self.current_shell());
+                        return;
                     }
+                } else {
+                    shells.remove(self.current_shell());
                 }
-            } else {
-                return Err(ShellError::untagged_runtime_error(
-                    "Internal error: could not lock shells ring buffer",
-                ));
             }
         }
-        self.set_path(self.path()?)
+        self.set_path(self.path())
     }
 
-    pub fn is_empty(&self) -> Result<bool, ShellError> {
-        if let Ok(shells) = self.shells.lock() {
-            Ok(shells.is_empty())
-        } else {
-            Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer (is_empty)",
-            ))
-        }
+    pub fn is_empty(&self) -> bool {
+        self.shells.lock().is_empty()
     }
 
-    pub fn path(&self) -> Result<String, ShellError> {
-        if let Ok(shells) = self.shells.lock() {
-            Ok(shells[self.current_shell()].path())
-        } else {
-            Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer (path)",
-            ))
-        }
+    pub fn path(&self) -> String {
+        self.shells.lock()[self.current_shell()].path()
     }
 
     pub fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
-        if let Ok(shells) = self.shells.lock() {
-            shells[self.current_shell()].pwd(args)
-        } else {
-            Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer (pwd)",
-            ))
-        }
+        let env = self.shells.lock();
+
+        env[self.current_shell()].pwd(args)
     }
 
-    pub fn set_path(&mut self, path: String) -> Result<(), ShellError> {
-        if let Ok(mut shells) = self.shells.lock() {
-            shells[self.current_shell()].set_path(path);
-            Ok(())
-        } else {
-            Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer (set_path)",
-            ))
-        }
+    pub fn set_path(&mut self, path: String) {
+        self.shells.lock()[self.current_shell()].set_path(path)
     }
 
     pub fn complete(
@@ -127,14 +87,7 @@ impl ShellManager {
         pos: usize,
         ctx: &rustyline::Context<'_>,
     ) -> Result<(usize, Vec<rustyline::completion::Pair>), rustyline::error::ReadlineError> {
-        if let Ok(shells) = self.shells.lock() {
-            shells[self.current_shell()].complete(line, pos, ctx)
-        } else {
-            Err(rustyline::error::ReadlineError::Io(std::io::Error::new(
-                std::io::ErrorKind::Other,
-                "Internal error: could not lock shells ring buffer (complete)",
-            )))
-        }
+        self.shells.lock()[self.current_shell()].complete(line, pos, ctx)
     }
 
     pub fn hint(
@@ -142,62 +95,41 @@ impl ShellManager {
         line: &str,
         pos: usize,
         ctx: &rustyline::Context<'_>,
-    ) -> Result<Option<String>, ShellError> {
-        if let Ok(shells) = self.shells.lock() {
-            Ok(shells[self.current_shell()].hint(line, pos, ctx))
-        } else {
-            Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer (hint)",
-            ))
-        }
+        context: ExpandContext,
+    ) -> Option<String> {
+        self.shells.lock()[self.current_shell()].hint(line, pos, ctx, context)
     }
 
-    pub fn next(&mut self) -> Result<(), ShellError> {
+    pub fn next(&mut self) {
         {
-            if let Ok(shells) = self.shells.lock() {
-                let shell_len = shells.len();
-                if self.current_shell() == (shell_len - 1) {
-                    self.current_shell.store(0, Ordering::SeqCst);
-                } else {
-                    self.current_shell
-                        .store(self.current_shell() + 1, Ordering::SeqCst);
-                }
+            let shell_len = self.shells.lock().len();
+            if self.current_shell() == (shell_len - 1) {
+                self.current_shell.store(0, Ordering::SeqCst);
             } else {
-                return Err(ShellError::untagged_runtime_error(
-                    "Internal error: could not lock shells ring buffer (next)",
-                ));
+                self.current_shell
+                    .store(self.current_shell() + 1, Ordering::SeqCst);
             }
         }
-        self.set_path(self.path()?)
+        self.set_path(self.path())
     }
 
-    pub fn prev(&mut self) -> Result<(), ShellError> {
+    pub fn prev(&mut self) {
         {
-            if let Ok(shells) = self.shells.lock() {
-                let shell_len = shells.len();
-                if self.current_shell() == 0 {
-                    self.current_shell.store(shell_len - 1, Ordering::SeqCst);
-                } else {
-                    self.current_shell
-                        .store(self.current_shell() - 1, Ordering::SeqCst);
-                }
+            let shell_len = self.shells.lock().len();
+            if self.current_shell() == 0 {
+                self.current_shell.store(shell_len - 1, Ordering::SeqCst);
             } else {
-                return Err(ShellError::untagged_runtime_error(
-                    "Internal error: could not lock shells ring buffer (prev)",
-                ));
+                self.current_shell
+                    .store(self.current_shell() - 1, Ordering::SeqCst);
             }
         }
-        self.set_path(self.path()?)
+        self.set_path(self.path())
     }
 
-    pub fn homedir(&self) -> Result<Option<PathBuf>, ShellError> {
-        if let Ok(shells) = self.shells.lock() {
-            Ok(shells[self.current_shell()].homedir())
-        } else {
-            Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer (homedir)",
-            ))
-        }
+    pub fn homedir(&self) -> Option<PathBuf> {
+        let env = self.shells.lock();
+
+        env[self.current_shell()].homedir()
     }
 
     pub fn ls(
@@ -205,23 +137,15 @@ impl ShellManager {
         args: LsArgs,
         context: &RunnablePerItemContext,
     ) -> Result<OutputStream, ShellError> {
-        if let Ok(shells) = self.shells.lock() {
-            shells[self.current_shell()].ls(args, context)
-        } else {
-            Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer (ls)",
-            ))
-        }
+        let env = self.shells.lock();
+
+        env[self.current_shell()].ls(args, context)
     }
 
     pub fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
-        if let Ok(shells) = self.shells.lock() {
-            shells[self.current_shell()].cd(args)
-        } else {
-            Err(ShellError::untagged_runtime_error(
-                "Internal error: could not lock shells ring buffer (cd)",
-            ))
-        }
+        let env = self.shells.lock();
+
+        env[self.current_shell()].cd(args)
     }
 
     pub fn cp(
@@ -231,17 +155,8 @@ impl ShellManager {
     ) -> Result<OutputStream, ShellError> {
         let shells = self.shells.lock();
 
-        match shells {
-            Ok(x) => {
-                let path = x[self.current_shell()].path();
-                x[self.current_shell()].cp(args, context.name.clone(), &path)
-            }
-            Err(e) => Err(ShellError::labeled_error(
-                format!("Internal error: could not lock {}", e),
-                "Internal error: could not lock",
-                &context.name,
-            )),
-        }
+        let path = shells[self.current_shell()].path();
+        shells[self.current_shell()].cp(args, context.name.clone(), &path)
     }
 
     pub fn rm(
@@ -251,17 +166,8 @@ impl ShellManager {
     ) -> Result<OutputStream, ShellError> {
         let shells = self.shells.lock();
 
-        match shells {
-            Ok(x) => {
-                let path = x[self.current_shell()].path();
-                x[self.current_shell()].rm(args, context.name.clone(), &path)
-            }
-            Err(e) => Err(ShellError::labeled_error(
-                format!("Internal error: could not lock {}", e),
-                "Internal error: could not lock",
-                &context.name,
-            )),
-        }
+        let path = shells[self.current_shell()].path();
+        shells[self.current_shell()].rm(args, context.name.clone(), &path)
     }
 
     pub fn mkdir(
@@ -271,17 +177,8 @@ impl ShellManager {
     ) -> Result<OutputStream, ShellError> {
         let shells = self.shells.lock();
 
-        match shells {
-            Ok(x) => {
-                let path = x[self.current_shell()].path();
-                x[self.current_shell()].mkdir(args, context.name.clone(), &path)
-            }
-            Err(e) => Err(ShellError::labeled_error(
-                format!("Internal error: could not lock {}", e),
-                "Internal error: could not lock",
-                &context.name,
-            )),
-        }
+        let path = shells[self.current_shell()].path();
+        shells[self.current_shell()].mkdir(args, context.name.clone(), &path)
     }
 
     pub fn mv(
@@ -291,16 +188,7 @@ impl ShellManager {
     ) -> Result<OutputStream, ShellError> {
         let shells = self.shells.lock();
 
-        match shells {
-            Ok(x) => {
-                let path = x[self.current_shell()].path();
-                x[self.current_shell()].mv(args, context.name.clone(), &path)
-            }
-            Err(e) => Err(ShellError::labeled_error(
-                format!("Internal error: could not lock {}", e),
-                "Internal error: could not lock",
-                &context.name,
-            )),
-        }
+        let path = shells[self.current_shell()].path();
+        shells[self.current_shell()].mv(args, context.name.clone(), &path)
     }
 }
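With locking infallible, the manager's navigation and path accessors above return plain values instead of `Result`s, so callers shed their error plumbing as well. A hedged sketch of the call-site shape this enables; `cycle_and_report` is illustrative only and `shells` stands in for a `ShellManager` as defined in this file:

    // Illustrative only: assumes a ShellManager as defined above.
    fn cycle_and_report(shells: &mut ShellManager) {
        shells.next(); // was Result<(), ShellError>, now ()
        let cwd = shells.path(); // was Result<String, ShellError>, now String
        println!("shell {} at {}", shells.current_shell(), cwd);
    }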
diff --git a/src/shell/value_shell.rs b/src/shell/value_shell.rs
index db36f93043..c058ef585a 100644
--- a/src/shell/value_shell.rs
+++ b/src/shell/value_shell.rs
@@ -8,6 +8,7 @@ use crate::prelude::*;
 use crate::shell::shell::Shell;
 use crate::utils::ValueStructure;
 use nu_errors::ShellError;
+use nu_parser::ExpandContext;
 use nu_protocol::{ReturnSuccess, ShellTypeName, UntaggedValue, Value};
 use std::ffi::OsStr;
 use std::path::{Path, PathBuf};
@@ -280,7 +281,13 @@ impl Shell for ValueShell {
         Ok((replace_pos, completions))
     }
 
-    fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String> {
+    fn hint(
+        &self,
+        _line: &str,
+        _pos: usize,
+        _ctx: &rustyline::Context<'_>,
+        _context: ExpandContext,
+    ) -> Option<String> {
         None
     }
 }
diff --git a/tests/commands/open.rs b/tests/commands/open.rs
index b4f9fc44b8..786fae1ce7 100644
--- a/tests/commands/open.rs
+++ b/tests/commands/open.rs
@@ -139,7 +139,8 @@ fn parses_sqlite() {
             | get table_values
             | nth 2
             | get x
-            | echo $it"#
+            | echo $it
+        "#
     ));
 
     assert_eq!(actual, "hello");
diff --git a/tests/commands/pick.rs b/tests/commands/pick.rs
index bcca8da681..3bd81ff06e 100644
--- a/tests/commands/pick.rs
+++ b/tests/commands/pick.rs
@@ -62,8 +62,8 @@ fn complex_nested_columns() {
         cwd: dirs.test(), pipeline(
         r#"
             open los_tres_caballeros.json
-            | pick nu.0xATYKARNU nu.committers.name nu.releases.version
-            | where $it."nu.releases.version" > "0.8"
+            | pick nu."0xATYKARNU" nu.committers.name nu.releases.version
+            | where "nu.releases.version" > "0.8"
             | get "nu.releases.version"
             | echo $it
        "#