This commit contains two improvements:

- Support for a Range syntax (and a corresponding Range value)
- Work towards a signature syntax

Implementing the Range syntax resulted in cleaning up how operators in the core syntax work. There are now two kinds of infix operators:

- tight operators (`.` and `..`)
- loose operators

Tight operators may not be interspersed (`$it.left..$it.right` is a syntax error). Loose operators require whitespace on both sides of the operator and can be arbitrarily interspersed. Precedence is left to right in the core syntax.

Note that delimited syntax (like `( ... )` or `[ ... ]`) is a single token node in the core syntax. A single token node can be parsed from beginning to end in a context-free manner.

The rule for `.` is `<token node>.<member>`. The rule for `..` is `<token node>..<token node>`. Loose operators all share the same syntactic rule: `<token node><space><loose op><space><token node>`.

The second aspect of this pull request is the beginning of support for a signature syntax. Before implementing signatures, a necessary prerequisite is for the core syntax to support multi-line programs. That work establishes a few things:

- `;` and newlines are handled in the core grammar, and both count as "separators"
- line comments begin with `#` and continue until the end of the line

In this commit, multi-token productions in the core grammar can use separators interchangeably with spaces. However, I think we will ultimately want a different rule that prevents separators from occurring before an infix operator, so that the end of a line is always unambiguous. This would avoid gratuitous differences between modules and REPL usage. We already effectively have this rule; otherwise `x<newline> | y` would have to be a single pipeline, which of course wouldn't work.
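To make the operator rules above concrete, here is a minimal illustrative sketch. The enum and helper names are hypothetical (they are not the parser's real types); it only demonstrates the classification into tight vs. loose operators, the "no interspersing" rule for tight chains, and the whitespace requirement for loose operators.

```rust
/// Illustrative only: the two operator families described above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum InfixKind {
    /// `.` and `..`: `<token node>.<member>` and `<token node>..<token node>`
    Tight,
    /// everything else: `<token node><space><loose op><space><token node>`
    Loose,
}

fn classify(op: &str) -> InfixKind {
    match op {
        "." | ".." => InfixKind::Tight,
        _ => InfixKind::Loose,
    }
}

/// Tight operators may not be interspersed: a chain like
/// `$it.left..$it.right` mixes `.` and `..` and is a syntax error.
fn tight_chain_ok(ops: &[&str]) -> bool {
    ops.windows(2).all(|pair| pair[0] == pair[1])
}

/// Loose operators require whitespace on both sides; evaluation of a
/// chain of loose operators proceeds left to right in the core syntax.
fn loose_spacing_ok(space_before: bool, space_after: bool) -> bool {
    space_before && space_after
}

fn main() {
    assert_eq!(classify(".."), InfixKind::Tight);
    assert!(!tight_chain_ok(&[".", ".."])); // `$it.left..$it.right` rejected
    assert!(loose_spacing_ok(true, true));
    assert!(!loose_spacing_ok(true, false));
}
```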
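The separator and comment rules can be sketched the same way. Again, the names below are hypothetical, and the comment handling is deliberately naive (it does not account for `#` inside quoted strings):

```rust
/// Illustrative only: `;` and newlines both count as separators
/// in the core grammar.
#[derive(Debug, PartialEq)]
enum Sep {
    Semicolon,
    Newline,
}

fn as_separator(c: char) -> Option<Sep> {
    match c {
        ';' => Some(Sep::Semicolon),
        '\n' => Some(Sep::Newline),
        _ => None,
    }
}

/// Line comments begin with `#` and continue to the end of the line.
/// Naive sketch: ignores `#` occurring inside quoted strings.
fn strip_line_comment(line: &str) -> &str {
    match line.find('#') {
        Some(idx) => &line[..idx],
        None => line,
    }
}

fn main() {
    assert_eq!(as_separator(';'), Some(Sep::Semicolon));
    assert_eq!(strip_line_comment("ls # list files"), "ls ");
}
```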
use crate::hir::syntax_shape::{
    expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
    ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, UnspannedAtomicToken,
};
use crate::hir::tokens_iterator::Peeked;
use crate::parse::tokens::UnspannedToken;
use crate::{hir, hir::TokensIterator};
use nu_errors::{ParseError, ShellError};
#[cfg(not(coloring_in_tokens))]
use nu_source::Spanned;
use nu_source::SpannedItem;

/// The syntax shape for string-like tokens.
#[derive(Debug, Copy, Clone)]
pub struct StringShape;

// Coloring when the `coloring_in_tokens` cfg is not set: computed shapes are
// pushed onto the caller-supplied `shapes` vector.
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for StringShape {
    type Info = ();
    type Input = FlatShape;

    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());

        // If the atom didn't expand, there is nothing to color.
        let atom = match atom {
            Err(_) => return Ok(()),
            Ok(atom) => atom,
        };

        // A string atom gets the caller's shape; anything else colors itself.
        match atom {
            AtomicToken {
                unspanned: UnspannedAtomicToken::String { .. },
                span,
            } => shapes.push((*input).spanned(span)),
            other => other.color_tokens(shapes),
        }

        Ok(())
    }
}

// Coloring when the `coloring_in_tokens` cfg is set: shapes are recorded on
// the `TokensIterator` itself rather than an external vector.
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for StringShape {
    type Info = ();
    type Input = FlatShape;

    fn name(&self) -> &'static str {
        "StringShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());

        // If the atom didn't expand, there is nothing to color.
        let atom = match atom {
            Err(_) => return Ok(()),
            Ok(atom) => atom,
        };

        // A string atom gets the caller's shape; anything else colors itself.
        match atom {
            AtomicToken {
                unspanned: UnspannedAtomicToken::String { .. },
                span,
            } => token_nodes.color_shape((*input).spanned(span)),
            atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
        }

        Ok(())
    }
}

// Expand a single token into a string-flavored HIR expression.
impl ExpandExpression for StringShape {
    fn name(&self) -> &'static str {
        "string"
    }

    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        parse_single_node(token_nodes, "String", |token, token_span, err| {
            Ok(match token {
                UnspannedToken::GlobPattern
                | UnspannedToken::CompareOperator(..)
                | UnspannedToken::EvaluationOperator(..)
                | UnspannedToken::ExternalWord => return Err(err.error()),
                UnspannedToken::Variable(span) => {
                    expand_variable(span, token_span, &context.source)
                }
                UnspannedToken::ExternalCommand(span) => {
                    hir::Expression::external_command(span, token_span)
                }
                UnspannedToken::Number(_) => hir::Expression::bare(token_span),
                UnspannedToken::Bare => hir::Expression::bare(token_span),
                UnspannedToken::String(span) => hir::Expression::string(span, token_span),
            })
        })
    }
}

// Peek at the next token and succeed only if it is a string.
impl TestSyntax for StringShape {
    fn test<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Option<Peeked<'a, 'b>> {
        let peeked = token_nodes.peek_any();

        match peeked.node {
            Some(token) if token.is_string() => Some(peeked),
            _ => None,
        }
    }
}