I really don't know what I'm doing with these tests, sorry

Kira 2024-07-24 17:14:32 +02:00
parent cc980115fc
commit b3bdd4ac64
5 changed files with 80 additions and 4 deletions


@@ -464,6 +464,7 @@ pub fn lex_signature(
    (state.output, state.error)
}

#[derive(Debug)]
pub struct LexState<'a> {
    pub input: &'a [u8],
    pub output: Vec<Token>,
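
The hunk above cuts the new struct off after its first two fields. For orientation, here is a sketch of the complete LexState as it can be reconstructed from the lex_manually test added further down in this commit; the types of error and span_offset are inferred from how that test initializes and uses them (error: None, span_offset: 10 used in span arithmetic), not copied from the source, so treat them as assumptions.

// Sketch only; the last two fields and their types are inferred from the test below.
#[derive(Debug)]
pub struct LexState<'a> {
    pub input: &'a [u8],           // bytes still to be lexed
    pub output: Vec<Token>,        // tokens produced so far
    pub error: Option<ParseError>, // first lexing error, if any (assumed type)
    pub span_offset: usize,        // offset of `input` within the original source (assumed type)
}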


@@ -16,7 +16,7 @@ pub use flatten::{
    flatten_block, flatten_expression, flatten_pipeline, flatten_pipeline_element, FlatShape,
};
pub use known_external::KnownExternal;
-pub use lex::{lex, lex_signature, Token, TokenContents};
+pub use lex::{lex, lex_n_tokens, lex_signature, LexState, Token, TokenContents};
pub use lite_parser::{lite_parse, LiteBlock, LiteCommand};
pub use nu_protocol::parser_path::*;
pub use parse_keywords::*;


@@ -5784,8 +5784,7 @@ pub fn parse_record(working_set: &mut StateWorkingSet, span: Span) -> Expression
        )
    } else {
        let value = parse_value(working_set, tokens[idx].span, &SyntaxShape::Any);
-       if let Some(parse_error) = check_record_key_or_value(&working_set, &value, "value")
-       {
+       if let Some(parse_error) = check_record_key_or_value(working_set, &value, "value") {
            working_set.error(parse_error);
            garbage(working_set, value.span)
        } else {


@@ -1,4 +1,4 @@
-use nu_parser::{lex, lex_signature, Token, TokenContents};
+use nu_parser::{lex, lex_n_tokens, lex_signature, LexState, Token, TokenContents};
use nu_protocol::{ParseError, Span};
#[test]
@@ -281,3 +281,26 @@ fn lex_comments() {
        }
    );
}

#[test]
fn lex_manually() {
    let file = b"'a'\n#comment\n#comment again\n| continue";
    let mut lex_state = LexState {
        input: file,
        output: Vec::new(),
        error: None,
        span_offset: 10,
    };
    assert_eq!(lex_n_tokens(&mut lex_state, &[], &[], false, 1), 1);
    assert_eq!(lex_state.output.len(), 1);
    assert_eq!(lex_n_tokens(&mut lex_state, &[], &[], false, 5), 5);
    assert_eq!(lex_state.output.len(), 6);
    // Next token is the pipe.
    // This shortens the output because it exhausts the input before it can
    // compensate for the EOL tokens lost to the line continuation.
    assert_eq!(lex_n_tokens(&mut lex_state, &[], &[], false, 1), -1);
    assert_eq!(lex_state.output.len(), 5);

    assert_eq!(file.len(), lex_state.span_offset - 10);

    let last_span = lex_state.output.last().unwrap().span;
    assert_eq!(&file[last_span.start - 10..last_span.end - 10], b"continue");
}
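
Two things the assertions above imply, worth spelling out: the value lex_n_tokens returns is the net change in the output length (it comes back as -1 when the lexer drops the EOL token for the "| continue" line continuation and then runs out of input), and because span_offset was set to 10, token spans are absolute, so the initial offset must be subtracted before indexing back into the local buffer. A hypothetical helper, not part of this commit, that captures that second point:

// Hypothetical convenience, not present in the commit: recover the source
// bytes of a token when lexing started at a non-zero span_offset.
// `base` is the span_offset the LexState was created with (10 in the test above).
fn token_text<'a>(file: &'a [u8], span: nu_protocol::Span, base: usize) -> &'a [u8] {
    &file[span.start - base..span.end - base]
}

With the state from the test, token_text(file, last_span, 10) returns b"continue", which is exactly what the final assertion checks.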


@@ -2397,3 +2397,56 @@ mod operator {
        );
    }
}

mod record {
    use super::*;

    use nu_protocol::ast::RecordItem;

    #[rstest]
    #[case(b"{ :: x }", "Invalid literal")] // Key is bare colon
    #[case(b"{ a: x:y }", "Invalid literal")] // Value is bare word with colon
    #[case(b"{ a: x('y'):z }", "Invalid literal")] // Value is bare string interpolation with colon
    #[case(b"{ ;: x }", "Parse mismatch during operation.")] // Key is a non-item token
    #[case(b"{ a: || }", "Parse mismatch during operation.")] // Value is a non-item token
    fn refuse_confusing_record(#[case] expr: &[u8], #[case] error: &str) {
        dbg!(String::from_utf8_lossy(expr));
        let engine_state = EngineState::new();
        let mut working_set = StateWorkingSet::new(&engine_state);
        parse(&mut working_set, None, expr, false);
        assert_eq!(
            working_set.parse_errors.first().map(|e| e.to_string()),
            Some(error.to_string())
        );
    }

    #[rstest]
    #[case(b"{ a: 2024-07-23T22:54:54.532100627+02:00 b:xy }")]
    fn parse_datetime_in_record(#[case] expr: &[u8]) {
        dbg!(String::from_utf8_lossy(expr));
        let engine_state = EngineState::new();
        let mut working_set = StateWorkingSet::new(&engine_state);
        let block = parse(&mut working_set, None, expr, false);
        assert!(working_set.parse_errors.first().is_none());
        let pipeline_el_expr = &block
            .pipelines
            .first()
            .unwrap()
            .elements
            .first()
            .unwrap()
            .expr
            .expr;
        dbg!(pipeline_el_expr);
        match pipeline_el_expr {
            Expr::FullCellPath(v) => match &v.head.expr {
                Expr::Record(fields) => assert!(matches!(
                    fields[0],
                    RecordItem::Pair(_, Expression { ty: Type::Date, .. })
                )),
                _ => panic!("Expected record head"),
            },
            _ => panic!("Expected full cell path"),
        }
    }
}