From b3bdd4ac6477959baf526c3e092fcc8a530ad315 Mon Sep 17 00:00:00 2001 From: Kira Date: Wed, 24 Jul 2024 17:14:32 +0200 Subject: [PATCH] I really dont know what im doing with these tests sorry --- crates/nu-parser/src/lex.rs | 1 + crates/nu-parser/src/lib.rs | 2 +- crates/nu-parser/src/parser.rs | 3 +- crates/nu-parser/tests/test_lex.rs | 25 ++++++++++++- crates/nu-parser/tests/test_parser.rs | 53 +++++++++++++++++++++++++++ 5 files changed, 80 insertions(+), 4 deletions(-) diff --git a/crates/nu-parser/src/lex.rs b/crates/nu-parser/src/lex.rs index 628b84414a..adcb239f4e 100644 --- a/crates/nu-parser/src/lex.rs +++ b/crates/nu-parser/src/lex.rs @@ -464,6 +464,7 @@ pub fn lex_signature( (state.output, state.error) } +#[derive(Debug)] pub struct LexState<'a> { pub input: &'a [u8], pub output: Vec<Token>, diff --git a/crates/nu-parser/src/lib.rs b/crates/nu-parser/src/lib.rs index 8f871aa815..c5d69cb270 100644 --- a/crates/nu-parser/src/lib.rs +++ b/crates/nu-parser/src/lib.rs @@ -16,7 +16,7 @@ pub use flatten::{ flatten_block, flatten_expression, flatten_pipeline, flatten_pipeline_element, FlatShape, }; pub use known_external::KnownExternal; -pub use lex::{lex, lex_signature, Token, TokenContents}; +pub use lex::{lex, lex_n_tokens, lex_signature, LexState, Token, TokenContents}; pub use lite_parser::{lite_parse, LiteBlock, LiteCommand}; pub use nu_protocol::parser_path::*; pub use parse_keywords::*; diff --git a/crates/nu-parser/src/parser.rs b/crates/nu-parser/src/parser.rs index 92db4205c6..22a0f6ab64 100644 --- a/crates/nu-parser/src/parser.rs +++ b/crates/nu-parser/src/parser.rs @@ -5784,8 +5784,7 @@ pub fn parse_record(working_set: &mut StateWorkingSet, span: Span) -> Expression ) } else { let value = parse_value(working_set, tokens[idx].span, &SyntaxShape::Any); - if let Some(parse_error) = check_record_key_or_value(&working_set, &value, "value") - { + if let Some(parse_error) = check_record_key_or_value(working_set, &value, "value") { working_set.error(parse_error); 
garbage(working_set, value.span) } else { diff --git a/crates/nu-parser/tests/test_lex.rs b/crates/nu-parser/tests/test_lex.rs index 07470a310e..22fe4c4715 100644 --- a/crates/nu-parser/tests/test_lex.rs +++ b/crates/nu-parser/tests/test_lex.rs @@ -1,4 +1,4 @@ -use nu_parser::{lex, lex_signature, Token, TokenContents}; +use nu_parser::{lex, lex_n_tokens, lex_signature, LexState, Token, TokenContents}; use nu_protocol::{ParseError, Span}; #[test] @@ -281,3 +281,26 @@ fn lex_comments() { } ); } + +#[test] +fn lex_manually() { + let file = b"'a'\n#comment\n#comment again\n| continue"; + let mut lex_state = LexState { + input: file, + output: Vec::new(), + error: None, + span_offset: 10, + }; + assert_eq!(lex_n_tokens(&mut lex_state, &[], &[], false, 1), 1); + assert_eq!(lex_state.output.len(), 1); + assert_eq!(lex_n_tokens(&mut lex_state, &[], &[], false, 5), 5); + assert_eq!(lex_state.output.len(), 6); + // Next token is the pipe. + // This shortens the output because it exhausts the input before it can + // compensate for the EOL tokens lost to the line continuation + assert_eq!(lex_n_tokens(&mut lex_state, &[], &[], false, 1), -1); + assert_eq!(lex_state.output.len(), 5); + assert_eq!(file.len(), lex_state.span_offset - 10); + let last_span = lex_state.output.last().unwrap().span; + assert_eq!(&file[last_span.start - 10..last_span.end - 10], b"continue"); +} diff --git a/crates/nu-parser/tests/test_parser.rs b/crates/nu-parser/tests/test_parser.rs index 44cd79a04a..591d783eef 100644 --- a/crates/nu-parser/tests/test_parser.rs +++ b/crates/nu-parser/tests/test_parser.rs @@ -2397,3 +2397,56 @@ mod operator { ); } } + +mod record { + use super::*; + + use nu_protocol::ast::RecordItem; + + #[rstest] + #[case(b"{ :: x }", "Invalid literal")] // Key is bare colon + #[case(b"{ a: x:y }", "Invalid literal")] // Value is bare word with colon + #[case(b"{ a: x('y'):z }", "Invalid literal")] // Value is bare string interpolation with colon + #[case(b"{ ;: x }", "Parse 
mismatch during operation.")] // Key is a non-item token + #[case(b"{ a: || }", "Parse mismatch during operation.")] // Value is a non-item token + fn refuse_confusing_record(#[case] expr: &[u8], #[case] error: &str) { + dbg!(String::from_utf8_lossy(expr)); + let engine_state = EngineState::new(); + let mut working_set = StateWorkingSet::new(&engine_state); + parse(&mut working_set, None, expr, false); + assert_eq!( + working_set.parse_errors.first().map(|e| e.to_string()), + Some(error.to_string()) + ); + } + + #[rstest] + #[case(b"{ a: 2024-07-23T22:54:54.532100627+02:00 b:xy }")] + fn parse_datetime_in_record(#[case] expr: &[u8]) { + dbg!(String::from_utf8_lossy(expr)); + let engine_state = EngineState::new(); + let mut working_set = StateWorkingSet::new(&engine_state); + let block = parse(&mut working_set, None, expr, false); + assert!(working_set.parse_errors.first().is_none()); + let pipeline_el_expr = &block + .pipelines + .first() + .unwrap() + .elements + .first() + .unwrap() + .expr + .expr; + dbg!(pipeline_el_expr); + match pipeline_el_expr { + Expr::FullCellPath(v) => match &v.head.expr { + Expr::Record(fields) => assert!(matches!( + fields[0], + RecordItem::Pair(_, Expression { ty: Type::Date, .. }) + )), + _ => panic!("Expected record head"), + }, + _ => panic!("Expected full cell path"), + } + } +}