From dc8a7ecb33e06cd293b08c368bb74ce6b48ebdc3 Mon Sep 17 00:00:00 2001
From: Soispha
Date: Sun, 18 Feb 2024 13:17:34 +0100
Subject: [PATCH] test(trixy-parser): Restore test functionality after nasp -> mod rename

---
 trixy-parser/src/lexing/mod.rs              |  9 +--
 trixy-parser/src/lexing/test.rs             | 66 ++++++++++-----------
 trixy-parser/src/lexing/tokenizer.rs        |  2 +-
 trixy-parser/src/parsing/checked/test.rs    | 51 +++++++++++-----
 trixy-parser/src/parsing/unchecked/error.rs |  2 +-
 trixy-parser/src/parsing/unchecked/mod.rs   |  8 +--
 trixy-parser/src/parsing/unchecked/test.rs  | 14 ++---
 7 files changed, 86 insertions(+), 66 deletions(-)

diff --git a/trixy-parser/src/lexing/mod.rs b/trixy-parser/src/lexing/mod.rs
index 515510c..ebdc5bd 100644
--- a/trixy-parser/src/lexing/mod.rs
+++ b/trixy-parser/src/lexing/mod.rs
@@ -236,7 +236,7 @@ pub enum Keyword {
 impl Display for Keyword {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Keyword::mod => f.write_str("mod"),
+            Keyword::r#mod => f.write_str("mod"),
             Keyword::r#fn => f.write_str("fn"),
             Keyword::r#struct => f.write_str("struct"),
             Keyword::r#enum => f.write_str("enum"),
@@ -250,7 +250,7 @@
 /// # Examples
 ///
 /// ```
-/// use trixy_lang_parser::token;
+/// use trixy_parser::token;
 /// # fn main() {
 /// token![mod];
 /// token![;];
@@ -280,7 +280,7 @@ macro_rules! token {
     [ParenClose] => { $crate::lexing::TokenKind::ParenClose };
     // [)] => { $crate::lexing::TokenKind::ParenthesisClose };
 
-    [mod] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::mod) };
+    [mod] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#mod) };
     [fn] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#fn) };
     [struct] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#struct) };
     [enum] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#enum) };
@@ -289,6 +289,7 @@ macro_rules! token {
     // see the `same_kind` method on TokenKind
     [Ident] => { $crate::lexing::TokenKind::Identifier("".to_owned()) };
     [Identifier] => { $crate::lexing::TokenKind::Identifier("".to_owned()) };
+    [DocComment] => { $crate::lexing::TokenKind::DocComment("".to_owned()) };
     [DocCommentMatch] => { $crate::lexing::TokenKind::DocComment(_doc_comment) };
     [Comment] => { $crate::lexing::TokenKind::Comment("".to_owned()) };
 
@@ -322,6 +323,6 @@ mod tests {
     token_macro_test!(tok_expands_to_arrow, ->, => TokenKind::Arrow);
     token_macro_test!(tok_expands_to_semicolon, Semicolon, => TokenKind::Semicolon);
 
-    token_macro_test!(tok_expands_to_mod, mod, => TokenKind::Keyword(crate::lexing::Keyword::mod));
+    token_macro_test!(tok_expands_to_mod, mod, => TokenKind::Keyword(crate::lexing::Keyword::r#mod));
    token_macro_test!(tok_expands_to_fn, fn, => TokenKind::Keyword(crate::lexing::Keyword::r#fn));
 }
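Note: `mod` is a reserved word in Rust, which is why the rename forces the raw-identifier spelling `r#mod` everywhere the enum variant is named (the broken intermediate state used plain `Keyword::mod`, which does not compile). A minimal, self-contained sketch of the pattern; the variant names match the patch, while the derive, the attribute, and the `as_str` helper are illustrative and not taken from the crate:

    #[derive(Debug, PartialEq, Eq, Clone, Copy)]
    #[allow(non_camel_case_types)]
    pub enum Keyword {
        // `mod` is a Rust keyword, so the variant must be written `r#mod`;
        // `fn`, `struct`, and `enum` need the same raw-identifier treatment.
        r#mod,
        r#fn,
        r#struct,
        r#enum,
    }

    impl Keyword {
        fn as_str(self) -> &'static str {
            match self {
                Keyword::r#mod => "mod",
                Keyword::r#fn => "fn",
                Keyword::r#struct => "struct",
                Keyword::r#enum => "enum",
            }
        }
    }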
diff --git a/trixy-parser/src/lexing/test.rs b/trixy-parser/src/lexing/test.rs
index 4ae9d8c..3f9ae16 100644
--- a/trixy-parser/src/lexing/test.rs
+++ b/trixy-parser/src/lexing/test.rs
@@ -35,59 +35,59 @@ mod commands {
     let expected_token_stream = {
         let tokens = vec![
             Token {
-                span: TokenSpan { start: 1, end: 5 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 1, end: 4 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 6, end: 14 },
+                span: TokenSpan { start: 5, end: 13 },
                 kind: TokenKind::Identifier("commands".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 15, end: 16 },
+                span: TokenSpan { start: 14, end: 15 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
-                span: TokenSpan { start: 21, end: 23 },
+                span: TokenSpan { start: 20, end: 22 },
                 kind: TokenKind::Keyword(Keyword::r#fn),
             },
             Token {
-                span: TokenSpan { start: 24, end: 30 },
+                span: TokenSpan { start: 23, end: 29 },
                 kind: TokenKind::Identifier("expect".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 30, end: 31 },
+                span: TokenSpan { start: 29, end: 30 },
                 kind: TokenKind::ParenOpen,
             },
             Token {
-                span: TokenSpan { start: 31, end: 36 },
+                span: TokenSpan { start: 30, end: 35 },
                 kind: TokenKind::Identifier("event".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 36, end: 37 },
+                span: TokenSpan { start: 35, end: 36 },
                 kind: TokenKind::Colon,
             },
             Token {
-                span: TokenSpan { start: 38, end: 44 },
+                span: TokenSpan { start: 37, end: 43 },
                 kind: TokenKind::Identifier("String".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 44, end: 45 },
+                span: TokenSpan { start: 43, end: 44 },
                 kind: TokenKind::ParenClose,
             },
             Token {
-                span: TokenSpan { start: 46, end: 48 },
+                span: TokenSpan { start: 45, end: 47 },
                 kind: TokenKind::Arrow,
             },
             Token {
-                span: TokenSpan { start: 49, end: 55 },
+                span: TokenSpan { start: 48, end: 54 },
                 kind: TokenKind::Identifier("String".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 55, end: 56 },
+                span: TokenSpan { start: 54, end: 55 },
                 kind: TokenKind::Semicolon,
             },
             Token {
-                span: TokenSpan { start: 57, end: 58 },
+                span: TokenSpan { start: 56, end: 57 },
                 kind: TokenKind::BraceClose,
             },
         ];
@@ -126,27 +126,27 @@ mod mod {{
     let expected_token_stream = {
         let tokens = vec![
             Token {
-                span: TokenSpan { start: 1, end: 5 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 1, end: 4 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 6, end: 10 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 5, end: 8 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 11, end: 12 },
+                span: TokenSpan { start: 9, end: 10 },
+                kind: TokenKind::BraceOpen,
+            },
+            Token {
+                span: TokenSpan { start: 10, end: 11 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
                 span: TokenSpan { start: 12, end: 13 },
-                kind: TokenKind::BraceOpen,
-            },
-            Token {
-                span: TokenSpan { start: 14, end: 15 },
                 kind: TokenKind::BraceClose,
             },
             Token {
-                span: TokenSpan { start: 15, end: 16 },
+                span: TokenSpan { start: 13, end: 14 },
                 kind: TokenKind::BraceClose,
             },
         ];
@@ -181,27 +181,27 @@ fn test_comments() {
     let expected_token_stream = {
         let tokens = vec![
             Token {
-                span: TokenSpan { start: 33, end: 37 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 33, end: 36 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 38, end: 42 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 37, end: 40 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 43, end: 44 },
+                span: TokenSpan { start: 41, end: 42 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
-                span: TokenSpan { start: 44, end: 45 },
+                span: TokenSpan { start: 42, end: 43 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
-                span: TokenSpan { start: 55, end: 56 },
+                span: TokenSpan { start: 53, end: 54 },
                 kind: TokenKind::BraceClose,
             },
             Token {
-                span: TokenSpan { start: 56, end: 57 },
+                span: TokenSpan { start: 54, end: 55 },
                 kind: TokenKind::BraceClose,
             },
         ];
diff --git a/trixy-parser/src/lexing/tokenizer.rs b/trixy-parser/src/lexing/tokenizer.rs
index 2c2a902..28acb06 100644
--- a/trixy-parser/src/lexing/tokenizer.rs
+++ b/trixy-parser/src/lexing/tokenizer.rs
@@ -203,7 +203,7 @@ fn tokenize_ident(text: &str) -> Result<(TokenKind, usize), LexingError> {
 
     // Filter out keywords
     let tokenkind = match got {
-        "mod" => TokenKind::Keyword(Keyword::mod),
+        "mod" => TokenKind::Keyword(Keyword::r#mod),
         "fn" => TokenKind::Keyword(Keyword::r#fn),
         "struct" => TokenKind::Keyword(Keyword::r#struct),
         "enum" => TokenKind::Keyword(Keyword::r#enum),
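The span changes above all follow from `mod` being one byte shorter than `nasp`: the keyword's own span shrinks by one, and every later token shifts left by one per preceding keyword occurrence. The expected values can be re-derived mechanically; the input literal below is reconstructed from the spans in the diff (leading newline included), not copied from the test file:

    fn main() {
        // Assumed input of the first lexer test, reconstructed from the diff.
        let input = "\nmod commands {\n    fn expect(event: String) -> String;\n}\n";
        // TokenSpan is a half-open byte range: `end` is one past the last byte.
        for word in ["mod", "commands", "fn", "expect", "event", "String"] {
            let start = input.find(word).unwrap(); // first occurrence only
            println!("{word}: start {start}, end {}", start + word.len());
        }
        // Prints e.g. `mod: start 1, end 4` and `commands: start 5, end 13`,
        // matching the updated TokenSpan values above.
    }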
diff --git a/trixy-parser/src/parsing/checked/test.rs b/trixy-parser/src/parsing/checked/test.rs
index 6037feb..35c30f4 100644
--- a/trixy-parser/src/parsing/checked/test.rs
+++ b/trixy-parser/src/parsing/checked/test.rs
@@ -28,21 +28,30 @@ use pretty_assertions::assert_eq;
 
 #[test]
 fn test_full() {
-    let input = "nasp trinitrix {
+    let input = "
+mod trinitrix {
     struct Callback {
-        func: Function,
-        timeout: Integer,
-    };
+        func: void,
+        timeout: u8,
+    }
 
     enum CallbackPriority {
         High,
         Medium,
         Low,
-    };
+    }
 
     fn execute_callback(callback: Callback, priority: CallbackPriority);
-}";
-    let output = TokenStream::lex(&input).unwrap().parse().unwrap();
+}
+";
+    let output = TokenStream::lex(&input)
+        .unwrap()
+        .parse_unchecked()
+        .map_err(|err| panic!("{}", err))
+        .unwrap()
+        .process(input.to_owned())
+        .map_err(|err| panic!("{}", err))
+        .unwrap();
     let expected = CommandSpec {
         structures: vec![],
         enumerations: vec![],
@@ -93,7 +102,7 @@ fn test_full() {
                 },
                 r#type: Type {
                     identifier: Identifier {
-                        name: "Function".to_owned(),
+                        name: "()".to_owned(),
                     },
                     generic_args: vec![],
                 },
@@ -105,7 +114,7 @@ fn test_full() {
                 },
                 r#type: Type {
                     identifier: Identifier {
-                        name: "Integer".to_owned(),
+                        name: "u8".to_owned(),
                     },
                     generic_args: vec![],
                 },
@@ -143,15 +152,20 @@ fn test_full() {
 
 #[test]
 fn test_failing() {
-    let input = "struct Callback {
-    func: Function,
-    timeout: Integer,
-};
+    let input = "
+struct Callback {
+    func: void,
+    timeout: u32,
+}
 
 // The type \"Name\" should not be defined
 fn execute_callback(callback: Name);
 ";
-    let output = TokenStream::lex(&input).unwrap().parse();
+    let output = TokenStream::lex(&input)
+        .unwrap()
+        .parse_unchecked()
+        .unwrap()
+        .process(input.to_owned());
     match *(output.unwrap_err().source) {
         super::error::ParsingError::TypeNotDeclared { r#type, .. } => {
             assert_eq!(
@@ -171,12 +185,17 @@ fn test_comments() {
 
 /// First doc comment
 // Some more text
-nasp trinitrix {
+mod trinitrix {
     /// Second doc comment
     fn hi(name: String) -> String;
 }
 ";
-    let output = TokenStream::lex(&input).unwrap().parse().unwrap();
+    let output = TokenStream::lex(&input)
+        .unwrap()
+        .parse_unchecked()
+        .unwrap()
+        .process(input.to_owned())
+        .unwrap();
     let expected = CommandSpec {
         structures: vec![],
         enumerations: vec![],
diff --git a/trixy-parser/src/parsing/unchecked/error.rs b/trixy-parser/src/parsing/unchecked/error.rs
index 476d932..0b2459b 100644
--- a/trixy-parser/src/parsing/unchecked/error.rs
+++ b/trixy-parser/src/parsing/unchecked/error.rs
@@ -78,7 +78,7 @@ impl AdditionalHelp for ParsingError {
             expected, actual
         ),
         ParsingError::ExpectedKeyword { actual, .. } => format!(
-            "I expected a keyword (that is something like 'fn' or 'nasp') but you put a '{}' there!",
+            "I expected a keyword (that is something like 'fn' or 'mod') but you put a '{}' there!",
             actual),
         ParsingError::TrailingDocComment { .. } => "I expected some target (a function, namespace, enum, or something like this) which this doc comment annotates, but you put nothing there".to_owned(),
         ParsingError::UnexpectedEOF { expected, .. } => format!("Put the expected token ('{expected}') here."),
diff --git a/trixy-parser/src/parsing/unchecked/mod.rs b/trixy-parser/src/parsing/unchecked/mod.rs
index 65450b3..1114d6e 100644
--- a/trixy-parser/src/parsing/unchecked/mod.rs
+++ b/trixy-parser/src/parsing/unchecked/mod.rs
@@ -84,7 +84,7 @@ impl Parser {
         // Use of [peek_raw] here is fine, as we know that the function is only called, when
         // something should still be contained in the token stream
         match self.peek_raw().kind() {
-            token![nasp] => Ok(Declaration::Namespace(self.parse_namespace()?)),
+            token![mod] => Ok(Declaration::Namespace(self.parse_namespace()?)),
             token![fn] => Ok(Declaration::Function(self.parse_function()?)),
             token![struct] => Ok(Declaration::Structure(self.parse_structure()?)),
             token![enum] => Ok(Declaration::Enumeration(self.parse_enumeration()?)),
@@ -181,7 +181,7 @@ impl Parser {
     fn parse_namespace(&mut self) -> Result<Namespace, ParsingError> {
         let attributes = self.parse_doc_comments()?;
 
-        self.expect(token![nasp])?;
+        self.expect(token![mod])?;
 
         let mut namespace = Namespace {
             name: self.expect(token![Ident])?,
@@ -328,9 +328,9 @@ impl Parser {
     /// };
     ///
     /// # fn main() {
-    /// let token_stream = TokenStream::lex("nasp {}").unwrap();
+    /// let token_stream = TokenStream::lex("mod {}").unwrap();
     /// let parser = Parser::new(token_stream);
-    /// assert_eq!(parser.expect(token![nasp]).unwrap(), TokenKind::Keyword(Keyword::nasp));
+    /// assert_eq!(parser.expect(token![mod]).unwrap(), TokenKind::Keyword(Keyword::r#mod));
     /// assert_eq!(parser.expect(token![BraceOpen]).unwrap(), TokenKind::BraceOpen);
     /// assert_eq!(parser.expect(token![BraceClose]).unwrap(), TokenKind::BraceClose);
     /// assert!(parser.expect(token![BraceClose]).is_err());
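Besides the keyword rename, the tests above switch from the removed `parse()` convenience to the two-stage pipeline the parser actually exposes: `parse_unchecked()` builds the raw AST, and `process()` runs the checks that turn it into a `CommandSpec`. Sketched as a hypothetical helper; the method and type names come from the patch, while the wrapper itself and its error handling are simplified for illustration:

    // Hypothetical helper mirroring the call shape used by the updated tests.
    fn parse(src: &str) -> CommandSpec {
        TokenStream::lex(src)          // lexing: source text -> token stream
            .unwrap()
            .parse_unchecked()         // stage 1: token stream -> unchecked AST
            .unwrap()
            .process(src.to_owned())   // stage 2: validation -> checked CommandSpec
            .unwrap()
    }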
diff --git a/trixy-parser/src/parsing/unchecked/test.rs b/trixy-parser/src/parsing/unchecked/test.rs
index f25083f..68868d3 100644
--- a/trixy-parser/src/parsing/unchecked/test.rs
+++ b/trixy-parser/src/parsing/unchecked/test.rs
@@ -32,7 +32,7 @@ fn test_failing() {
     let input = "
 fn print(message: CommandTransferValue);
 
-nasp trinitrix { {}
+mod trinitrix { {}
     fn hi honner(name: String) -> String;
     ;
 }
@@ -49,7 +49,7 @@
 
 fn test_full() {
     let input = "fn print(message: CommandTransferValue);
-nasp trinitrix {
+mod trinitrix {
     fn hi(name: String) -> String;
 }
 ";
@@ -80,22 +80,22 @@ nasp trinitrix {
             }],
             namespaces: vec![Namespace {
                 name: Token {
-                    span: TokenSpan { start: 47, end: 56 },
+                    span: TokenSpan { start: 46, end: 55 },
                     kind: TokenKind::Identifier("trinitrix".to_owned()),
                 },
                 functions: vec![Function {
                     identifier: Token {
-                        span: TokenSpan { start: 66, end: 68 },
+                        span: TokenSpan { start: 65, end: 67 },
                        kind: TokenKind::Identifier("hi".to_owned()),
                     },
                     inputs: vec![NamedType {
                         name: Token {
-                            span: TokenSpan { start: 69, end: 73 },
+                            span: TokenSpan { start: 68, end: 72 },
                             kind: TokenKind::Identifier("name".to_owned()),
                         },
                         r#type: Type {
                             identifier: Token {
-                                span: TokenSpan { start: 75, end: 81 },
+                                span: TokenSpan { start: 74, end: 80 },
                                 kind: TokenKind::Identifier("String".to_owned()),
                             },
                             generic_args: vec![],
                         },
                     }],
                     output: Some(Type {
                         identifier: Token {
-                            span: TokenSpan { start: 86, end: 92 },
+                            span: TokenSpan { start: 85, end: 91 },
                             kind: TokenKind::Identifier("String".to_owned()),
                         },
                         generic_args: vec![],