test(trixy-parser): Restore test functionality after nasp -> mod rename

This commit is contained in:
Benedikt Peetz 2024-02-18 13:17:34 +01:00
parent 3e59d53b8b
commit dc8a7ecb33
Signed by: bpeetz
GPG Key ID: A5E94010C3A642AD
7 changed files with 86 additions and 66 deletions

View File

@@ -236,7 +236,7 @@ pub enum Keyword {
impl Display for Keyword {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Keyword::mod => f.write_str("mod"),
Keyword::r#mod => f.write_str("mod"),
Keyword::r#fn => f.write_str("fn"),
Keyword::r#struct => f.write_str("struct"),
Keyword::r#enum => f.write_str("enum"),
@@ -250,7 +250,7 @@ impl Display for Keyword {
/// # Examples
///
/// ```
/// use trixy_lang_parser::token;
/// use trixy_parser::token;
/// # fn main() {
/// token![mod];
/// token![;];
@@ -280,7 +280,7 @@ macro_rules! token {
[ParenClose] => { $crate::lexing::TokenKind::ParenClose };
// [)] => { $crate::lexing::TokenKind::ParenthesisClose };
[mod] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::mod) };
[mod] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#mod) };
[fn] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#fn) };
[struct] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#struct) };
[enum] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#enum) };
@@ -289,6 +289,7 @@ macro_rules! token {
// see the `same_kind` method on TokenKind
[Ident] => { $crate::lexing::TokenKind::Identifier("".to_owned()) };
[Identifier] => { $crate::lexing::TokenKind::Identifier("".to_owned()) };
[DocComment] => { $crate::lexing::TokenKind::DocComment("".to_owned()) };
[DocCommentMatch] => { $crate::lexing::TokenKind::DocComment(_doc_comment) };
[Comment] => { $crate::lexing::TokenKind::Comment("".to_owned()) };
@@ -322,6 +323,6 @@ mod tests {
token_macro_test!(tok_expands_to_arrow, ->, => TokenKind::Arrow);
token_macro_test!(tok_expands_to_semicolon, Semicolon, => TokenKind::Semicolon);
token_macro_test!(tok_expands_to_mod, mod, => TokenKind::Keyword(crate::lexing::Keyword::mod));
token_macro_test!(tok_expands_to_mod, mod, => TokenKind::Keyword(crate::lexing::Keyword::r#mod));
token_macro_test!(tok_expands_to_fn, fn, => TokenKind::Keyword(crate::lexing::Keyword::r#fn));
}

View File

@@ -35,59 +35,59 @@ mod commands {
let expected_token_stream = {
let tokens = vec![
Token {
span: TokenSpan { start: 1, end: 5 },
kind: TokenKind::Keyword(Keyword::mod),
span: TokenSpan { start: 1, end: 4 },
kind: TokenKind::Keyword(Keyword::r#mod),
},
Token {
span: TokenSpan { start: 6, end: 14 },
span: TokenSpan { start: 5, end: 13 },
kind: TokenKind::Identifier("commands".to_owned()),
},
Token {
span: TokenSpan { start: 15, end: 16 },
span: TokenSpan { start: 14, end: 15 },
kind: TokenKind::BraceOpen,
},
Token {
span: TokenSpan { start: 21, end: 23 },
span: TokenSpan { start: 20, end: 22 },
kind: TokenKind::Keyword(Keyword::r#fn),
},
Token {
span: TokenSpan { start: 24, end: 30 },
span: TokenSpan { start: 23, end: 29 },
kind: TokenKind::Identifier("expect".to_owned()),
},
Token {
span: TokenSpan { start: 30, end: 31 },
span: TokenSpan { start: 29, end: 30 },
kind: TokenKind::ParenOpen,
},
Token {
span: TokenSpan { start: 31, end: 36 },
span: TokenSpan { start: 30, end: 35 },
kind: TokenKind::Identifier("event".to_owned()),
},
Token {
span: TokenSpan { start: 36, end: 37 },
span: TokenSpan { start: 35, end: 36 },
kind: TokenKind::Colon,
},
Token {
span: TokenSpan { start: 38, end: 44 },
span: TokenSpan { start: 37, end: 43 },
kind: TokenKind::Identifier("String".to_owned()),
},
Token {
span: TokenSpan { start: 44, end: 45 },
span: TokenSpan { start: 43, end: 44 },
kind: TokenKind::ParenClose,
},
Token {
span: TokenSpan { start: 46, end: 48 },
span: TokenSpan { start: 45, end: 47 },
kind: TokenKind::Arrow,
},
Token {
span: TokenSpan { start: 49, end: 55 },
span: TokenSpan { start: 48, end: 54 },
kind: TokenKind::Identifier("String".to_owned()),
},
Token {
span: TokenSpan { start: 55, end: 56 },
span: TokenSpan { start: 54, end: 55 },
kind: TokenKind::Semicolon,
},
Token {
span: TokenSpan { start: 57, end: 58 },
span: TokenSpan { start: 56, end: 57 },
kind: TokenKind::BraceClose,
},
];
@@ -126,27 +126,27 @@ mod mod {{
let expected_token_stream = {
let tokens = vec![
Token {
span: TokenSpan { start: 1, end: 5 },
kind: TokenKind::Keyword(Keyword::mod),
span: TokenSpan { start: 1, end: 4 },
kind: TokenKind::Keyword(Keyword::r#mod),
},
Token {
span: TokenSpan { start: 6, end: 10 },
kind: TokenKind::Keyword(Keyword::mod),
span: TokenSpan { start: 5, end: 8 },
kind: TokenKind::Keyword(Keyword::r#mod),
},
Token {
span: TokenSpan { start: 11, end: 12 },
span: TokenSpan { start: 9, end: 10 },
kind: TokenKind::BraceOpen,
},
Token {
span: TokenSpan { start: 10, end: 11 },
kind: TokenKind::BraceOpen,
},
Token {
span: TokenSpan { start: 12, end: 13 },
kind: TokenKind::BraceOpen,
},
Token {
span: TokenSpan { start: 14, end: 15 },
kind: TokenKind::BraceClose,
},
Token {
span: TokenSpan { start: 15, end: 16 },
span: TokenSpan { start: 13, end: 14 },
kind: TokenKind::BraceClose,
},
];
@@ -181,27 +181,27 @@ fn test_comments() {
let expected_token_stream = {
let tokens = vec![
Token {
span: TokenSpan { start: 33, end: 37 },
kind: TokenKind::Keyword(Keyword::mod),
span: TokenSpan { start: 33, end: 36 },
kind: TokenKind::Keyword(Keyword::r#mod),
},
Token {
span: TokenSpan { start: 38, end: 42 },
kind: TokenKind::Keyword(Keyword::mod),
span: TokenSpan { start: 37, end: 40 },
kind: TokenKind::Keyword(Keyword::r#mod),
},
Token {
span: TokenSpan { start: 43, end: 44 },
span: TokenSpan { start: 41, end: 42 },
kind: TokenKind::BraceOpen,
},
Token {
span: TokenSpan { start: 44, end: 45 },
span: TokenSpan { start: 42, end: 43 },
kind: TokenKind::BraceOpen,
},
Token {
span: TokenSpan { start: 55, end: 56 },
span: TokenSpan { start: 53, end: 54 },
kind: TokenKind::BraceClose,
},
Token {
span: TokenSpan { start: 56, end: 57 },
span: TokenSpan { start: 54, end: 55 },
kind: TokenKind::BraceClose,
},
];

View File

@@ -203,7 +203,7 @@ fn tokenize_ident(text: &str) -> Result<(TokenKind, usize), LexingError> {
// Filter out keywords
let tokenkind = match got {
"mod" => TokenKind::Keyword(Keyword::mod),
"mod" => TokenKind::Keyword(Keyword::r#mod),
"fn" => TokenKind::Keyword(Keyword::r#fn),
"struct" => TokenKind::Keyword(Keyword::r#struct),
"enum" => TokenKind::Keyword(Keyword::r#enum),

View File

@@ -28,21 +28,30 @@ use pretty_assertions::assert_eq;
#[test]
fn test_full() {
let input = "nasp trinitrix {
let input = "
mod trinitrix {
struct Callback {
func: Function,
timeout: Integer,
};
func: void,
timeout: u8,
}
enum CallbackPriority {
High,
Medium,
Low,
};
}
fn execute_callback(callback: Callback, priority: CallbackPriority);
}";
let output = TokenStream::lex(&input).unwrap().parse().unwrap();
}
";
let output = TokenStream::lex(&input)
.unwrap()
.parse_unchecked()
.map_err(|err| panic!("{}", err))
.unwrap()
.process(input.to_owned())
.map_err(|err| panic!("{}", err))
.unwrap();
let expected = CommandSpec {
structures: vec![],
enumerations: vec![],
@@ -93,7 +102,7 @@ fn test_full() {
},
r#type: Type {
identifier: Identifier {
name: "Function".to_owned(),
name: "()".to_owned(),
},
generic_args: vec![],
},
@@ -105,7 +114,7 @@ fn test_full() {
},
r#type: Type {
identifier: Identifier {
name: "Integer".to_owned(),
name: "u8".to_owned(),
},
generic_args: vec![],
},
@@ -143,15 +152,20 @@ fn test_full() {
#[test]
fn test_failing() {
let input = "struct Callback {
func: Function,
timeout: Integer,
};
let input = "
struct Callback {
func: void,
timeout: u32,
}
// The type \"Name\" should not be defined
fn execute_callback(callback: Name);
";
let output = TokenStream::lex(&input).unwrap().parse();
let output = TokenStream::lex(&input)
.unwrap()
.parse_unchecked()
.unwrap()
.process(input.to_owned());
match *(output.unwrap_err().source) {
super::error::ParsingError::TypeNotDeclared { r#type, .. } => {
assert_eq!(
@@ -171,12 +185,17 @@ fn test_comments() {
/// First doc comment
// Some more text
nasp trinitrix {
mod trinitrix {
/// Second doc comment
fn hi(name: String) -> String;
}
";
let output = TokenStream::lex(&input).unwrap().parse().unwrap();
let output = TokenStream::lex(&input)
.unwrap()
.parse_unchecked()
.unwrap()
.process(input.to_owned())
.unwrap();
let expected = CommandSpec {
structures: vec![],
enumerations: vec![],

View File

@@ -78,7 +78,7 @@ impl AdditionalHelp for ParsingError {
expected, actual
),
ParsingError::ExpectedKeyword { actual, .. } => format!(
"I expected a keyword (that is something like 'fn' or 'nasp') but you put a '{}' there!",
"I expected a keyword (that is something like 'fn' or 'mod') but you put a '{}' there!",
actual),
ParsingError::TrailingDocComment { .. } => "I expected some target (a function, namespace, enum, or something like this) which this doc comment annotates, but you put nothing there".to_owned(),
ParsingError::UnexpectedEOF { expected, .. } => format!("Put the expected token ('{expected}') here."),

View File

@@ -84,7 +84,7 @@ impl Parser {
// Use of [peek_raw] here is fine, as we know that the function is only called, when
// something should still be contained in the token stream
match self.peek_raw().kind() {
token![nasp] => Ok(Declaration::Namespace(self.parse_namespace()?)),
token![mod] => Ok(Declaration::Namespace(self.parse_namespace()?)),
token![fn] => Ok(Declaration::Function(self.parse_function()?)),
token![struct] => Ok(Declaration::Structure(self.parse_structure()?)),
token![enum] => Ok(Declaration::Enumeration(self.parse_enumeration()?)),
@@ -181,7 +181,7 @@ impl Parser {
fn parse_namespace(&mut self) -> Result<Namespace, ParsingError> {
let attributes = self.parse_doc_comments()?;
self.expect(token![nasp])?;
self.expect(token![mod])?;
let mut namespace = Namespace {
name: self.expect(token![Ident])?,
@@ -328,9 +328,9 @@ impl Parser {
/// };
///
/// # fn main() {
/// let token_stream = TokenStream::lex("nasp {}").unwrap();
/// let token_stream = TokenStream::lex("mod {}").unwrap();
/// let parser = Parser::new(token_stream);
/// assert_eq!(parser.expect(token![nasp]).unwrap(), TokenKind::Keyword(Keyword::nasp));
/// assert_eq!(parser.expect(token![mod]).unwrap(), TokenKind::Keyword(Keyword::mod));
/// assert_eq!(parser.expect(token![BraceOpen]).unwrap(), TokenKind::BraceOpen);
/// assert_eq!(parser.expect(token![BraceClose]).unwrap(), TokenKind::BraceClose);
/// assert!(parser.expect(token![BraceClose]).is_err());

View File

@@ -32,7 +32,7 @@ fn test_failing() {
let input = "
fn print(message: CommandTransferValue);
nasp trinitrix { {}
mod trinitrix { {}
fn hi honner(name: String) -> String; ;
}
@@ -49,7 +49,7 @@ nasp trinitrix { {}
fn test_full() {
let input = "fn print(message: CommandTransferValue);
nasp trinitrix {
mod trinitrix {
fn hi(name: String) -> String;
}
";
@@ -80,22 +80,22 @@ nasp trinitrix {
}],
namespaces: vec![Namespace {
name: Token {
span: TokenSpan { start: 47, end: 56 },
span: TokenSpan { start: 46, end: 55 },
kind: TokenKind::Identifier("trinitrix".to_owned()),
},
functions: vec![Function {
identifier: Token {
span: TokenSpan { start: 66, end: 68 },
span: TokenSpan { start: 65, end: 67 },
kind: TokenKind::Identifier("hi".to_owned()),
},
inputs: vec![NamedType {
name: Token {
span: TokenSpan { start: 69, end: 73 },
span: TokenSpan { start: 68, end: 72 },
kind: TokenKind::Identifier("name".to_owned()),
},
r#type: Type {
identifier: Token {
span: TokenSpan { start: 75, end: 81 },
span: TokenSpan { start: 74, end: 80 },
kind: TokenKind::Identifier("String".to_owned()),
},
generic_args: vec![],
@@ -103,7 +103,7 @@ nasp trinitrix {
}],
output: Some(Type {
identifier: Token {
span: TokenSpan { start: 86, end: 92 },
span: TokenSpan { start: 85, end: 91 },
kind: TokenKind::Identifier("String".to_owned()),
},
generic_args: vec![],