feat(trixy-parser): Adapt Trixy to be a complete subset of Rust
This changes two things:
- First, it renames the `nasp` keyword to `mod`, removing an unnecessary deviation from Rust.
- Second, it removes the semicolon after enumeration and structure declarations, where it serves no purpose.
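For illustration, a small interface definition before and after this change (a hypothetical sketch: only the `fn expect` line is taken from the existing test suite, the `Event` structure is made up):

    // Before:
    nasp commands {
        struct Event {
            name: String,
        };

        fn expect(event: String) -> String;
    }

    // After:
    mod commands {
        struct Event {
            name: String,
        }

        fn expect(event: String) -> String;
    }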
parent b3c6a4c1a1
commit b6799877bb
@@ -30,9 +30,9 @@
 CommandSpec = {Function | Namespace | Enumeration | Structure } ;
 
 Function = {DocComment} "fn" Identifier "(" [NamedType {"," NamedType }] ")" [ "->" Type ] ";" ;
-Namespace = {DocComment} "nasp" Identifier "{" {Function | Namespace | Enumeration | Structure} "}" ;
-Structure = {DocComment} "struct" Identifier "{" [DocNamedType {"," DocNamedType } [","]] "}" ";";
-Enumeration = {DocComment} "enum" Identifier "{" [DocIdentifier {"," DocIdentifier} [","]] "}" ";";
+Namespace = {DocComment} "mod" Identifier "{" {Function | Namespace | Enumeration | Structure} "}" ;
+Structure = {DocComment} "struct" Identifier "{" [DocNamedType {"," DocNamedType } [","]] "}";
+Enumeration = {DocComment} "enum" Identifier "{" [DocIdentifier {"," DocIdentifier} [","]] "}";
 
 Type = Identifier ["<" Type {"," Type} ">"];
Binary file not shown.
@@ -18,6 +18,9 @@
  * If not, see <https://www.gnu.org/licenses/>.
  */
 
+//! This module provides the type definitions for the parser.
+//! These types are split into type-checked ones [`checked`] and the raw types [`unchecked`]
+
 pub mod checked;
 pub mod unchecked;
@@ -221,7 +221,7 @@ impl Display for TokenKind {
 pub enum Keyword {
     /// Start a namespace declaration
     #[allow(non_camel_case_types)]
-    nasp,
+    r#mod,
     /// Start a function declaration
     #[allow(non_camel_case_types)]
     r#fn,
@@ -236,7 +236,7 @@ pub enum Keyword {
 impl Display for Keyword {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Keyword::nasp => f.write_str("nasp"),
+            Keyword::r#mod => f.write_str("mod"),
             Keyword::r#fn => f.write_str("fn"),
             Keyword::r#struct => f.write_str("struct"),
             Keyword::r#enum => f.write_str("enum"),
@@ -252,7 +252,7 @@ impl Display for Keyword {
 /// ```
 /// use trixy_lang_parser::token;
 /// # fn main() {
-/// token![nasp];
+/// token![mod];
 /// token![;];
 /// token![Arrow];
 /// # }
@@ -280,7 +280,7 @@ macro_rules! token {
     [ParenClose] => { $crate::lexing::TokenKind::ParenClose };
     // [)] => { $crate::lexing::TokenKind::ParenthesisClose };
 
-    [nasp] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::nasp) };
+    [mod] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#mod) };
     [fn] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#fn) };
     [struct] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#struct) };
     [enum] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#enum) };
@@ -322,6 +322,6 @@ mod tests {
 
     token_macro_test!(tok_expands_to_arrow, ->, => TokenKind::Arrow);
     token_macro_test!(tok_expands_to_semicolon, Semicolon, => TokenKind::Semicolon);
-    token_macro_test!(tok_expands_to_nasp, nasp, => TokenKind::Keyword(crate::lexing::Keyword::nasp));
+    token_macro_test!(tok_expands_to_mod, mod, => TokenKind::Keyword(crate::lexing::Keyword::r#mod));
     token_macro_test!(tok_expands_to_fn, fn, => TokenKind::Keyword(crate::lexing::Keyword::r#fn));
 }
@@ -27,7 +27,7 @@ use pretty_assertions::assert_eq;
 #[test]
 fn test_lexing_trixy() {
     let input = "
-nasp commands {
+mod commands {
     fn expect(event: String) -> String;
 }
 ";
@@ -36,7 +36,7 @@ nasp commands {
     let tokens = vec![
         Token {
             span: TokenSpan { start: 1, end: 5 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 6, end: 14 },
@@ -102,8 +102,8 @@ nasp commands {
 #[test]
 fn test_failing_lexing() {
     let input = "
-nasp trinitrix {
-    nasp - commands {
+mod trinitrix {
+    mod - commands {
         fn hi(strings: String) -> String;
     }
 }
@@ -119,7 +119,7 @@ nasp trinitrix {
 #[test]
 fn test_multiple_tokens() {
     let input = "
-nasp nasp {{
+mod mod {{
 }}
 ";
     let token_stream = TokenStream::lex(input).unwrap();
@@ -127,11 +127,11 @@ nasp nasp {{
     let tokens = vec![
         Token {
             span: TokenSpan { start: 1, end: 5 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 6, end: 10 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 11, end: 12 },
@@ -162,7 +162,7 @@ nasp nasp {{
 fn test_comments() {
     let input = "
 // Some comment
-nasp nasp {{
+mod mod {{
 
 }}
 // NOTE(@soispha): We do not support nested multi line comments <2023-12-16>
@@ -182,11 +182,11 @@ fn test_comments() {
     let tokens = vec![
         Token {
             span: TokenSpan { start: 33, end: 37 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 38, end: 42 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 43, end: 44 },
@@ -203,7 +203,7 @@ fn tokenize_ident(text: &str) -> Result<(TokenKind, usize), LexingError> {
 
     // Filter out keywords
     let tokenkind = match got {
-        "nasp" => TokenKind::Keyword(Keyword::nasp),
+        "mod" => TokenKind::Keyword(Keyword::r#mod),
         "fn" => TokenKind::Keyword(Keyword::r#fn),
         "struct" => TokenKind::Keyword(Keyword::r#struct),
         "enum" => TokenKind::Keyword(Keyword::r#enum),
@@ -234,7 +234,6 @@ impl Parser {
             }
         }
         self.expect(token![BraceClose])?;
-        self.expect(token![;])?;
         Ok(Enumeration {
             identifier,
             states,
@@ -261,7 +260,6 @@ impl Parser {
             }
         }
         self.expect(token![BraceClose])?;
-        self.expect(token![;])?;
 
         Ok(Structure {
             identifier: name,