feat(trixy-parser): Adapt Trixy to be a complete subset of Rust
This changes two important things:
- Firstly, it renames the `nasp` keyword to `mod`, removing an unnecessary deviation from Rust.
- Secondly, it removes the semicolon after an enumeration or structure declaration, which serves no purpose in these positions.
parent b3c6a4c1a1
commit b6799877bb
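For illustration, the rename turns an interface like the one used in the lexer tests below

    nasp commands {
        fn expect(event: String) -> String;
    }

into

    mod commands {
        fn expect(event: String) -> String;
    }

so that a Trixy file reads as syntactically valid Rust.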
@@ -30,9 +30,9 @@
 CommandSpec = {Function | Namespace | Enumeration | Structure } ;

 Function = {DocComment} "fn" Identifier "(" [NamedType {"," NamedType }] ")" [ "->" Type ] ";" ;

-Namespace = {DocComment} "nasp" Identifier "{" {Function | Namespace | Enumeration | Structure} "}" ;
+Namespace = {DocComment} "mod" Identifier "{" {Function | Namespace | Enumeration | Structure} "}" ;

-Structure = {DocComment} "struct" Identifier "{" [DocNamedType {"," DocNamedType } [","]] "}" ";";
+Structure = {DocComment} "struct" Identifier "{" [DocNamedType {"," DocNamedType } [","]] "}";

-Enumeration = {DocComment} "enum" Identifier "{" [DocIdentifier {"," DocIdentifier} [","]] "}" ";";
+Enumeration = {DocComment} "enum" Identifier "{" [DocIdentifier {"," DocIdentifier} [","]] "}";

 Type = Identifier ["<" Type {"," Type} ">"];
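Under the updated Structure and Enumeration productions, a declaration now ends at its closing brace: the trailing ";" is gone, while the optional trailing comma inside the braces is kept, as in Rust. A hypothetical input the new grammar accepts (the names are made up):

    struct Event {
        name: String,
        payload: String,
    }

    enum Direction {
        Incoming,
        Outgoing,
    }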
Binary file not shown.
@@ -18,6 +18,9 @@
  * If not, see <https://www.gnu.org/licenses/>.
  */

+//! This module provides the type definitions for the parser.
+//! These types are split into type-checked ones [`checked`] and the raw types [`unchecked`]
+
 pub mod checked;
 pub mod unchecked;
@@ -221,7 +221,7 @@ impl Display for TokenKind {
 pub enum Keyword {
     /// Start a namespace declaration
     #[allow(non_camel_case_types)]
-    nasp,
+    r#mod,
     /// Start a function declaration
     #[allow(non_camel_case_types)]
     r#fn,
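Since `mod` is a reserved keyword in Rust, the variant can only be named via the raw-identifier syntax `r#mod`, and every reference must spell it that way too; a bare `Keyword::mod` would not compile. A minimal standalone sketch of the pattern (not the crate's full enum):

    // Raw identifiers (`r#...`) let keyword names be reused as ordinary identifiers.
    #[allow(non_camel_case_types)]
    enum Keyword {
        r#mod,
        r#fn,
    }

    fn name(k: &Keyword) -> &'static str {
        match k {
            Keyword::r#mod => "mod",
            Keyword::r#fn => "fn",
        }
    }

    fn main() {
        assert_eq!(name(&Keyword::r#mod), "mod");
    }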
@@ -236,7 +236,7 @@ pub enum Keyword {
 impl Display for Keyword {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Keyword::nasp => f.write_str("nasp"),
+            Keyword::r#mod => f.write_str("mod"),
             Keyword::r#fn => f.write_str("fn"),
             Keyword::r#struct => f.write_str("struct"),
             Keyword::r#enum => f.write_str("enum"),
@@ -252,7 +252,7 @@ impl Display for Keyword {
 /// ```
 /// use trixy_lang_parser::token;
 /// # fn main() {
-/// token![nasp];
+/// token![mod];
 /// token![;];
 /// token![Arrow];
 /// # }
@@ -280,7 +280,7 @@ macro_rules! token {
     [ParenClose] => { $crate::lexing::TokenKind::ParenClose };
     // [)] => { $crate::lexing::TokenKind::ParenthesisClose };

-    [nasp] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::nasp) };
+    [mod] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#mod) };
     [fn] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#fn) };
     [struct] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#struct) };
     [enum] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#enum) };
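One subtlety the macro relies on: a `macro_rules!` matcher can match the literal keyword token `mod` directly, so the rule head `[mod]` needs no raw identifier; only the expansion does, because there `Keyword::mod` would be parsed as a path containing a keyword. A standalone sketch of the same trick (the result strings are hypothetical):

    macro_rules! token {
        [mod] => { "Keyword(mod)" }; // a bare keyword is fine in a matcher
        [fn] => { "Keyword(fn)" };
    }

    fn main() {
        assert_eq!(token![mod], "Keyword(mod)");
        assert_eq!(token![fn], "Keyword(fn)");
    }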
@@ -322,6 +322,6 @@ mod tests {

     token_macro_test!(tok_expands_to_arrow, ->, => TokenKind::Arrow);
     token_macro_test!(tok_expands_to_semicolon, Semicolon, => TokenKind::Semicolon);
-    token_macro_test!(tok_expands_to_nasp, nasp, => TokenKind::Keyword(crate::lexing::Keyword::nasp));
+    token_macro_test!(tok_expands_to_mod, mod, => TokenKind::Keyword(crate::lexing::Keyword::r#mod));
     token_macro_test!(tok_expands_to_fn, fn, => TokenKind::Keyword(crate::lexing::Keyword::r#fn));
 }
@@ -27,7 +27,7 @@ use pretty_assertions::assert_eq;
 #[test]
 fn test_lexing_trixy() {
     let input = "
-nasp commands {
+mod commands {
     fn expect(event: String) -> String;
 }
 ";
@@ -36,7 +36,7 @@ nasp commands {
     let tokens = vec![
         Token {
             span: TokenSpan { start: 1, end: 5 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 6, end: 14 },
@@ -102,8 +102,8 @@ nasp commands {
 #[test]
 fn test_failing_lexing() {
     let input = "
-nasp trinitrix {
-    nasp - commands {
+mod trinitrix {
+    mod - commands {
         fn hi(strings: String) -> String;
     }
 }
@@ -119,7 +119,7 @@ nasp trinitrix {
 #[test]
 fn test_multiple_tokens() {
     let input = "
-nasp nasp {{
+mod mod {{
 }}
 ";
     let token_stream = TokenStream::lex(input).unwrap();
@@ -127,11 +127,11 @@ nasp nasp {{
     let tokens = vec![
         Token {
             span: TokenSpan { start: 1, end: 5 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 6, end: 10 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 11, end: 12 },
@@ -162,7 +162,7 @@ nasp nasp {{
 fn test_comments() {
     let input = "
 // Some comment
-nasp nasp {{
+mod mod {{

 }}
 // NOTE(@soispha): We do not support nested multi line comments <2023-12-16>
@@ -182,11 +182,11 @@ fn test_comments() {
     let tokens = vec![
         Token {
             span: TokenSpan { start: 33, end: 37 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 38, end: 42 },
-            kind: TokenKind::Keyword(Keyword::nasp),
+            kind: TokenKind::Keyword(Keyword::r#mod),
         },
         Token {
             span: TokenSpan { start: 43, end: 44 },
@@ -203,7 +203,7 @@ fn tokenize_ident(text: &str) -> Result<(TokenKind, usize), LexingError> {
     // Filter out keywords
     let tokenkind = match got {
-        "nasp" => TokenKind::Keyword(Keyword::nasp),
+        "mod" => TokenKind::Keyword(Keyword::r#mod),
         "fn" => TokenKind::Keyword(Keyword::r#fn),
         "struct" => TokenKind::Keyword(Keyword::r#struct),
        "enum" => TokenKind::Keyword(Keyword::r#enum),
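With that mapping, a scanned word that spells a keyword becomes the corresponding `Keyword` token and anything else stays an identifier. A simplified standalone sketch of the same dispatch (not the crate's actual types):

    // Classify a scanned word: keywords first, plain identifier otherwise.
    fn classify(got: &str) -> String {
        match got {
            "mod" | "fn" | "struct" | "enum" => format!("Keyword({got})"),
            other => format!("Identifier({other})"),
        }
    }

    fn main() {
        assert_eq!(classify("mod"), "Keyword(mod)");
        assert_eq!(classify("commands"), "Identifier(commands)");
    }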
@@ -234,7 +234,6 @@ impl Parser {
             }
         }
         self.expect(token![BraceClose])?;
-        self.expect(token![;])?;
         Ok(Enumeration {
             identifier,
             states,
@@ -261,7 +260,6 @@ impl Parser {
             }
         }
         self.expect(token![BraceClose])?;
-        self.expect(token![;])?;

         Ok(Structure {
             identifier: name,
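With the two `expect(token![;])` calls gone, the parser accepts a declaration that ends at its closing brace, e.g. (hypothetical input)

    enum Level {
        Info,
        Warn,
    }

and a stray `;` after the `}` is presumably no longer consumed here but left for the surrounding parse to reject.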