feat(trixy-lang_parser): Add support for Type

This includes Rust-like generic types such as `Option<T>` and `Result<T, E>`.
commit 370aac4395 (parent be066afe23)
@@ -11,8 +11,9 @@ Function = "fn" Identifier "(" [NamedType {"," NamedType }] ")" [ "->" Type ] ";
 Namespace = "nasp" Identifier "{" {Function | Namespace | Enumeration | Structure} "}" ;
 Structure = "struct" Identifier "{" [NamedType {"," NamedType } [","]] "}" ";";
 Enumeration = "enum" Identifier "{" [Identifier {"," Identifier} [","]] "}" ";";
-Identifier = CHARACTER { NUMBER | CHARACTER } ;
+Identifier = (CHARACTER | "_") { NUMBER | CHARACTER | "_" } ;
 NamedType = Identifier ":" Type;
+Type = Identifier ["<" Type {"," Type} ">"];
 
 # (*
 # vim: ft=ebnf
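For illustration, a hypothetical Trixy declaration (not taken from the repository; `get_all` and its return type are made up) that the extended grammar now derives, with the `Type` production traced in comments:

```rust
fn main() {
    // Hypothetical declaration, shown only as a string.
    let declaration = "fn get_all() -> Result<Vec<String>, String>;";

    // Function = "fn" Identifier "(" [NamedType {"," NamedType }] ")" [ "->" Type ] ";"
    // Type     = Identifier ["<" Type {"," Type} ">"]
    //
    //   Result < Vec<String> , String >
    //            ^^^^^^^^^^^ the first generic argument is itself a Type,
    //                        so the production recurses.
    println!("{declaration}");
}
```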
Binary file not shown.
@@ -92,7 +92,7 @@ nasp trinitrix {
     /// Remove a keymapping
     ///
     /// Does nothing, if the keymapping doesn't exists
-    fn remove((/* mode: */ String, /* key: */ String));
+    fn remove(mode: String, key: String);
 
     /// List declared keymappings
     fn get(mode: String);
@@ -1,2 +1,2 @@
-pub mod checked;
+// pub mod checked;
 pub mod unchecked;
@@ -55,7 +55,7 @@ pub enum Genus {
 pub struct Function {
     pub identifier: Token, // Will later become an Identifier
     pub inputs: Vec<NamedType>,
-    pub output: Option<Token>, // Will later become an Type
+    pub output: Option<Type>,
 }
 
 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
@@ -73,5 +73,11 @@ pub struct Enumeration {
 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub struct NamedType {
     pub name: Token, // Will later become an Identifier
-    pub r#type: Token, // Will later become an Type
+    pub r#type: Type,
 }
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Type {
+    pub identifier: Token, // Will later become an Identifier
+    pub generic_args: Vec<Type>,
+}
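A quick standalone sketch of what `Result<T, E>` looks like as the new `Type` node, using a `String` identifier in place of the crate's `Token` (the diff does not show how `Token`s are built). The recursion goes through `Vec`, which already provides indirection, so no `Box` is needed:

```rust
// Simplified mirror of the new AST node (String instead of Token).
#[derive(Debug)]
struct Type {
    identifier: String,
    generic_args: Vec<Type>,
}

fn leaf(name: &str) -> Type {
    Type { identifier: name.to_string(), generic_args: Vec::new() }
}

fn main() {
    // `Result<T, E>` as a nested Type value.
    let result_t_e = Type {
        identifier: "Result".to_string(),
        generic_args: vec![leaf("T"), leaf("E")],
    };
    println!("{result_t_e:#?}");
}
```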
@@ -89,7 +89,7 @@ impl ErrorContext {
     pub fn from_index(start: usize, orginal_file: &str) -> Self {
         let span = TokenSpan {
             start,
-            end: start,
+            end: start + 1,
        };
         Self::from_span(span, orginal_file)
     }
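The `end: start + 1` change presumably turns the error span from an empty range into one covering a single character; this assumes `TokenSpan` is a half-open range over the source text, which the diff itself does not show:

```rust
fn main() {
    // Assuming half-open spans over the original source text:
    let original_file = "fn f() -> Result<T, E>;";
    let start = 3;
    assert_eq!(&original_file[start..start], "");       // old span: selects nothing
    assert_eq!(&original_file[start..start + 1], "f");  // new span: one character
}
```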
@@ -121,6 +121,8 @@ pub enum TokenKind {
     BraceClose,
     ParenOpen,
     ParenClose,
+    SquareOpen,
+    SquareClose,
     /// This is not a real TokenKind, but only used for error handling
     Dummy,
 }
@@ -156,6 +158,8 @@ impl Display for TokenKind {
             TokenKind::ParenOpen => f.write_str("PARENOPEN"),
             TokenKind::ParenClose => f.write_str("PARENCLOSE"),
             TokenKind::Dummy => f.write_str("DUMMY"),
+            TokenKind::SquareOpen => f.write_str("SQUAREOPEN"),
+            TokenKind::SquareClose => f.write_str("SQUARECLOSE"),
         }
     }
 }
@@ -211,6 +215,10 @@ macro_rules! token {
     [,] => { $crate::lexing::TokenKind::Comma };
     [Arrow] => { $crate::lexing::TokenKind::Arrow };
     [->] => { $crate::lexing::TokenKind::Arrow };
+    [SquareOpen] => { $crate::lexing::TokenKind::SquareOpen };
+    [<] => { $crate::lexing::TokenKind::SquareOpen };
+    [SquareClose] => { $crate::lexing::TokenKind::SquareClose };
+    [>] => { $crate::lexing::TokenKind::SquareClose};
     [BraceOpen] => { $crate::lexing::TokenKind::BraceOpen };
     // [{] => { $crate::lexing::TokenKind::BraceOpen };
     [BraceClose] => { $crate::lexing::TokenKind::BraceClose };
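A trimmed-down, standalone illustration of how the new `token!` shortcuts behave: the variant-name form and the literal form expand to the same `TokenKind`. The crate's real macro lives in its `lexing` module; this mirror only keeps three rules:

```rust
#[derive(Debug, PartialEq)]
enum TokenKind { SquareOpen, SquareClose }

// Same idea as the crate's macro, reduced to three rules.
macro_rules! token {
    [SquareOpen] => { TokenKind::SquareOpen };
    [<] => { TokenKind::SquareOpen };
    [>] => { TokenKind::SquareClose };
}

fn main() {
    assert_eq!(token![<], token![SquareOpen]);
    assert_eq!(token![>], TokenKind::SquareClose);
}
```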
@@ -60,8 +60,14 @@ impl<'a> Tokenizer<'a> {
             ':' => (TokenKind::Colon, 1),
             ';' => (TokenKind::Semicolon, 1),
             ',' => (TokenKind::Comma, 1),
+            '<' => (TokenKind::SquareOpen, 1),
+            '>' => (TokenKind::SquareClose, 1),
             '-' => tokenize_arrow(self.remaining_text)?,
-            c @ '_' | c if c.is_alphanumeric() => tokenize_ident(self.remaining_text)?,
+
+            // can't use a OR (`|`) here, as the guard takes precedence
+            c if c.is_alphabetic() => tokenize_ident(self.remaining_text)?,
+            '_' => tokenize_ident(self.remaining_text)?,
+
             other => return Err(LexingError::UnknownCharacter(other)),
         };
 
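The new tokenizer comment ("the guard takes precedence") refers to a general Rust rule: in a match arm `pattern1 | pattern2 if guard`, the guard applies to the whole arm, not just the last alternative. A standalone demonstration (not the crate's code):

```rust
fn is_ident_start(c: char) -> bool {
    match c {
        // The guard covers every alternative, so '_' must also pass
        // `is_alphabetic()` -- which it never does.
        ch @ ('_' | 'a'..='z' | 'A'..='Z') if ch.is_alphabetic() => true,
        _ => false,
    }
}

fn main() {
    assert!(is_ident_start('a'));
    assert!(!is_ident_start('_')); // '_' falls through despite being listed
}
```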
@@ -1,6 +1,6 @@
 use crate::{
     command_spec::unchecked::{
-        CommandSpec, Declaration, Enumeration, Function, NamedType, Structure,
+        CommandSpec, Declaration, Enumeration, Function, NamedType, Structure, Type,
     },
     error::ErrorContext,
     lexing::{Token, TokenKind, TokenStream},
@@ -73,6 +73,26 @@ impl Parser {
         }
     }
 
+    fn parse_type(&mut self) -> Result<Type, ParsingError> {
+        let identifier = self.expect(token![Ident])?;
+
+        let mut generic_args = vec![];
+        if self.expect_peek(token![<]) {
+            self.expect(token![<])?;
+            if self.expect_peek(token![Ident]) {
+                generic_args.push(self.parse_type()?);
+            }
+            while self.expect_peek(token![Comma]) {
+                generic_args.push(self.parse_type()?);
+            }
+            self.expect(token![>])?;
+        }
+        Ok(Type {
+            identifier,
+            generic_args,
+        })
+    }
+
     fn parse_namespace(&mut self) -> Result<Vec<Declaration>, ParsingError> {
         self.expect(token![nasp])?;
         let namespace_name = self.expect(token![Ident])?;
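To make the recursion in `parse_type` concrete, here is a toy re-implementation over simplified tokens (no spans, no error types; all names are invented for the sketch). It parses the token stream for `Result<T, Vec<E>>` into the same nested shape the new `Type` node uses:

```rust
#[derive(Debug, PartialEq)]
enum Tok {
    Ident(String),
    Lt,    // '<' (TokenKind::SquareOpen in the crate)
    Gt,    // '>' (TokenKind::SquareClose in the crate)
    Comma,
}

#[derive(Debug)]
struct Ty {
    identifier: String,
    generic_args: Vec<Ty>,
}

fn parse_ty(toks: &[Tok], pos: &mut usize) -> Option<Ty> {
    // Expect an identifier, like `self.expect(token![Ident])?`.
    let identifier = match toks.get(*pos)? {
        Tok::Ident(name) => name.clone(),
        _ => return None,
    };
    *pos += 1;

    let mut generic_args = Vec::new();
    if toks.get(*pos) == Some(&Tok::Lt) {
        *pos += 1;                                   // consume '<'
        generic_args.push(parse_ty(toks, pos)?);     // first argument (recursive)
        while toks.get(*pos) == Some(&Tok::Comma) {
            *pos += 1;                               // consume ','
            generic_args.push(parse_ty(toks, pos)?); // further arguments
        }
        if toks.get(*pos) != Some(&Tok::Gt) {
            return None;                             // expected '>'
        }
        *pos += 1;
    }
    Some(Ty { identifier, generic_args })
}

fn main() {
    // Result < T , Vec < E > >
    let toks = vec![
        Tok::Ident("Result".into()),
        Tok::Lt,
        Tok::Ident("T".into()),
        Tok::Comma,
        Tok::Ident("Vec".into()),
        Tok::Lt,
        Tok::Ident("E".into()),
        Tok::Gt,
        Tok::Gt,
    ];
    let mut pos = 0;
    println!("{:#?}", parse_ty(&toks, &mut pos));
}
```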
@@ -140,7 +160,7 @@ impl Parser {
     fn parse_named_type(&mut self) -> Result<NamedType, ParsingError> {
         let name = self.expect(token![Ident])?;
         self.expect(token![Colon])?;
-        let r#type = self.expect(token![Ident])?;
+        let r#type = self.parse_type()?;
         Ok(NamedType { name, r#type })
     }
 
@@ -162,7 +182,7 @@ impl Parser {
         let mut output_type = None;
         if self.expect_peek(token![->]) {
             self.expect(token![->])?;
-            output_type = Some(self.expect(token![Ident])?);
+            output_type = Some(self.parse_type()?);
         }
         self.expect(token![;])?;
         Ok(Function {