test(trixy-parser): Restore test functionality after nasp -> mod rename
parent 3e59d53b8b
commit dc8a7ecb33
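Note: `mod` (like `fn`, `struct`, and `enum`) is a reserved keyword in Rust, so after the nasp -> mod rename the `Keyword` enum variant and every reference to it must use the raw-identifier form `r#mod`; the diffs below are mostly this mechanical fix plus the token-span updates it entails. A minimal, self-contained sketch of the raw-identifier pattern (the trimmed-down `Keyword` enum here is illustrative, not the crate's full definition):

    use std::fmt::Display;

    // Variants named after Rust keywords need the `r#` raw-identifier prefix.
    #[allow(non_camel_case_types)]
    #[derive(Debug, PartialEq)]
    pub enum Keyword {
        r#mod,
        r#fn,
    }

    impl Display for Keyword {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            match self {
                // `r#` is only syntax; the identifier itself is still `mod`.
                Keyword::r#mod => f.write_str("mod"),
                Keyword::r#fn => f.write_str("fn"),
            }
        }
    }

    fn main() {
        assert_eq!(Keyword::r#mod.to_string(), "mod");
    }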
@@ -236,7 +236,7 @@ pub enum Keyword {
 impl Display for Keyword {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Keyword::mod => f.write_str("mod"),
+            Keyword::r#mod => f.write_str("mod"),
             Keyword::r#fn => f.write_str("fn"),
             Keyword::r#struct => f.write_str("struct"),
             Keyword::r#enum => f.write_str("enum"),
@@ -250,7 +250,7 @@ impl Display for Keyword {
 /// # Examples
 ///
 /// ```
-/// use trixy_lang_parser::token;
+/// use trixy_parser::token;
 /// # fn main() {
 /// token![mod];
 /// token![;];
@@ -280,7 +280,7 @@ macro_rules! token {
     [ParenClose] => { $crate::lexing::TokenKind::ParenClose };
     // [)] => { $crate::lexing::TokenKind::ParenthesisClose };

-    [mod] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::mod) };
+    [mod] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#mod) };
     [fn] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#fn) };
     [struct] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#struct) };
     [enum] => { $crate::lexing::TokenKind::Keyword($crate::lexing::Keyword::r#enum) };
@@ -289,6 +289,7 @@ macro_rules! token {
     // see the `same_kind` method on TokenKind
     [Ident] => { $crate::lexing::TokenKind::Identifier("".to_owned()) };
     [Identifier] => { $crate::lexing::TokenKind::Identifier("".to_owned()) };

     [DocComment] => { $crate::lexing::TokenKind::DocComment("".to_owned()) };
+    [DocCommentMatch] => { $crate::lexing::TokenKind::DocComment(_doc_comment) };
     [Comment] => { $crate::lexing::TokenKind::Comment("".to_owned()) };
@@ -322,6 +323,6 @@ mod tests {

     token_macro_test!(tok_expands_to_arrow, ->, => TokenKind::Arrow);
     token_macro_test!(tok_expands_to_semicolon, Semicolon, => TokenKind::Semicolon);
-    token_macro_test!(tok_expands_to_mod, mod, => TokenKind::Keyword(crate::lexing::Keyword::mod));
+    token_macro_test!(tok_expands_to_mod, mod, => TokenKind::Keyword(crate::lexing::Keyword::r#mod));
     token_macro_test!(tok_expands_to_fn, fn, => TokenKind::Keyword(crate::lexing::Keyword::r#fn));
 }
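The token-span edits in the lexer tests below are mechanical fallout of the rename: `nasp` is four bytes and `mod` is three, so each keyword occurrence shifts every later offset left by one byte. A quick runnable check against the first expected token (hypothetically reconstructing the test input's opening characters; the input evidently starts with a newline, putting the keyword at byte 1):

    fn main() {
        // Hypothetical reconstruction of the lexer test's opening characters.
        let input = "\nmod commands {";
        assert_eq!(&input[1..4], "mod"); // new span: start 1, end 4
        // Before the rename, `nasp` occupied bytes 1..5, pushing every
        // subsequent token one byte further right.
    }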
@@ -35,59 +35,59 @@ mod commands {
     let expected_token_stream = {
         let tokens = vec![
             Token {
-                span: TokenSpan { start: 1, end: 5 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 1, end: 4 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 6, end: 14 },
+                span: TokenSpan { start: 5, end: 13 },
                 kind: TokenKind::Identifier("commands".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 15, end: 16 },
+                span: TokenSpan { start: 14, end: 15 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
-                span: TokenSpan { start: 21, end: 23 },
+                span: TokenSpan { start: 20, end: 22 },
                 kind: TokenKind::Keyword(Keyword::r#fn),
             },
             Token {
-                span: TokenSpan { start: 24, end: 30 },
+                span: TokenSpan { start: 23, end: 29 },
                 kind: TokenKind::Identifier("expect".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 30, end: 31 },
+                span: TokenSpan { start: 29, end: 30 },
                 kind: TokenKind::ParenOpen,
             },
             Token {
-                span: TokenSpan { start: 31, end: 36 },
+                span: TokenSpan { start: 30, end: 35 },
                 kind: TokenKind::Identifier("event".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 36, end: 37 },
+                span: TokenSpan { start: 35, end: 36 },
                 kind: TokenKind::Colon,
             },
             Token {
-                span: TokenSpan { start: 38, end: 44 },
+                span: TokenSpan { start: 37, end: 43 },
                 kind: TokenKind::Identifier("String".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 44, end: 45 },
+                span: TokenSpan { start: 43, end: 44 },
                 kind: TokenKind::ParenClose,
             },
             Token {
-                span: TokenSpan { start: 46, end: 48 },
+                span: TokenSpan { start: 45, end: 47 },
                 kind: TokenKind::Arrow,
             },
             Token {
-                span: TokenSpan { start: 49, end: 55 },
+                span: TokenSpan { start: 48, end: 54 },
                 kind: TokenKind::Identifier("String".to_owned()),
             },
             Token {
-                span: TokenSpan { start: 55, end: 56 },
+                span: TokenSpan { start: 54, end: 55 },
                 kind: TokenKind::Semicolon,
             },
             Token {
-                span: TokenSpan { start: 57, end: 58 },
+                span: TokenSpan { start: 56, end: 57 },
                 kind: TokenKind::BraceClose,
             },
         ];
@@ -126,27 +126,27 @@ mod mod {{
     let expected_token_stream = {
         let tokens = vec![
             Token {
-                span: TokenSpan { start: 1, end: 5 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 1, end: 4 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 6, end: 10 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 5, end: 8 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 11, end: 12 },
+                span: TokenSpan { start: 9, end: 10 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
-                span: TokenSpan { start: 12, end: 13 },
+                span: TokenSpan { start: 10, end: 11 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
-                span: TokenSpan { start: 14, end: 15 },
+                span: TokenSpan { start: 12, end: 13 },
                 kind: TokenKind::BraceClose,
             },
             Token {
-                span: TokenSpan { start: 15, end: 16 },
+                span: TokenSpan { start: 13, end: 14 },
                 kind: TokenKind::BraceClose,
             },
         ];
@@ -181,27 +181,27 @@ fn test_comments() {
     let expected_token_stream = {
         let tokens = vec![
             Token {
-                span: TokenSpan { start: 33, end: 37 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 33, end: 36 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 38, end: 42 },
-                kind: TokenKind::Keyword(Keyword::mod),
+                span: TokenSpan { start: 37, end: 40 },
+                kind: TokenKind::Keyword(Keyword::r#mod),
             },
             Token {
-                span: TokenSpan { start: 43, end: 44 },
+                span: TokenSpan { start: 41, end: 42 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
-                span: TokenSpan { start: 44, end: 45 },
+                span: TokenSpan { start: 42, end: 43 },
                 kind: TokenKind::BraceOpen,
             },
             Token {
-                span: TokenSpan { start: 55, end: 56 },
+                span: TokenSpan { start: 53, end: 54 },
                 kind: TokenKind::BraceClose,
             },
             Token {
-                span: TokenSpan { start: 56, end: 57 },
+                span: TokenSpan { start: 54, end: 55 },
                 kind: TokenKind::BraceClose,
             },
         ];
@@ -203,7 +203,7 @@ fn tokenize_ident(text: &str) -> Result<(TokenKind, usize), LexingError> {

     // Filter out keywords
     let tokenkind = match got {
-        "mod" => TokenKind::Keyword(Keyword::mod),
+        "mod" => TokenKind::Keyword(Keyword::r#mod),
         "fn" => TokenKind::Keyword(Keyword::r#fn),
         "struct" => TokenKind::Keyword(Keyword::r#struct),
         "enum" => TokenKind::Keyword(Keyword::r#enum),
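For orientation, the keyword filter above runs right after an identifier-shaped word has been scanned: if the text matches a keyword, the lexer emits a keyword token, otherwise a plain identifier. A minimal standalone sketch of that dispatch (types trimmed down for illustration; not the crate's exact definitions):

    #[allow(non_camel_case_types)]
    #[derive(Debug, PartialEq)]
    enum Keyword { r#mod, r#fn }

    #[derive(Debug, PartialEq)]
    enum TokenKind { Keyword(Keyword), Identifier(String) }

    // Decide whether a scanned word is a keyword or a plain identifier.
    fn classify(word: &str) -> TokenKind {
        match word {
            "mod" => TokenKind::Keyword(Keyword::r#mod),
            "fn" => TokenKind::Keyword(Keyword::r#fn),
            other => TokenKind::Identifier(other.to_owned()),
        }
    }

    fn main() {
        assert_eq!(classify("mod"), TokenKind::Keyword(Keyword::r#mod));
        assert_eq!(
            classify("commands"),
            TokenKind::Identifier("commands".to_owned())
        );
    }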
@@ -28,21 +28,30 @@ use pretty_assertions::assert_eq;

 #[test]
 fn test_full() {
-    let input = "nasp trinitrix {
+    let input = "
+mod trinitrix {
     struct Callback {
-        func: Function,
-        timeout: Integer,
-    };
+        func: void,
+        timeout: u8,
+    }

     enum CallbackPriority {
         High,
         Medium,
         Low,
-    };
+    }

     fn execute_callback(callback: Callback, priority: CallbackPriority);
-}";
-    let output = TokenStream::lex(&input).unwrap().parse().unwrap();
+}
+";
+    let output = TokenStream::lex(&input)
+        .unwrap()
+        .parse_unchecked()
+        .map_err(|err| panic!("{}", err))
+        .unwrap()
+        .process(input.to_owned())
+        .map_err(|err| panic!("{}", err))
+        .unwrap();
     let expected = CommandSpec {
         structures: vec![],
         enumerations: vec![],
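Beyond the input-syntax updates, the parser tests now exercise a two-stage pipeline instead of the old single `parse()` call. Annotated repeat of the chain from the hunk above (the method names come from the diff itself; their exact signatures in trixy_parser are assumed, not verified):

    let output = TokenStream::lex(&input)
        .unwrap()                          // lexing errors fail the test here
        .parse_unchecked()                 // build the AST without semantic checks
        .map_err(|err| panic!("{}", err))  // panic with the Display form, not Debug
        .unwrap()
        .process(input.to_owned())         // semantic pass, e.g. type-declaration checks
        .map_err(|err| panic!("{}", err))
        .unwrap();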
@@ -93,7 +102,7 @@ fn test_full() {
                 },
                 r#type: Type {
                     identifier: Identifier {
-                        name: "Function".to_owned(),
+                        name: "()".to_owned(),
                     },
                     generic_args: vec![],
                 },
@@ -105,7 +114,7 @@ fn test_full() {
                 },
                 r#type: Type {
                     identifier: Identifier {
-                        name: "Integer".to_owned(),
+                        name: "u8".to_owned(),
                     },
                     generic_args: vec![],
                 },
@@ -143,15 +152,20 @@ fn test_full() {

 #[test]
 fn test_failing() {
-    let input = "struct Callback {
-    func: Function,
-    timeout: Integer,
-};
+    let input = "
+struct Callback {
+    func: void,
+    timeout: u32,
+}

 // The type \"Name\" should not be defined
 fn execute_callback(callback: Name);
 ";
-    let output = TokenStream::lex(&input).unwrap().parse();
+    let output = TokenStream::lex(&input)
+        .unwrap()
+        .parse_unchecked()
+        .unwrap()
+        .process(input.to_owned());
     match *(output.unwrap_err().source) {
         super::error::ParsingError::TypeNotDeclared { r#type, .. } => {
             assert_eq!(
|
@ -171,12 +185,17 @@ fn test_comments() {
|
||||||
|
|
||||||
/// First doc comment
|
/// First doc comment
|
||||||
// Some more text
|
// Some more text
|
||||||
nasp trinitrix {
|
mod trinitrix {
|
||||||
/// Second doc comment
|
/// Second doc comment
|
||||||
fn hi(name: String) -> String;
|
fn hi(name: String) -> String;
|
||||||
}
|
}
|
||||||
";
|
";
|
||||||
let output = TokenStream::lex(&input).unwrap().parse().unwrap();
|
let output = TokenStream::lex(&input)
|
||||||
|
.unwrap()
|
||||||
|
.parse_unchecked()
|
||||||
|
.unwrap()
|
||||||
|
.process(input.to_owned())
|
||||||
|
.unwrap();
|
||||||
let expected = CommandSpec {
|
let expected = CommandSpec {
|
||||||
structures: vec![],
|
structures: vec![],
|
||||||
enumerations: vec![],
|
enumerations: vec![],
|
||||||
|
|
|
@@ -78,7 +78,7 @@ impl AdditionalHelp for ParsingError {
             expected, actual
         ),
         ParsingError::ExpectedKeyword { actual, .. } => format!(
-            "I expected a keyword (that is something like 'fn' or 'nasp') but you put a '{}' there!",
+            "I expected a keyword (that is something like 'fn' or 'mod') but you put a '{}' there!",
             actual),
         ParsingError::TrailingDocComment { .. } => "I expected some target (a function, namespace, enum, or something like this) which this doc comment annotates, but you put nothing there".to_owned(),
         ParsingError::UnexpectedEOF { expected, .. } => format!("Put the expected token ('{expected}') here."),
@@ -84,7 +84,7 @@ impl Parser {
         // Use of [peek_raw] here is fine, as we know that the function is only called, when
         // something should still be contained in the token stream
         match self.peek_raw().kind() {
-            token![nasp] => Ok(Declaration::Namespace(self.parse_namespace()?)),
+            token![mod] => Ok(Declaration::Namespace(self.parse_namespace()?)),
             token![fn] => Ok(Declaration::Function(self.parse_function()?)),
             token![struct] => Ok(Declaration::Structure(self.parse_structure()?)),
             token![enum] => Ok(Declaration::Enumeration(self.parse_enumeration()?)),
@@ -181,7 +181,7 @@ impl Parser {

     fn parse_namespace(&mut self) -> Result<Namespace, ParsingError> {
         let attributes = self.parse_doc_comments()?;
-        self.expect(token![nasp])?;
+        self.expect(token![mod])?;

         let mut namespace = Namespace {
             name: self.expect(token![Ident])?,
@@ -328,9 +328,9 @@ impl Parser {
     /// };
     ///
     /// # fn main() {
-    /// let token_stream = TokenStream::lex("nasp {}").unwrap();
+    /// let token_stream = TokenStream::lex("mod {}").unwrap();
     /// let parser = Parser::new(token_stream);
-    /// assert_eq!(parser.expect(token![nasp]).unwrap(), TokenKind::Keyword(Keyword::nasp));
+    /// assert_eq!(parser.expect(token![mod]).unwrap(), TokenKind::Keyword(Keyword::mod));
     /// assert_eq!(parser.expect(token![BraceOpen]).unwrap(), TokenKind::BraceOpen);
     /// assert_eq!(parser.expect(token![BraceClose]).unwrap(), TokenKind::BraceClose);
     /// assert!(parser.expect(token![BraceClose]).is_err());
@@ -32,7 +32,7 @@ fn test_failing() {
     let input = "
 fn print(message: CommandTransferValue);

-nasp trinitrix { {}
+mod trinitrix { {}
     fn hi honner(name: String) -> String; ;
 }

@@ -49,7 +49,7 @@ nasp trinitrix { {}
 fn test_full() {
     let input = "fn print(message: CommandTransferValue);

-nasp trinitrix {
+mod trinitrix {
     fn hi(name: String) -> String;
 }
 ";
@@ -80,22 +80,22 @@ nasp trinitrix {
         }],
         namespaces: vec![Namespace {
             name: Token {
-                span: TokenSpan { start: 47, end: 56 },
+                span: TokenSpan { start: 46, end: 55 },
                 kind: TokenKind::Identifier("trinitrix".to_owned()),
             },
             functions: vec![Function {
                 identifier: Token {
-                    span: TokenSpan { start: 66, end: 68 },
+                    span: TokenSpan { start: 65, end: 67 },
                     kind: TokenKind::Identifier("hi".to_owned()),
                 },
                 inputs: vec![NamedType {
                     name: Token {
-                        span: TokenSpan { start: 69, end: 73 },
+                        span: TokenSpan { start: 68, end: 72 },
                         kind: TokenKind::Identifier("name".to_owned()),
                     },
                     r#type: Type {
                         identifier: Token {
-                            span: TokenSpan { start: 75, end: 81 },
+                            span: TokenSpan { start: 74, end: 80 },
                             kind: TokenKind::Identifier("String".to_owned()),
                         },
                         generic_args: vec![],
@@ -103,7 +103,7 @@ nasp trinitrix {
             }],
             output: Some(Type {
                 identifier: Token {
-                    span: TokenSpan { start: 86, end: 92 },
+                    span: TokenSpan { start: 85, end: 91 },
                     kind: TokenKind::Identifier("String".to_owned()),
                 },
                 generic_args: vec![],