forked from trinitrix/core

chore(trixy-lang_parser): Apply clippy's suggestions

parent 26e0bbb972
commit d95b26655f
@@ -51,7 +51,7 @@ impl ErrorContext {
         let contexted_span = {
             let matched_line: Vec<_> = original_file.match_indices(&line).collect();
-            let (index, matched_line) = matched_line.get(0).expect("This first index should always match, as we took the line from the string in the first place");
+            let (index, matched_line) = matched_line.first().expect("This first index should always match, as we took the line from the string in the first place");
             debug_assert_eq!(matched_line, &&line);
             TokenSpan {
                 start: span.start - index,
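The hunk above applies clippy's `get_first` lint: `slice.first()` says "take the first element" directly and returns the same `Option` as `slice.get(0)`. A minimal standalone sketch, not taken from this repository:

fn main() {
    let matches: Vec<(usize, &str)> = vec![(42, "some line")];
    // Equivalent results; clippy prefers the explicit `first()`.
    assert_eq!(matches.first(), matches.get(0));
    let (index, line) = matches.first().expect("non-empty by construction");
    println!("{index}: {line}");
}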
@@ -59,27 +59,25 @@ impl ErrorContext {
             }
         };
 
-        let line_above;
-        if line_number == 1 {
+        let line_above = if line_number == 1 {
             // We only have one line, so no line above
-            line_above = "".to_owned();
+            "".to_owned()
         } else {
-            line_above = (*lines
+            (*lines
                 .get((line_number - 1) - 1)
                 .expect("We checked that this should work"))
-            .to_owned();
-        }
+            .to_owned()
+        };
 
-        let line_below;
-        if lines.len() - 1 > line_number {
+        let line_below = if lines.len() - 1 > line_number {
             // We have a line after the current line
-            line_below = (*lines
+            (*lines
                 .get((line_number + 1) - 1)
                 .expect("We checked that this should work"))
-            .to_owned();
+            .to_owned()
         } else {
-            line_below = "".to_owned();
-        }
+            "".to_owned()
+        };
 
         Self {
             span,
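This hunk replaces declare-then-assign (a pattern clippy flags, likely via `useless_let_if_seq`) with a binding initialized directly from the `if` expression, so each branch yields the `String` value itself. A minimal sketch of the pattern, with hypothetical values:

fn main() {
    let line_number = 1;
    // Both branches are expressions; the `if` produces the value.
    let line_above = if line_number == 1 {
        "".to_owned()
    } else {
        format!("line {}", line_number - 1)
    };
    println!("{line_above:?}");
}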
@@ -19,7 +19,7 @@ pub enum LexingError {
 
 impl AdditionalHelp for LexingError {
     fn additional_help(&self) -> String {
-        let out = match self {
+        match self {
             LexingError::NoMatchesTaken => "This token does not produce a possible match".to_owned(),
             LexingError::UnexpectedEOF => "This eof was completely unexpected".to_owned(),
             LexingError::ExpectedArrow => "The `-` token is interpretet as a started arrow (`->`), but we could not find the arrow tip (`>`)".to_owned(),
@@ -27,8 +27,7 @@ impl AdditionalHelp for LexingError {
                 format!("This char: `{char}`; is not a valid token")
             },
             LexingError::ExpectedComment => "The '/' started comment parsing, but I could not find a matching '/'".to_owned(),
-        };
-        out
+        }
     }
 }
 
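Binding the `match` result to `out` only to return it on the next line is what clippy's `let_and_return` lint targets; the `match` itself becomes the function's tail expression. A sketch of the same shape:

fn describe(n: i32) -> String {
    // The match is the last expression, so no intermediate binding is needed.
    match n {
        0 => "zero".to_owned(),
        _ => format!("non-zero: {n}"),
    }
}

fn main() {
    println!("{}", describe(0));
}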
@@ -31,13 +31,7 @@ impl TokenStream {
         // filter out comments
         let tokens = tokens
             .into_iter()
-            .filter(|token| {
-                if let TokenKind::Comment(_) = token.kind {
-                    false
-                } else {
-                    true
-                }
-            })
+            .filter(|token| !matches!(token.kind, TokenKind::Comment(_)))
             .collect();
 
         Ok(Self {
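The `if let ... { false } else { true }` closure collapses into `!matches!(...)`, per clippy's `match_like_matches_macro` lint. A self-contained sketch with a stand-in token type (not the parser's real `TokenKind`):

// Hypothetical stand-in for the parser's TokenKind.
#[derive(Debug)]
enum TokenKind {
    Comment(String),
    Identifier(String),
}

fn main() {
    let tokens = vec![
        TokenKind::Comment("skip me".into()),
        TokenKind::Identifier("keep_me".into()),
    ];
    let kept: Vec<_> = tokens
        .into_iter()
        .filter(|kind| !matches!(kind, TokenKind::Comment(_)))
        .collect();
    println!("{kept:?}"); // [Identifier("keep_me")]
}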
@@ -179,7 +173,7 @@ impl Display for TokenKind {
         match self {
             TokenKind::Keyword(word) => write!(f, "KEYWORD({})", word),
             TokenKind::Identifier(ident) => {
-                if ident == "" {
+                if ident.is_empty() {
                     write!(f, "IDENTIFIER")
                 } else {
                     write!(f, "IDENTIFIER({})", ident)
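`ident == ""` becomes `ident.is_empty()` (clippy's `comparison_to_empty`), which states the intent directly instead of comparing against a literal. Sketch:

fn label(ident: &str) -> String {
    if ident.is_empty() {
        "IDENTIFIER".to_owned()
    } else {
        format!("IDENTIFIER({ident})")
    }
}

fn main() {
    assert_eq!(label(""), "IDENTIFIER");
    assert_eq!(label("foo"), "IDENTIFIER(foo)");
}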
@@ -27,7 +27,7 @@ impl<'a> Tokenizer<'a> {
     pub(super) fn next_token(&mut self) -> Result<Option<Token>, SpannedLexingError> {
         self.skip_ignored_tokens();
         if self.remaining_text.is_empty() {
-            return Ok(None);
+            Ok(None)
         } else {
             let start = self.current_index;
 
@@ -153,7 +153,7 @@ fn tokenize_comment(text: &str) -> Result<(TokenKind, usize), LexingError> {
         let comment = comment.trim_start();
         let comment = comment.trim_end();
 
-        return Ok((TokenKind::Comment(comment.to_owned()), chars_read + 2));
+        Ok((TokenKind::Comment(comment.to_owned()), chars_read + 2))
     }
 }
 
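Both tokenizer hunks drop `return` on the final expression of a block (clippy's `needless_return`): in Rust the last expression of a block is its value. Sketch:

fn next_char(remaining: &str) -> Option<char> {
    // The if/else is the tail expression, so neither branch needs `return`.
    if remaining.is_empty() {
        None
    } else {
        remaining.chars().next()
    }
}

fn main() {
    assert_eq!(next_char(""), None);
    assert_eq!(next_char("abc"), Some('a'));
}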
@@ -9,7 +9,10 @@ pub mod error;
 pub mod lexing;
 pub mod parsing;
 
-pub fn parse_trixy_lang(input: &str) -> Result<CommandSpec, TrixyError> {
-    let input_tokens = TokenStream::lex(input)?.parse()?;
+pub fn parse_trixy_lang(input: &str) -> Result<CommandSpec, Box<TrixyError>> {
+    let input_tokens = TokenStream::lex(input)
+        .map_err(|err| Box::new(err.into()))?
+        .parse()
+        .map_err(Into::<TrixyError>::into)?;
     Ok(input_tokens)
 }
@@ -37,7 +37,7 @@ impl AdditionalHelp for ParsingError {
 
 #[derive(Debug)]
 pub struct SpannedParsingError {
-    pub source: ParsingError,
+    pub source: Box<ParsingError>,
     pub context: ErrorContext,
 }
 
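Several hunks in this commit box the error carried in `Result`s and in `SpannedParsingError::source`. This matches clippy's `result_large_err` advice: a large error variant inflates every `Result` that passes through a function, while `Box` keeps the error pointer-sized on the happy path. A sketch of the shape, with hypothetical types:

// Hypothetical large error type; boxing keeps the Result small.
#[derive(Debug)]
struct BigError {
    context: [u8; 256],
}

fn parse(input: &str) -> Result<usize, Box<BigError>> {
    if input.is_empty() {
        return Err(Box::new(BigError { context: [0; 256] }));
    }
    Ok(input.len())
}

fn main() {
    assert!(parse("").is_err());
    assert_eq!(parse("nasp").unwrap(), 4);
}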
@@ -37,7 +37,7 @@ impl TokenStream {
         let unchecked = self.parse_unchecked().map_err(|err| {
             let span = *err.source.span();
             SpannedParsingError {
-                source: ParsingError::from(err),
+                source: Box::new(ParsingError::from(err)),
                 context: ErrorContext::from_span(span, &original_file),
             }
         })?;
@@ -79,7 +79,7 @@ impl Parser {
         let namespace = self.process_namespace(namespace).map_err(|err| {
             let span = *err.span();
             SpannedParsingError {
-                source: err,
+                source: Box::new(err),
                 context: ErrorContext::from_span(span, &self.original_file),
             }
         })?;
@@ -132,7 +132,7 @@ fn test_failing() {
 fn execute_callback(callback: Name);
 ";
     let output = TokenStream::lex(&input).unwrap().parse();
-    match output.unwrap_err().source {
+    match *(output.unwrap_err().source) {
         super::error::ParsingError::TypeNotDeclared { r#type, .. } => {
             assert_eq!(
                 r#type,
@@ -68,7 +68,7 @@ impl AdditionalHelp for ParsingError {
 
 #[derive(Debug, Clone)]
 pub struct SpannedParsingError {
-    pub source: ParsingError,
+    pub source: Box<ParsingError>,
     pub context: ErrorContext,
 }
 
@@ -45,7 +45,7 @@ impl Parser {
         let next = self.parse_next().map_err(|err| {
             let span = err.get_span();
             SpannedParsingError {
-                source: err,
+                source: Box::new(err),
                 context: ErrorContext::from_span(span, &self.token_stream.original_file),
             }
         })?;
@@ -112,7 +112,7 @@ impl Parser {
                 actual: self.peek_raw().kind().clone(),
             };
 
-            return Err(err);
+            Err(err)
         }
     }
 }
@@ -162,9 +162,11 @@ impl Parser {
         let attributes = self.parse_doc_comments()?;
         self.expect(token![nasp])?;
 
-        let mut namespace = Namespace::default();
-        namespace.name = self.expect(token![Ident])?;
-        namespace.attributes = attributes;
+        let mut namespace = Namespace {
+            name: self.expect(token![Ident])?,
+            attributes,
+            ..Default::default()
+        };
 
         self.expect(token![BraceOpen])?;
 
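Instead of mutating a `Default::default()` value field by field (clippy's `field_reassign_with_default`), the namespace is now built with struct-update syntax, so any unset fields still come from the default. Sketch with a stand-in struct (not the parser's real `Namespace`):

// Stand-in for the parser's Namespace type.
#[derive(Debug, Default)]
struct Namespace {
    name: String,
    attributes: Vec<String>,
    functions: Vec<String>,
}

fn main() {
    let namespace = Namespace {
        name: "trinitrix".to_owned(),
        attributes: vec!["doc comment".to_owned()],
        ..Default::default() // functions stays empty
    };
    println!("{namespace:?}");
}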
@@ -345,11 +347,7 @@ impl Parser {
             Some(ok) => ok,
             None => return false,
         };
-        if actual_token.kind().same_kind(&token) {
-            true
-        } else {
-            false
-        }
+        actual_token.kind().same_kind(&token)
     }
 
     /// Looks at the next token without removing it
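`if cond { true } else { false }` is just `cond` (clippy's `needless_bool`). Sketch:

fn same_kind(a: char, b: char) -> bool {
    // The comparison already yields the bool; no if/else needed.
    a.is_alphabetic() == b.is_alphabetic()
}

fn main() {
    assert!(same_kind('a', 'Z'));
    assert!(!same_kind('a', '1'));
}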
@@ -19,7 +19,7 @@ nasp trinitrix { {}
 ";
     let parsed = TokenStream::lex(input).unwrap().parse_unchecked();
     let err = parsed.unwrap_err().source;
-    match err {
+    match *err {
         ParsingError::ExpectedKeyword { .. } => {}
         _ => panic!("Wrong error"),
     }