Add token caching to TokenStream

Now, when peek is called, TokenStream remembers the token it parses and
stores it in a field. Then, when next is called, it looks at that field
first and, if it is not empty, returns the cached token instead of parsing
a new one. This lets us avoid parsing the same token in the input twice,
which ultimately results in better performance.
Author: Aodhnait Étaín, 2021-05-23 08:02:23 +01:00
Parent: 3062ac9f45
Commit: a29dfc413c
1 changed file with 12 additions and 3 deletions

@@ -257,6 +257,7 @@ fn parse_expression<'a, 'b: 'a>(tokens: &'a mut TokenStream<'b>, highest_precede
 struct TokenStream<'a> {
   source: &'a str,
   cursor: usize,
+  last: Option<Token>,
 }

 impl<'a> TokenStream<'a> {
@@ -264,6 +265,7 @@ impl<'a> TokenStream<'a> {
     return Self {
       source,
       cursor: 0,
+      last: None,
     };
   }
@@ -318,15 +320,22 @@ impl<'a> TokenStream<'a> {
   }

   pub fn next(&mut self) -> Option<Token> {
-    self.skip_whitespace();
-    let token = self.parse_next()?;
+    let token = match self.last {
+      Some(_) => std::mem::take(&mut self.last).unwrap(),
+      None => {
+        self.skip_whitespace();
+        self.parse_next()?
+      },
+    };
     self.cursor += token.len();
     return Some(token);
   }

   pub fn peek(&mut self) -> Option<Token> {
     self.skip_whitespace();
-    return self.parse_next();
+    self.last = Some(self.parse_next()?);
+    return self.last;
   }
 }
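
For context beyond the diff, here is a minimal, self-contained sketch of the same "peek caches, next drains" pattern. CharStream, its char-based parsing, and the main function below are illustrative stand-ins written for this note, not the real TokenStream or Token types from the repository; the point is only to show how the cached field lets next reuse what peek already parsed.

// A minimal sketch of single-item lookahead caching, assuming a stream
// over chars instead of tokens. Not the actual TokenStream from the diff.
struct CharStream<'a> {
  source: &'a str,
  cursor: usize,
  last: Option<char>, // filled by peek(), drained by next()
}

impl<'a> CharStream<'a> {
  fn new(source: &'a str) -> Self {
    Self { source, cursor: 0, last: None }
  }

  // Parse the next item without consuming it, remembering it in `last`
  // so a following next() does not have to parse it again.
  fn peek(&mut self) -> Option<char> {
    if self.last.is_none() {
      self.last = self.source[self.cursor..].chars().next();
    }
    self.last
  }

  // Return the cached item if there is one, otherwise parse a fresh one;
  // either way, the cursor only advances here, never in peek().
  fn next(&mut self) -> Option<char> {
    let item = match self.last.take() {
      Some(c) => c,
      None => self.source[self.cursor..].chars().next()?,
    };
    self.cursor += item.len_utf8();
    Some(item)
  }
}

fn main() {
  let mut stream = CharStream::new("ab");
  assert_eq!(stream.peek(), Some('a')); // parses 'a' and caches it
  assert_eq!(stream.next(), Some('a')); // served from the cache, no reparse
  assert_eq!(stream.next(), Some('b')); // cache is empty, parses fresh
  assert_eq!(stream.next(), None);
}

As in the commit, only next advances the cursor; peek fills the cache but never consumes input, so a peek followed by next performs exactly one parse of that token.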