Add token caching to TokenStream
When peek is called, TokenStream now remembers the token it parsed and stores it in a field. When next is called, it checks that field first and, if it is not empty, returns the cached token instead of lexing again. This avoids parsing the same token in the input twice, which ultimately results in better performance.
parent 3062ac9f45
commit a29dfc413c

src/main.rs (15 changed lines)
@@ -257,6 +257,7 @@ fn parse_expression<'a, 'b: 'a>(tokens: &'a mut TokenStream<'b>, highest_precede
 struct TokenStream<'a> {
     source: &'a str,
     cursor: usize,
+    last: Option<Token>,
 }
 
 impl<'a> TokenStream<'a> {
@@ -264,6 +265,7 @@ impl<'a> TokenStream<'a> {
         return Self {
             source,
             cursor: 0,
+            last: None,
         };
     }
 
@@ -318,15 +320,22 @@ impl<'a> TokenStream<'a> {
     }
 
     pub fn next(&mut self) -> Option<Token> {
-        self.skip_whitespace();
-        let token = self.parse_next()?;
+        let token = match self.last {
+            Some(_) => std::mem::take(&mut self.last).unwrap(),
+            None => {
+                self.skip_whitespace();
+                self.parse_next()?
+            },
+        };
+
         self.cursor += token.len();
         return Some(token);
     }
 
     pub fn peek(&mut self) -> Option<Token> {
         self.skip_whitespace();
-        return self.parse_next();
+        self.last = Some(self.parse_next()?);
+        return self.last;
     }
 }
 
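For readers who want to try the pattern outside the parser, the following is a minimal, self-contained sketch of the same peek/next caching scheme. The TokenStream, last, peek, and next names mirror the diff above; the Token layout, the constructor, and the whitespace-delimited stand-in lexer in parse_next are assumptions made only so the example compiles and runs on its own.

// Standalone sketch of the caching scheme; stand-in types and lexer, not the real parser.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Token {
    start: usize,
    len: usize,
}

impl Token {
    fn len(&self) -> usize {
        self.len
    }
}

struct TokenStream<'a> {
    source: &'a str,
    cursor: usize,
    // Cache filled by peek() and drained by next().
    last: Option<Token>,
}

impl<'a> TokenStream<'a> {
    fn new(source: &'a str) -> Self {
        Self { source, cursor: 0, last: None }
    }

    fn skip_whitespace(&mut self) {
        let rest = &self.source[self.cursor..];
        self.cursor += rest.len() - rest.trim_start().len();
    }

    // Stand-in lexer: one whitespace-delimited word per token.
    fn parse_next(&self) -> Option<Token> {
        let rest = &self.source[self.cursor..];
        let len = rest.find(char::is_whitespace).unwrap_or(rest.len());
        (len > 0).then(|| Token { start: self.cursor, len })
    }

    pub fn peek(&mut self) -> Option<Token> {
        self.skip_whitespace();
        self.last = Some(self.parse_next()?);
        self.last
    }

    pub fn next(&mut self) -> Option<Token> {
        // Prefer the cached token; fall back to lexing a fresh one.
        let token = match self.last {
            Some(_) => std::mem::take(&mut self.last).unwrap(),
            None => {
                self.skip_whitespace();
                self.parse_next()?
            }
        };
        self.cursor += token.len();
        Some(token)
    }
}

fn main() {
    let source = "let x = 1";
    let mut tokens = TokenStream::new(source);

    let peeked = tokens.peek(); // lexes "let" and caches it
    let next = tokens.next();   // returns the cached token without lexing again
    assert_eq!(peeked, next);

    let second = tokens.next().unwrap();
    assert_eq!(&source[second.start..second.start + second.len], "x");
}

The Some arm relies on std::mem::take, which swaps None back into the field while handing out the cached value, so the cache is consumed exactly once and the following next() call falls through to the lexer again.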