summary refs log tree commit diff
diff options
context:
space:
mode:
authorBen Bridle <ben@derelict.engineering>2025-03-06 15:57:12 +1300
committerBen Bridle <ben@derelict.engineering>2025-03-06 15:57:19 +1300
commit228c5f742eaf5d9a36b53d5374201138bd22e0e8 (patch)
treec6e6d2af5d091692d4f72045b9ff5cbb95f887ab
parent9e238ddaaa503210b8e079429a978edeccdef99c (diff)
downloadassembler-228c5f742eaf5d9a36b53d5374201138bd22e0e8.zip
Make Tokeniser::eat_to_delimiter method more general
The eat_to_delimiter method on Tokeniser has been renamed to eat_until, and it now consumes characters until a predicate returns true, not just until a particular character is reached. This means that the method can now be passed a closure that keeps track of nesting depth, and only returns true when a matching delimiter is found at the same level as the opening delimiter.
-rw-r--r--src/tokeniser.rs4
1 file changed, 2 insertions, 2 deletions
diff --git a/src/tokeniser.rs b/src/tokeniser.rs
index 7ab44f0..27f4855 100644
--- a/src/tokeniser.rs
+++ b/src/tokeniser.rs
@@ -121,10 +121,10 @@ impl Tokeniser {
/// Consume and return all characters up to and including the first character
/// for which the predicate returns true.
/// Returns None if end of source is reached before the predicate matches.
- pub fn eat_to_delimiter(&mut self, delim: char) -> Option<String> {
+ pub fn eat_until(&mut self, mut predicate: impl FnMut(&char) -> bool) -> Option<String> {
let mut token = String::new();
while let Some(c) = self.eat_char() {
- match c == delim {
+ match predicate(&c) {
true => {
self.end = self.prev;
return Some(token);