From 228c5f742eaf5d9a36b53d5374201138bd22e0e8 Mon Sep 17 00:00:00 2001 From: Ben Bridle Date: Thu, 6 Mar 2025 15:57:12 +1300 Subject: Make Tokeniser::eat_to_delimiter method more general The eat_to_delimiter method on Tokeniser has been renamed to eat_until, and it now consumes characters until a predicate returns true, not just until a particular character is reached. This means that the method can now be passed a closure that keeps track of nesting depth, and only returns true when a matching delimiter is found at the same level as the opening delimiter. --- src/tokeniser.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src/tokeniser.rs') diff --git a/src/tokeniser.rs b/src/tokeniser.rs index 7ab44f0..27f4855 100644 --- a/src/tokeniser.rs +++ b/src/tokeniser.rs @@ -121,10 +121,10 @@ impl Tokeniser { /// Consume and return all characters up to and including the delimiter. /// Returns None if end of source is reached before delimiter is found. - pub fn eat_to_delimiter(&mut self, delim: char) -> Option<String> { + pub fn eat_until(&mut self, mut predicate: impl FnMut(&char) -> bool) -> Option<String> { let mut token = String::new(); while let Some(c) = self.eat_char() { - match c == delim { + match predicate(&c) { true => { self.end = self.prev; return Some(token); -- cgit v1.2.3-70-g09d2