summary refs log tree commit diff
path: root/src/stages/syntactic.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/stages/syntactic.rs')
-rw-r--r--  src/stages/syntactic.rs  100
1 file changed, 44 insertions, 56 deletions
diff --git a/src/stages/syntactic.rs b/src/stages/syntactic.rs
index 2e7f959..3be8307 100644
--- a/src/stages/syntactic.rs
+++ b/src/stages/syntactic.rs
@@ -12,9 +12,9 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
let mut tokens = Vec::new();
let mut errors = Vec::new();
- macro_rules! push_err {
+ macro_rules! err {
($error:expr) => {{
- push_err!($error, t.get_source());
+ err!($error, t.get_source());
}};
($error:expr, $source:expr) => {{
errors.push(Tracked::from($error, $source));
@@ -22,6 +22,25 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
}};
}
+ macro_rules! is_matching {
+ ($open:expr, $close:expr) => {{
+ let mut depth = 1;
+ move |t: &mut Tokeniser| {
+ match t.eat_char() {
+ Some($open) => { depth += 1; false }
+ Some($close) => { depth -= 1; depth == 0 }
+ _ => false,
+ }
+ }}
+ };
+ }
+
+ macro_rules! is_any {
+ ($close:expr) => {
+ |t: &mut Tokeniser| { t.eat_char() == Some($close) }
+ };
+ }
+
loop {
t.eat_whitespace();
t.mark_start();
@@ -30,47 +49,33 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
'"' => {
let source = t.get_source();
t.mark_child();
- let is_any_close = |t: &mut Tokeniser| {
- t.eat_char() == Some('"')
- };
- if let Some(_) = t.track_until(is_any_close) {
+ if let Some(_) = t.track_until(is_any!('"')) {
let child = t.tokenise_child_span();
SyntacticToken::StringLiteral(parse_string_literal(child))
} else {
- push_err!(SyntacticError::UnterminatedStringLiteral, source);
+ err!(SyntacticError::UnterminatedStringLiteral, source);
}
}
'\'' => {
let source = t.get_source();
- let is_any_close = |t: &mut Tokeniser| {
- t.eat_char() == Some('\'')
- };
- if let Some(string) = t.track_until(is_any_close) {
+ if let Some(string) = t.track_until(is_any!('\'')) {
let mut chars: Vec<char> = string.chars().collect();
if chars.len() == 1 {
let value = parse_char(chars.pop().unwrap());
SyntacticToken::IntegerLiteral(value)
} else {
t.mark_end();
- push_err!(SyntacticError::ExpectedSingleCharacter, t.get_source());
+ err!(SyntacticError::ExpectedSingleCharacter, t.get_source());
}
} else {
- push_err!(SyntacticError::UnterminatedCharacterLiteral, source);
+ err!(SyntacticError::UnterminatedCharacterLiteral, source);
}
}
'{' => {
let source = t.get_source();
t.mark_child();
- let mut depth = 1;
- let is_matching_close = |t: &mut Tokeniser| {
- match t.eat_char() {
- Some('{') => { depth += 1; false }
- Some('}') => { depth -= 1; depth == 0 }
- _ => false,
- }
- };
- if let Some(_) = t.track_until(is_matching_close) {
+ if let Some(_) = t.track_until(is_matching!('{','}')) {
let child = t.tokenise_child_span();
match parse_syntactic_from_tokeniser(child) {
Ok(tokens) => SyntacticToken::BlockLiteral(tokens),
@@ -80,21 +85,13 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
}
}
} else {
- push_err!(SyntacticError::UnterminatedBlock, source);
+ err!(SyntacticError::UnterminatedBlock, source);
}
}
'[' => {
let source = t.get_source();
t.mark_child();
- let mut depth = 1;
- let is_matching_close = |t: &mut Tokeniser| {
- match t.eat_char() {
- Some('[') => { depth += 1; false }
- Some(']') => { depth -= 1; depth == 0 }
- _ => false,
- }
- };
- if let Some(_) = t.track_until(is_matching_close) {
+ if let Some(_) = t.track_until(is_matching!('[',']')) {
let child = t.tokenise_child_span();
match parse_syntactic_from_tokeniser(child) {
Ok(tokens) => SyntacticToken::Expression(tokens),
@@ -104,20 +101,12 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
}
}
} else {
- push_err!(SyntacticError::UnterminatedExpression, source);
+ err!(SyntacticError::UnterminatedExpression, source);
}
}
'(' => {
let source = t.get_source();
- let mut depth = 1;
- let is_matching_close = |t: &mut Tokeniser| {
- match t.eat_char() {
- Some('(') => { depth += 1; false }
- Some(')') => { depth -= 1; depth == 0 }
- _ => false,
- }
- };
- if let Some(string) = t.track_until(is_matching_close) {
+ if let Some(string) = t.track_until(is_matching!('(',')')) {
// Check if the comment fills the entire line.
if t.start.position.column == 0 && t.end_of_line() {
if let Some(path) = string.strip_prefix(": ") {
@@ -127,21 +116,20 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
}
continue;
} else {
- push_err!(SyntacticError::UnterminatedComment, source);
+ err!(SyntacticError::UnterminatedComment, source);
}
}
'%' => {
let name = t.eat_token();
let source = t.get_source();
t.mark_child();
- let is_any_close = |t: &mut Tokeniser| t.eat_char() == Some(';');
- if let Some(_) = t.track_until(is_any_close) {
+ if let Some(_) = t.track_until(is_any!(';')) {
let child = t.tokenise_child_span();
match parse_syntactic_from_tokeniser(child) {
- Ok(tokens) => {
+ Ok(body) => {
let name = Tracked::from(name, source);
- let def = SyntacticMacroDefinition { name, tokens };
- SyntacticToken::MacroDefinition(def)
+ let definition = SyntacticMacroDefinition { name, body };
+ SyntacticToken::MacroDefinition(definition)
}
Err(mut parse_errors) => {
errors.append(&mut parse_errors);
@@ -149,14 +137,14 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
}
}
} else {
- push_err!(SyntacticError::UnterminatedMacroDefinition(name), source);
+ err!(SyntacticError::UnterminatedMacroDefinition(name), source);
}
}
- '}' => push_err!(SyntacticError::UnmatchedBlockTerminator),
- ']' => push_err!(SyntacticError::UnmatchedExpressionTerminator),
- ')' => push_err!(SyntacticError::UnmatchedCommentTerminator),
- ';' => push_err!(SyntacticError::UnmatchedMacroTerminator),
+ '}' => err!(SyntacticError::UnmatchedBlockTerminator),
+ ']' => err!(SyntacticError::UnmatchedExpressionTerminator),
+ ')' => err!(SyntacticError::UnmatchedCommentTerminator),
+ ';' => err!(SyntacticError::UnmatchedMacroTerminator),
'@' => SyntacticToken::LabelDefinition(ScopedSymbol::Global(t.eat_token())),
'&' => SyntacticToken::LabelDefinition(ScopedSymbol::Local(t.eat_token())),
@@ -183,17 +171,17 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
if let Some(hex_string) = token.strip_prefix("0x") {
match parse_integer_literal(hex_string, 16) {
Ok(value) => SyntacticToken::IntegerLiteral(value),
- Err(_) => push_err!(SyntacticError::InvalidHexadecimalLiteral(token)),
+ Err(_) => err!(SyntacticError::InvalidHexadecimalLiteral(token)),
}
} else if let Some(binary_string) = token.strip_prefix("0b") {
match parse_integer_literal(binary_string, 2) {
Ok(value) => SyntacticToken::IntegerLiteral(value),
- Err(_) => push_err!(SyntacticError::InvalidBinaryLiteral(token)),
+ Err(_) => err!(SyntacticError::InvalidBinaryLiteral(token)),
}
} else {
match parse_integer_literal(&token, 10) {
Ok(value) => SyntacticToken::IntegerLiteral(value),
- Err(true) => push_err!(SyntacticError::InvalidDecimalLiteral(token)),
+ Err(true) => err!(SyntacticError::InvalidDecimalLiteral(token)),
Err(false) => SyntacticToken::Symbol(ScopedSymbol::Global(token)),
}
}