author    | Ben Bridle <ben@derelict.engineering> | 2025-03-18 13:24:02 +1300
committer | Ben Bridle <ben@derelict.engineering> | 2025-03-18 13:24:20 +1300
commit    | f25bc47f5c6b7e52304b1e9c9adb4310f2e77ee7 (patch)
tree      | 5ecda1e2620ded0bfea03facf8a467b246dc03d4
parent    | dddc94424b124740dd8db8afb5abddc65a01b344 (diff)
download  | torque-asm-f25bc47f5c6b7e52304b1e9c9adb4310f2e77ee7.zip
Tidy code
- Rename tokens field on SyntacticMacroDefinition to body
- Rename push_err! macro to err!
- Create macros for character-matching logic in syntactic parsing
- Replace .as_bytes().to_vec() pattern with .into_bytes()
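The character-matching macros are the most substantial of these changes. Below is a rough, self-contained sketch of the pattern this commit introduces in src/stages/syntactic.rs: a `macro_rules!` macro that expands to a stateful depth-counting closure, replacing the hand-written closure previously duplicated at each bracket case. The `Scanner` type is a hypothetical stand-in for the crate's `Tokeniser`, and the sketch uses `pat` fragments where the commit uses `expr`, so that it compiles on its own; it illustrates the shape of the refactor, not the exact code.

```rust
struct Scanner {
    chars: Vec<char>,
    index: usize,
}

impl Scanner {
    // Stand-in for Tokeniser::eat_char: consume and return the next character.
    fn eat_char(&mut self) -> Option<char> {
        let c = self.chars.get(self.index).copied();
        self.index += 1;
        c
    }
}

// Expands to a stateful closure that consumes one character per call and
// returns true once the matching close delimiter is found, counting
// nested open/close pairs along the way.
macro_rules! is_matching {
    ($open:pat, $close:pat) => {{
        let mut depth = 1;
        move |s: &mut Scanner| {
            match s.eat_char() {
                Some($open) => { depth += 1; false }
                Some($close) => { depth -= 1; depth == 0 }
                _ => false,
            }
        }
    }};
}

fn main() {
    // Pretend an opening '{' was already consumed; scan for its partner
    // through one nested pair.
    let mut scanner = Scanner { chars: "a{b}c}".chars().collect(), index: 0 };
    let mut close = is_matching!('{', '}');
    let mut consumed = 0;
    while !close(&mut scanner) {
        consumed += 1;
    }
    assert_eq!(consumed, 5); // the sixth character is the matching '}'
}
```

Generating the closure from a macro keeps the depth-tracking logic in one place, so each bracket case in the parser collapses to a single `t.track_until(is_matching!(...))` call, as the diff below shows.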
-rw-r--r-- | src/bin/tq.rs                  |   4
-rw-r--r-- | src/formats/debug.rs           |   2
-rw-r--r-- | src/formats/inhx.rs            |   2
-rw-r--r-- | src/formats/inhx32.rs          |   2
-rw-r--r-- | src/formats/mod.rs             |   2
-rw-r--r-- | src/stages/semantic.rs         |   2
-rw-r--r-- | src/stages/syntactic.rs        | 100
-rw-r--r-- | src/stages/syntactic_tokens.rs |   4
8 files changed, 53 insertions, 65 deletions
diff --git a/src/bin/tq.rs b/src/bin/tq.rs
index d1e51f3..ca8fc69 100644
--- a/src/bin/tq.rs
+++ b/src/bin/tq.rs
@@ -32,7 +32,7 @@ fn main() {
     let no_libs = args.get("no-libs").as_bool();
     let no_project_libs = args.get("no-project-libs").as_bool();
     let no_env_libs = args.get("no-env-libs").as_bool();
-    let format = Format::from_str(&args.get("format").as_string());
+    let format = Format::from_str(args.get("format").as_str());
     let width = args.get("width").as_u32_opt();
     let dry_run = args.get("dry-run").as_bool();
     let print_tree = args.get("tree").as_bool();
@@ -188,7 +188,7 @@ Created by Ben Bridle.
         Format::Inhx => format_inhx(&segments),
         Format::Inhx32 => format_inhx32(&segments),
         Format::Raw => format_raw(&segments, width),
-        Format::Source => unreachable!("Source output is handled before merged assembly"),
+        Format::Source => unreachable!("Source output is handled before full assembly"),
     };
     match result {
         Ok(bytes) => write_bytes_and_exit(&bytes, destination.as_ref()),
diff --git a/src/formats/debug.rs b/src/formats/debug.rs
index 23fd34f..c264077 100644
--- a/src/formats/debug.rs
+++ b/src/formats/debug.rs
@@ -14,5 +14,5 @@ pub fn format_debug(segments: &[Segment]) -> Result<Vec<u8>, FormatError> {
             output.push_str(&format!(" {string:>w$}\n"));
         }
     }
-    return Ok(output.as_bytes().to_vec());
+    return Ok(output.into_bytes());
 }
diff --git a/src/formats/inhx.rs b/src/formats/inhx.rs
index fc4791b..7aa0c5e 100644
--- a/src/formats/inhx.rs
+++ b/src/formats/inhx.rs
@@ -17,7 +17,7 @@ pub fn format_inhx(segments: &[Segment]) -> Result<Vec<u8>, FormatError> {
     for record in records {
         output.push_str(&record.to_string());
     }
-    return Ok(output.as_bytes().to_vec());
+    return Ok(output.into_bytes());
 }
 
 fn data_record(words: &[Tracked<Word>], address: usize) -> Result<InhxRecord, FormatError> {
diff --git a/src/formats/inhx32.rs b/src/formats/inhx32.rs
index 8febeae..88780eb 100644
--- a/src/formats/inhx32.rs
+++ b/src/formats/inhx32.rs
@@ -21,7 +21,7 @@ pub fn format_inhx32(segments: &[Segment]) -> Result<Vec<u8>, FormatError> {
     for record in records {
         output.push_str(&record.to_string());
     }
-    return Ok(output.as_bytes().to_vec());
+    return Ok(output.into_bytes());
 }
 
 fn data_record(words: &[Tracked<Word>], address: usize) -> Result<InhxRecord, FormatError> {
diff --git a/src/formats/mod.rs b/src/formats/mod.rs
index 132001a..a77bd72 100644
--- a/src/formats/mod.rs
+++ b/src/formats/mod.rs
@@ -30,7 +30,7 @@ impl Format {
             "inhx32" => Self::Inhx32,
             "raw" => Self::Raw,
             "source" => Self::Source,
-            _ => fatal!("Unknown format '{string}', expected 'debug', 'inhx', 'inhx32', 'raw', or 'source'. "),
+            _ => fatal!("Unknown format '{string}', expected 'debug', 'inhx', 'inhx32', 'raw', or 'source'"),
         }
     }
 }
diff --git a/src/stages/semantic.rs b/src/stages/semantic.rs
index e225608..3c98192 100644
--- a/src/stages/semantic.rs
+++ b/src/stages/semantic.rs
@@ -96,7 +96,7 @@ impl SemanticParser {
         while let Some(token) = self.syntactic.pop() {
             if let SyntacticToken::MacroDefinition(definition) = token.value {
                 let namespace = Namespace::Macro(definition.name.to_string());
-                let mut parser = SemanticParser::from(definition.tokens, namespace);
+                let mut parser = SemanticParser::from(definition.body, namespace);
                 let mut arguments = Vec::new();
                 while let Some(argument) = parser.pull_argument_definition() {
                     arguments.push(argument);
diff --git a/src/stages/syntactic.rs b/src/stages/syntactic.rs
index 2e7f959..3be8307 100644
--- a/src/stages/syntactic.rs
+++ b/src/stages/syntactic.rs
@@ -12,9 +12,9 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
     let mut tokens = Vec::new();
     let mut errors = Vec::new();
 
-    macro_rules! push_err {
+    macro_rules! err {
         ($error:expr) => {{
-            push_err!($error, t.get_source());
+            err!($error, t.get_source());
         }};
         ($error:expr, $source:expr) => {{
             errors.push(Tracked::from($error, $source));
@@ -22,6 +22,25 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
         }};
     }
 
+    macro_rules! is_matching {
+        ($open:expr, $close:expr) => {{
+            let mut depth = 1;
+            move |t: &mut Tokeniser| {
+                match t.eat_char() {
+                    Some($open) => { depth += 1; false }
+                    Some($close) => { depth -= 1; depth == 0 }
+                    _ => false,
+                }
+            }}
+        };
+    }
+
+    macro_rules! is_any {
+        ($close:expr) => {
+            |t: &mut Tokeniser| { t.eat_char() == Some($close) }
+        };
+    }
+
     loop {
         t.eat_whitespace();
         t.mark_start();
@@ -30,47 +49,33 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
             '"' => {
                 let source = t.get_source();
                 t.mark_child();
-                let is_any_close = |t: &mut Tokeniser| {
-                    t.eat_char() == Some('"')
-                };
-                if let Some(_) = t.track_until(is_any_close) {
+                if let Some(_) = t.track_until(is_any!('"')) {
                     let child = t.tokenise_child_span();
                     SyntacticToken::StringLiteral(parse_string_literal(child))
                 } else {
-                    push_err!(SyntacticError::UnterminatedStringLiteral, source);
+                    err!(SyntacticError::UnterminatedStringLiteral, source);
                 }
             }
             '\'' => {
                 let source = t.get_source();
-                let is_any_close = |t: &mut Tokeniser| {
-                    t.eat_char() == Some('\'')
-                };
-                if let Some(string) = t.track_until(is_any_close) {
+                if let Some(string) = t.track_until(is_any!('\'')) {
                     let mut chars: Vec<char> = string.chars().collect();
                     if chars.len() == 1 {
                         let value = parse_char(chars.pop().unwrap());
                         SyntacticToken::IntegerLiteral(value)
                     } else {
                         t.mark_end();
-                        push_err!(SyntacticError::ExpectedSingleCharacter, t.get_source());
+                        err!(SyntacticError::ExpectedSingleCharacter, t.get_source());
                     }
                 } else {
-                    push_err!(SyntacticError::UnterminatedCharacterLiteral, source);
+                    err!(SyntacticError::UnterminatedCharacterLiteral, source);
                 }
             }
             '{' => {
                 let source = t.get_source();
                 t.mark_child();
-                let mut depth = 1;
-                let is_matching_close = |t: &mut Tokeniser| {
-                    match t.eat_char() {
-                        Some('{') => { depth += 1; false }
-                        Some('}') => { depth -= 1; depth == 0 }
-                        _ => false,
-                    }
-                };
-                if let Some(_) = t.track_until(is_matching_close) {
+                if let Some(_) = t.track_until(is_matching!('{','}')) {
                     let child = t.tokenise_child_span();
                     match parse_syntactic_from_tokeniser(child) {
                         Ok(tokens) => SyntacticToken::BlockLiteral(tokens),
@@ -80,21 +85,13 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
                         }
                     }
                 } else {
-                    push_err!(SyntacticError::UnterminatedBlock, source);
+                    err!(SyntacticError::UnterminatedBlock, source);
                 }
             }
             '[' => {
                 let source = t.get_source();
                 t.mark_child();
-                let mut depth = 1;
-                let is_matching_close = |t: &mut Tokeniser| {
-                    match t.eat_char() {
-                        Some('[') => { depth += 1; false }
-                        Some(']') => { depth -= 1; depth == 0 }
-                        _ => false,
-                    }
-                };
-                if let Some(_) = t.track_until(is_matching_close) {
+                if let Some(_) = t.track_until(is_matching!('[',']')) {
                     let child = t.tokenise_child_span();
                     match parse_syntactic_from_tokeniser(child) {
                         Ok(tokens) => SyntacticToken::Expression(tokens),
@@ -104,20 +101,12 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
                     }
                 } else {
-                    push_err!(SyntacticError::UnterminatedExpression, source);
+                    err!(SyntacticError::UnterminatedExpression, source);
                 }
             }
             '(' => {
                 let source = t.get_source();
-                let mut depth = 1;
-                let is_matching_close = |t: &mut Tokeniser| {
-                    match t.eat_char() {
-                        Some('(') => { depth += 1; false }
-                        Some(')') => { depth -= 1; depth == 0 }
-                        _ => false,
-                    }
-                };
-                if let Some(string) = t.track_until(is_matching_close) {
+                if let Some(string) = t.track_until(is_matching!('(',')')) {
                     // Check if the comment fills the entire line.
                     if t.start.position.column == 0 && t.end_of_line() {
                         if let Some(path) = string.strip_prefix(": ") {
@@ -127,21 +116,20 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
                     }
                     continue;
                 } else {
-                    push_err!(SyntacticError::UnterminatedComment, source);
+                    err!(SyntacticError::UnterminatedComment, source);
                 }
             }
             '%' => {
                 let name = t.eat_token();
                 let source = t.get_source();
                 t.mark_child();
-                let is_any_close = |t: &mut Tokeniser| t.eat_char() == Some(';');
-                if let Some(_) = t.track_until(is_any_close) {
+                if let Some(_) = t.track_until(is_any!(';')) {
                     let child = t.tokenise_child_span();
                     match parse_syntactic_from_tokeniser(child) {
-                        Ok(tokens) => {
+                        Ok(body) => {
                             let name = Tracked::from(name, source);
-                            let def = SyntacticMacroDefinition { name, tokens };
-                            SyntacticToken::MacroDefinition(def)
+                            let definition = SyntacticMacroDefinition { name, body };
+                            SyntacticToken::MacroDefinition(definition)
                         }
                         Err(mut parse_errors) => {
                             errors.append(&mut parse_errors);
@@ -149,14 +137,14 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
                     }
                 } else {
-                    push_err!(SyntacticError::UnterminatedMacroDefinition(name), source);
+                    err!(SyntacticError::UnterminatedMacroDefinition(name), source);
                 }
             }
 
-            '}' => push_err!(SyntacticError::UnmatchedBlockTerminator),
-            ']' => push_err!(SyntacticError::UnmatchedExpressionTerminator),
-            ')' => push_err!(SyntacticError::UnmatchedCommentTerminator),
-            ';' => push_err!(SyntacticError::UnmatchedMacroTerminator),
+            '}' => err!(SyntacticError::UnmatchedBlockTerminator),
+            ']' => err!(SyntacticError::UnmatchedExpressionTerminator),
+            ')' => err!(SyntacticError::UnmatchedCommentTerminator),
+            ';' => err!(SyntacticError::UnmatchedMacroTerminator),
 
             '@' => SyntacticToken::LabelDefinition(ScopedSymbol::Global(t.eat_token())),
             '&' => SyntacticToken::LabelDefinition(ScopedSymbol::Local(t.eat_token())),
@@ -183,17 +171,17 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac
                 if let Some(hex_string) = token.strip_prefix("0x") {
                     match parse_integer_literal(hex_string, 16) {
                         Ok(value) => SyntacticToken::IntegerLiteral(value),
-                        Err(_) => push_err!(SyntacticError::InvalidHexadecimalLiteral(token)),
+                        Err(_) => err!(SyntacticError::InvalidHexadecimalLiteral(token)),
                     }
                 } else if let Some(binary_string) = token.strip_prefix("0b") {
                     match parse_integer_literal(binary_string, 2) {
                         Ok(value) => SyntacticToken::IntegerLiteral(value),
-                        Err(_) => push_err!(SyntacticError::InvalidBinaryLiteral(token)),
+                        Err(_) => err!(SyntacticError::InvalidBinaryLiteral(token)),
                     }
                 } else {
                     match parse_integer_literal(&token, 10) {
                         Ok(value) => SyntacticToken::IntegerLiteral(value),
-                        Err(true) => push_err!(SyntacticError::InvalidDecimalLiteral(token)),
+                        Err(true) => err!(SyntacticError::InvalidDecimalLiteral(token)),
                         Err(false) => SyntacticToken::Symbol(ScopedSymbol::Global(token)),
                     }
                 }
diff --git a/src/stages/syntactic_tokens.rs b/src/stages/syntactic_tokens.rs
index eabf34b..041c568 100644
--- a/src/stages/syntactic_tokens.rs
+++ b/src/stages/syntactic_tokens.rs
@@ -20,7 +20,7 @@ pub enum SyntacticToken {
 
 pub struct SyntacticMacroDefinition {
     pub name: Tracked<String>,
-    pub tokens: Vec<Tracked<SyntacticToken>>,
+    pub body: Vec<Tracked<SyntacticToken>>,
 }
 
 pub struct StringLiteral {
@@ -129,7 +129,7 @@ pub fn print_syntactic_token(i: usize, token: &SyntacticToken) {
         SyntacticToken::LabelDefinition(symbol) => indent!(i, "LabelDefinition({symbol})"),
         SyntacticToken::MacroDefinition(definition) => {
             indent!(i, "MacroDefinition({})", definition.name);
-            for token in &definition.tokens {
+            for token in &definition.body {
                 print_syntactic_token(i+1, token);
             }
         }
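A closing note on the `.as_bytes().to_vec()` → `.into_bytes()` change in the format functions: both expressions produce a `Vec<u8>`, but `String::into_bytes` consumes the string and hands over its existing heap buffer, while the old pattern borrowed the bytes and copied them into a fresh allocation. A minimal sketch (the record text here is just the standard Intel HEX end-of-file line, used for illustration):

```rust
fn main() {
    let output = String::from(":00000001FF\n");

    // Old pattern: borrow the bytes, then clone them into a new Vec.
    let copied: Vec<u8> = output.as_bytes().to_vec();

    // New pattern: move the String's buffer out directly; no copy is
    // made, and `output` is consumed.
    let moved: Vec<u8> = output.into_bytes();

    assert_eq!(copied, moved);
}
```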