diff options
| author | Ben Bridle <ben@derelict.engineering> | 2025-03-25 11:57:02 +1300 | 
|---|---|---|
| committer | Ben Bridle <ben@derelict.engineering> | 2025-03-25 11:57:02 +1300 | 
| commit | 20bedfe69c8e6add6aa8fe4ccb6aebd0ca1167b9 (patch) | |
| tree | 6200f76bf97f4213d21a30c4f9632d14b84f75a5 /src | |
| parent | effc9ac938a8f582ebdec541990345f74b0abf65 (diff) | |
| download | bedrock-asm-20bedfe69c8e6add6aa8fe4ccb6aebd0ca1167b9.zip | |
Support local identifiers as macro names and in macro definitions
Macro names beginning with a ~ character will be prefixed with the
name of the most recently defined label, and local invocations inside
macro definition bodies will also expand correctly.
Diffstat (limited to 'src')
| -rw-r--r-- | src/stages/syntactic.rs | 19 | 
1 file changed, 11 insertions, 8 deletions
diff --git a/src/stages/syntactic.rs b/src/stages/syntactic.rs index ff3a65a..674ab08 100644 --- a/src/stages/syntactic.rs +++ b/src/stages/syntactic.rs @@ -4,15 +4,15 @@ use std::path::PathBuf;  pub fn parse_syntactic<P: Into<PathBuf>>(source_code: &str, path: Option<P>) -> Result<Vec<Tracked<SyntacticToken>>, Vec<Tracked<SyntacticError>>> { -    parse_syntactic_from_tokeniser(Tokeniser::new(source_code, path)) +    parse_syntactic_from_tokeniser(Tokeniser::new(source_code, path), "")  } -fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<SyntacticToken>>, Vec<Tracked<SyntacticError>>> { -    t.add_delimiters(&['@','&','%',';','{','}','(',')','[',']','#','~','"','\'']); +fn parse_syntactic_from_tokeniser(mut t: Tokeniser, label_name: &str) -> Result<Vec<Tracked<SyntacticToken>>, Vec<Tracked<SyntacticError>>> { +    t.add_delimiters(&['@','&','%',';','{','}','(',')','[',']','#','"','\'']);      t.add_terminators(&[':']);      let mut tokens = Vec::new();      let mut errors = Vec::new(); -    let mut label_name = String::new(); +    let mut label_name = label_name.to_string();      macro_rules! 
err {          ($error:expr) => {{ @@ -70,12 +70,15 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac                  }              }              '%' => { -                let name = t.eat_token(); +                let mut name = t.eat_token(); +                if let Some(local) = name.strip_prefix('~') { +                    name = format!("{label_name}/{local}"); +                }                  let source = t.get_source();                  t.mark_child();                  if let Some(_) = t.track_until(is_any!(';')) {                      let child = t.tokenise_child_span(); -                    match parse_body_from_tokeniser(child) { +                    match parse_body_from_tokeniser(child, &label_name) {                          Ok(body) => {                              let name = Tracked::from(name, source);                              let definition = SyntacticMacroDefinition { name, body }; @@ -161,11 +164,11 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser) -> Result<Vec<Tracked<Syntac  } -fn parse_body_from_tokeniser(t: Tokeniser) -> Result<Vec<Tracked<SyntacticToken>>, Vec<Tracked<SyntacticError>>> { +fn parse_body_from_tokeniser(t: Tokeniser, label_name: &str) -> Result<Vec<Tracked<SyntacticToken>>, Vec<Tracked<SyntacticError>>> {      let mut tokens = Vec::new();      let mut errors = Vec::new(); -    for token in parse_syntactic_from_tokeniser(t)? { +    for token in parse_syntactic_from_tokeniser(t, label_name)? {          match token.value {              SyntacticToken::LabelDefinition(_) => {                  let error = SyntacticError::LabelDefinitionInMacroDefinition;  | 
