path: root/src/parsers/syntactic.rs
Diffstat (limited to 'src/parsers/syntactic.rs')
-rw-r--r--  src/parsers/syntactic.rs  | 172
1 file changed, 0 insertions, 172 deletions
diff --git a/src/parsers/syntactic.rs b/src/parsers/syntactic.rs
deleted file mode 100644
index f3fcec1..0000000
--- a/src/parsers/syntactic.rs
+++ /dev/null
@@ -1,172 +0,0 @@
-use crate::*;
-
-
-pub struct SyntacticParser {
- tokeniser: Tokeniser,
- tokens: Vec<SyntacticToken>,
- /// The name of the macro being parsed.
- macro_name: Option<String>,
- /// The name of the most recent label.
- label_name: String,
-}
-
-impl SyntacticParser {
- pub fn new<P: Into<PathBuf>>(source_code: &str, path: Option<P>) -> Self {
- let mut tokeniser = Tokeniser::new(source_code, path);
- tokeniser.add_delimiters(&['@','%',';',':','{','}','(','[','#','~']);
- Self {
- tokeniser,
- tokens: Vec::new(),
- macro_name: None,
- label_name: String::new(),
- }
- }
-
- pub fn parse(mut self) -> Vec<SyntacticToken> {
- use SyntacticTokenVariant as SynVar;
- use SyntacticParseError as SynErr;
- let t = &mut self.tokeniser;
-
- loop {
- t.eat_whitespace();
- t.mark_start();
- let Some(c) = t.eat_char() else { break };
- let variant = match c {
- ':' => SynVar::Separator,
- '{' => SynVar::BlockOpen,
- '}' => SynVar::BlockClose,
- '@' => match &self.macro_name {
- Some(_) => {
- t.eat_token();
- SynVar::Error(SynErr::LabelInMacroDefinition)
- }
- None => {
- self.label_name = t.eat_token();
- SynVar::LabelDefinition(self.label_name.clone())
- }
- }
- '&' => match &self.macro_name {
- Some(macro_name) => {
- let label_name = format!("{macro_name}:{}", t.eat_token());
- SynVar::LabelDefinition(label_name)
- }
- None => {
- let label_name = &self.label_name;
- let sublabel_name = format!("{label_name}/{}", t.eat_token());
- SynVar::LabelDefinition(sublabel_name)
- }
- }
- '%' => {
- let macro_name = t.eat_token();
- self.macro_name = Some(macro_name.clone());
- SynVar::MacroDefinition(macro_name)
- }
- ';' => {
- self.macro_name = None;
- SynVar::MacroDefinitionTerminator
- }
- '[' => {
- t.mark_child();
- match t.eat_to_delimiter(']') {
- Some(_) => {
- let child = t.subtokenise();
- t.mark_end();
- let expr = parse_constant_expression(child, t.get_source());
- SynVar::Expression(expr)
- }
- None => SynVar::Error(SynErr::UnterminatedExpression),
- }
- }
- '"' => {
- t.mark_child();
- match t.eat_to_delimiter('"') {
- Some(string) => {
- let child = t.subtokenise();
- t.mark_end();
- let chars = parse_tracked_chars(child);
- let tracked_string = TrackedString {
- source: t.get_source(), string, chars,
- };
- SynVar::String(tracked_string)
- }
- None => SynVar::Error(SynErr::UnterminatedString),
- }
- }
- '(' => match t.eat_to_delimiter(')') {
- Some(string) => {
- // Check if the comment fills the entire line.
- if t.start.position.column == 0 && t.end_of_line() {
- if let Some(path) = string.strip_prefix(": ") {
- t.embedded_path = Some(PathBuf::from(path.trim()));
- t.embedded_first_line = t.start.position.line + 1;
- }
- }
- continue;
- },
- None => SynVar::Error(SynErr::UnterminatedComment),
- }
- '|' => {
- let token = t.eat_token();
- if let Some(hex_string) = token.strip_prefix("0x") {
- match usize::from_str_radix(hex_string, 16) {
- Ok(addr) => SynVar::PinnedAddress(addr),
- Err(_) => SynVar::Error(SynErr::InvalidHexadecimalLiteral(token)),
- }
- } else {
- match usize::from_str_radix(&token, 10) {
- Ok(addr) => SynVar::PinnedAddress(addr),
- Err(_) => SynVar::Error(SynErr::InvalidDecimalLiteral(token)),
- }
- }
- }
- '#' => {
- t.mark_child();
- t.eat_token();
- let pbl = parse_packed_binary_literal(t.subtokenise(), t.get_source());
- SynVar::PackedBinaryLiteral(pbl)
- },
- '~' => match &self.macro_name {
- Some(macro_name) => {
- let symbol_name = format!("{macro_name}:{}", t.eat_token());
- SynVar::Symbol(symbol_name)
- }
- None => {
- let label_name = &self.label_name;
- let symbol_name = format!("{label_name}/{}", t.eat_token());
- SynVar::Symbol(symbol_name)
- }
- }
- c => {
- let token = format!("{c}{}", t.eat_token());
- if let Some(hex_string) = token.strip_prefix("0x") {
- match usize::from_str_radix(hex_string, 16) {
- Ok(value) => SynVar::IntegerLiteral(value as isize),
- Err(_) => SynVar::Error(SynErr::InvalidHexadecimalLiteral(token)),
- }
- } else {
- match usize::from_str_radix(&token, 10) {
- Ok(value) => SynVar::IntegerLiteral(value as isize),
- Err(_) => SynVar::Symbol(token),
- }
- }
- }
- };
-
- t.mark_end();
- let source = t.get_source();
- self.tokens.push(SyntacticToken { source, variant });
- }
-
- return self.tokens;
- }
-}
-
-
-fn parse_tracked_chars(mut t: Tokeniser) -> Vec<Tracked<char>> {
- let mut output = Vec::new();
- while let Some(c) = t.eat_char() {
- output.push(Tracked::from(c, t.get_source()));
- t.mark_start();
- }
- return output;
-}
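For context, the removed type had a two-step API: build a SyntacticParser over the source text, then call parse() to consume it and return the flat Vec<SyntacticToken>, with malformed input surfacing as SyntacticTokenVariant::Error tokens rather than panics. Below is a minimal sketch of a call site, assuming these items are reachable from the crate root (as the deleted file's `use crate::*;` suggests) and that the token's `variant` field is visible to the caller; the `parse_source` wrapper itself is hypothetical, not part of the crate.

    use crate::{SyntacticParser, SyntacticToken, SyntacticTokenVariant};

    /// Hypothetical caller: run the syntactic pass over one source file
    /// and report how many error tokens it produced.
    fn parse_source(source_code: &str, path: &str) -> Vec<SyntacticToken> {
        // `&str` converts into PathBuf, satisfying the `P: Into<PathBuf>` bound on `new`.
        let tokens = SyntacticParser::new(source_code, Some(path)).parse();
        let errors = tokens.iter()
            .filter(|t| matches!(t.variant, SyntacticTokenVariant::Error(_)))
            .count();
        if errors > 0 {
            eprintln!("{path}: {errors} syntactic error(s)");
        }
        tokens
    }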