1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
|
use crate::*;
use syntactic::*;
/// A streaming parser that turns raw source text into syntactic tokens.
///
/// Implements [`Iterator`], yielding one [`Token`] per call to `next` until
/// the underlying tokeniser is exhausted.
pub struct SyntacticParser {
// Low-level reader over the source text; supplies characters/tokens and
// tracks source positions for the tokens produced here.
tokeniser: Tokeniser,
/// The name of the most recently parsed label.
/// Used as the scope prefix for sub-labels (`&`) and scoped symbols (`~`).
label_name: String,
/// The name of the macro being parsed.
/// `Some` between a `%` macro definition and its `;` terminator.
macro_name: Option<String>,
}
impl SyntacticParser {
    /// Build a parser over `source_code`, optionally tagged with the path it
    /// was read from.
    ///
    /// Registers the single-character delimiters that mark syntactic
    /// boundaries before any parsing begins.
    pub fn from_source_code<P: Into<PathBuf>>(source_code: &str, path: Option<P>) -> Self {
        // The full set of characters the syntactic layer treats as delimiters.
        const DELIMITERS: [char; 11] = ['@', '&', '%', ';', ':', '{', '}', '(', '[', '#', '~'];
        let mut tokeniser = Tokeniser::new(source_code, path);
        tokeniser.add_delimiters(&DELIMITERS);
        Self { tokeniser, label_name: String::new(), macro_name: None }
    }
}
impl Iterator for SyntacticParser {
    type Item = Token;

    /// Sequentially parse tokens from the source code.
    ///
    /// Returns `None` once the source is exhausted. Malformed input is
    /// reported in-band as a [`TokenVariant::Error`] token rather than by
    /// panicking, so iteration can continue past bad spans.
    fn next(&mut self) -> Option<Token> {
        let t = &mut self.tokeniser;
        t.drop_whitespace();
        // Mark where this token starts so `mark_end_position` can attach an
        // accurate source span below.
        t.mark_start_position();
        let variant = match t.eat_char()? {
            // `@name` defines a label and becomes the current scope for
            // subsequent `&` sub-labels and `~` scoped symbols.
            '@' => {
                self.label_name = t.eat_token();
                TokenVariant::LabelDefinition(self.label_name.clone())
            }
            // `&name` defines a sub-label namespaced under the current label.
            '&' => {
                let token = t.eat_token();
                TokenVariant::LabelDefinition(format!("{}/{token}", self.label_name))
            }
            // `%name` opens a macro definition; remember its name until the
            // matching `;` terminator.
            '%' => {
                let macro_name = t.eat_token();
                self.macro_name = Some(macro_name.clone());
                TokenVariant::MacroDefinition(macro_name)
            }
            ';' => {
                self.macro_name = None;
                TokenVariant::MacroDefinitionTerminator
            }
            // `[expr]` is a constant expression; a missing `]` is an error.
            '[' => match t.eat_to_delimiter(']') {
                Some(string) => {
                    let constant = ConstantExpression::from_str(&string, t);
                    TokenVariant::ConstantExpression(constant)
                }
                None => TokenVariant::Error(ParseError::UnterminatedConstantExpression),
            }
            '{' => TokenVariant::BlockOpen,
            '}' => TokenVariant::BlockClose,
            // `(text)` is a comment; a missing `)` is an error.
            '(' => match t.eat_to_delimiter(')') {
                Some(string) => TokenVariant::Comment(string),
                None => TokenVariant::Error(ParseError::UnterminatedComment),
            }
            // `#bits` is a packed binary literal, parsed by its own type.
            '#' => {
                let token = t.eat_token();
                let pbl = PackedBinaryLiteral::from_str(&token, t);
                TokenVariant::PackedBinaryLiteral(pbl)
            },
            // `~name` references a symbol scoped under the current label.
            '~' => {
                let token = t.eat_token();
                TokenVariant::Symbol(format!("{}/{token}", self.label_name))
            }
            ':' => TokenVariant::Separator,
            // Anything else: reassemble the full word, then try hexadecimal
            // (`0x` prefix), then decimal, finally falling back to a symbol.
            c => {
                let token = format!("{c}{}", t.eat_token());
                if let Some(hex_string) = token.strip_prefix("0x") {
                    match usize::from_str_radix(hex_string, 16) {
                        Ok(hex) => TokenVariant::HexadecimalLiteral(hex),
                        Err(_) => TokenVariant::Error(ParseError::InvalidHexadecimalLiteral(token)),
                    }
                } else {
                    // Idiomatic base-10 parse (`from_str_radix(_, 10)` is the
                    // Clippy `from_str_radix_10` anti-pattern).
                    match token.parse::<usize>() {
                        Ok(value) => TokenVariant::DecimalLiteral(value),
                        Err(_) => TokenVariant::Symbol(token),
                    }
                }
            }
        };
        // Parse source path comments: a whole-line comment of the form
        // `(: path)` embeds the original file path for error reporting.
        if let TokenVariant::Comment(comment) = &variant {
            // Check if the comment fills the entire line.
            if t.start_position.column == 0 && t.end_of_line() {
                if let Some(path) = comment.strip_prefix(": ") {
                    t.embedded_path = Some(PathBuf::from(path.trim()));
                    // Subsequent lines are renumbered relative to this marker.
                    t.embedded_first_line = t.start_position.line + 1;
                }
            }
        }
        let source = t.mark_end_position();
        Some( Token { source, variant } )
    }
}
|