use crate::*;

pub fn parse_constant_expression(mut t: Tokeniser, source: SourceSpan) -> Expression {
    use ExpressionTokenVariant as TokenVar;
    use ExpressionParseError as ParseError;

    let mut tokens = Vec::new();
    loop {
        // Skip leading whitespace and grab the next token, stopping at end of input.
        t.eat_whitespace();
        t.mark_start();
        let token = t.eat_token();
        if token.is_empty() {
            break;
        }
        let variant = match token.as_str() {
            // Operators are matched verbatim.
            "=" => TokenVar::Operator(Operator::Equal),
            "!=" => TokenVar::Operator(Operator::NotEqual),
            "<" => TokenVar::Operator(Operator::LessThan),
            ">" => TokenVar::Operator(Operator::GreaterThan),
            "+" => TokenVar::Operator(Operator::Add),
            "-" => TokenVar::Operator(Operator::Subtract),
            "<<" => TokenVar::Operator(Operator::LeftShift),
            ">>" => TokenVar::Operator(Operator::RightShift),
            "&" => TokenVar::Operator(Operator::And),
            "|" => TokenVar::Operator(Operator::Or),
            "^" => TokenVar::Operator(Operator::Xor),
            "~" => TokenVar::Operator(Operator::Not),
            _ => if let Some(stripped) = token.strip_prefix("0x") {
                // A "0x" prefix marks a hexadecimal literal.
                match usize::from_str_radix(stripped, 16) {
                    Ok(value) => TokenVar::Literal(value as isize),
                    Err(_) => TokenVar::Error(
                        ParseError::InvalidHexadecimalLiteral(stripped.to_string())),
                }
            } else {
                // Otherwise try a decimal literal; anything that does not parse
                // as a number is recorded as an invocation.
                match usize::from_str_radix(&token, 10) {
                    Ok(value) => TokenVar::Literal(value as isize),
                    Err(_) => TokenVar::Invocation(token.to_string()),
                }
            }
        };
        let source = t.get_source();
        tokens.push(ExpressionToken { source, variant });
    }
    return Expression { source, tokens };
}
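
The expression types this function builds are brought in through `use crate::*;`. As a rough, hypothetical sketch, inferred purely from how they are used above (the actual definitions elsewhere in the crate may differ), they could look something like this:

// Hypothetical sketch of the expression types, inferred from their usage in
// parse_constant_expression; the real definitions in the crate may differ.
pub struct Expression {
    pub source: SourceSpan,
    pub tokens: Vec<ExpressionToken>,
}

pub struct ExpressionToken {
    pub source: SourceSpan,
    pub variant: ExpressionTokenVariant,
}

pub enum ExpressionTokenVariant {
    Operator(Operator),
    Literal(isize),
    Invocation(String),
    Error(ExpressionParseError),
}

pub enum Operator {
    Equal, NotEqual, LessThan, GreaterThan,
    Add, Subtract, LeftShift, RightShift,
    And, Or, Xor, Not,
}

pub enum ExpressionParseError {
    InvalidHexadecimalLiteral(String),
}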