summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/bin/br-asm.rs8
-rw-r--r--src/formats/clang.rs10
-rw-r--r--src/formats/mod.rs23
-rw-r--r--src/lib.rs240
-rw-r--r--src/stages/bytecode.rs117
-rw-r--r--src/stages/bytecode_tokens.rs24
-rw-r--r--src/stages/compiler.rs7
-rw-r--r--src/stages/mod.rs1
-rw-r--r--src/stages/semantic.rs147
-rw-r--r--src/stages/semantic_tokens.rs68
-rw-r--r--src/stages/syntactic.rs76
-rw-r--r--src/stages/syntactic_tokens.rs63
-rw-r--r--src/types/instruction.rs143
-rw-r--r--src/types/mod.rs1
14 files changed, 625 insertions(+), 303 deletions(-)
diff --git a/src/bin/br-asm.rs b/src/bin/br-asm.rs
new file mode 100644
index 0000000..e7a9230
--- /dev/null
+++ b/src/bin/br-asm.rs
@@ -0,0 +1,8 @@
+use bedrock_asm::*;
+use switchboard::*;
+
+
+fn main() {
+ let args = Switchboard::from_env();
+ assemble(args, "br-asm");
+}
diff --git a/src/formats/clang.rs b/src/formats/clang.rs
new file mode 100644
index 0000000..524b501
--- /dev/null
+++ b/src/formats/clang.rs
@@ -0,0 +1,10 @@
+pub fn format_clang(bytecode: &[u8]) -> Vec<u8> {
+ let mut output = String::new();
+ for chunk in bytecode.chunks(16) {
+ for byte in chunk {
+ output.push_str(&format!("0x{byte:02X}, "));
+ }
+ output.push('\n');
+ }
+ return output.into_bytes();
+}
diff --git a/src/formats/mod.rs b/src/formats/mod.rs
new file mode 100644
index 0000000..79b1c51
--- /dev/null
+++ b/src/formats/mod.rs
@@ -0,0 +1,23 @@
+mod clang;
+pub use clang::*;
+
+use crate::*;
+
+
+#[derive(Clone, Copy, PartialEq)]
+pub enum Format {
+ Raw,
+ Source,
+ Clang,
+}
+
+impl Format {
+ pub fn from_str(string: &str) -> Self {
+ match string {
+ "raw" => Self::Raw,
+ "source" => Self::Source,
+ "c" => Self::Clang,
+ _ => fatal!("Unknown format '{string}', expected 'raw', 'c', or 'source'"),
+ }
+ }
+}
diff --git a/src/lib.rs b/src/lib.rs
index 06f898d..76ec544 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,7 +1,245 @@
+#![feature(path_add_extension)]
+
+mod formats;
mod types;
mod stages;
-
+pub use formats::*;
pub use types::*;
pub use stages::*;
use assembler::*;
+use log::*;
+use switchboard::*;
+
+use std::io::Read;
+use std::io::Write;
+
+
+pub const RETURN_MODE: u8 = 0x80;
+pub const WIDE_MODE: u8 = 0x40;
+pub const IMMEDIATE_MODE: u8 = 0x20;
+
+
+pub fn assemble(mut args: Switchboard, invocation: &str) -> ! {
+ args.named("help").short('h');
+ args.named("version");
+ args.named("verbose").short('v');
+
+ if args.get("help").as_bool() {
+ print_help(invocation);
+ std::process::exit(0);
+ }
+ if args.get("version").as_bool() {
+ let name = env!("CARGO_PKG_NAME");
+ let version = env!("CARGO_PKG_VERSION");
+ eprintln!("{name} v{version}");
+ eprintln!("Written by Ben Bridle.");
+ std::process::exit(0);
+ }
+ if args.get("verbose").as_bool() {
+ log::set_log_level(log::LogLevel::Info);
+ }
+
+ args.positional("source");
+ args.positional("destination");
+ args.named("extension").default("brc");
+
+ args.named("no-libs");
+ args.named("no-project-libs");
+ args.named("no-env-libs");
+ args.named("no-truncate");
+
+ args.named("format").default("raw");
+ args.named("dry-run").short('n');
+ args.named("tree");
+ args.named("with-symbols");
+ args.raise_errors();
+
+ let source_path = args.get("source").as_path_opt().map(
+ |p| p.canonicalize().unwrap_or_else(|e| fatal!("{p:?}: {e:?}")));
+ let destination_path = args.get("destination").as_path_opt();
+ let extension = args.get("extension").as_string();
+ let opt_extension = Some(extension.as_str());
+
+ let no_libs = args.get("no-libs").as_bool();
+ let no_project_libs = args.get("no-project-libs").as_bool();
+ let no_env_libs = args.get("no-env-libs").as_bool();
+ let no_truncate = args.get("no-truncate").as_bool();
+
+ let format = Format::from_str(args.get("format").as_str());
+ let dry_run = args.get("dry-run").as_bool();
+ let print_tree = args.get("tree").as_bool();
+ let export_symbols = args.get("with-symbols").as_bool();
+
+ // -----------------------------------------------------------------------
+
+ let mut compiler = new_compiler();
+
+ if let Some(path) = &source_path {
+ info!("Reading program source from {path:?}");
+ compiler.root_from_path(path).unwrap_or_else(|err| fatal!("{err:?}: {path:?}"));
+ } else {
+ let mut source_code = String::new();
+ info!("Reading program source from standard input");
+ if let Err(err) = std::io::stdin().read_to_string(&mut source_code) {
+ fatal!("Could not read from standard input\n{err:?}");
+ }
+ compiler.root_from_string(source_code, "<standard input>")
+ };
+ if compiler.error().is_some() && !no_libs && !no_project_libs {
+ compiler.include_libs_from_parent(opt_extension);
+ }
+ if compiler.error().is_some() && !no_libs && !no_env_libs {
+ compiler.include_libs_from_path_variable("BEDROCK_LIBS", opt_extension);
+ }
+
+ if print_tree {
+ compiler.hierarchy().report()
+ }
+ if let Some(error) = compiler.error() {
+ error.report();
+ std::process::exit(1);
+ }
+
+ let merged_source = compiler.get_compiled_source().unwrap_or_else(|error| {
+ error.report();
+ std::process::exit(1);
+ });
+
+ if !dry_run && format == Format::Source {
+ write_bytes_and_exit(merged_source.as_bytes(), destination_path.as_ref());
+ }
+
+ // -----------------------------------------------------------------------
+
+ let path = Some("<merged source>");
+ let syntactic = match parse_syntactic(&merged_source, path) {
+ Ok(tokens) => tokens,
+ Err(errors) => {
+ report_syntactic_errors(&errors, &merged_source);
+ std::process::exit(1);
+ }
+ };
+
+ let semantic = match parse_semantic(syntactic) {
+ Ok(tokens) => tokens,
+ Err(errors) => {
+ report_semantic_errors(&errors, &merged_source);
+ std::process::exit(1);
+ }
+ };
+
+ let program = match generate_bytecode(&semantic) {
+ Ok(program) => program,
+ Err(errors) => {
+ report_bytecode_errors(&errors, &merged_source);
+ std::process::exit(1);
+ }
+ };
+
+ let AssembledProgram { mut bytecode, symbols } = program;
+
+ let length = bytecode.len();
+ let percentage = (length as f32 / 65536.0 * 100.0).round() as u16;
+ info!("Assembled program in {length} bytes ({percentage}% of maximum)");
+
+ if !no_truncate {
+ // Remove null bytes from end of bytecode.
+ while let Some(0) = bytecode.last() {
+ bytecode.pop();
+ }
+ let new_length = bytecode.len();
+ let difference = length - new_length;
+ if difference > 0 {
+ info!("Truncated program to {new_length} bytes (saved {difference} bytes)");
+ }
+ }
+
+ if !dry_run {
+ if export_symbols {
+ if let Some(path) = &destination_path {
+ let mut symbols_path = path.to_path_buf();
+ symbols_path.add_extension("sym");
+ let mut symbols_string = String::new();
+ for symbol in &symbols {
+ let address = &symbol.address;
+ let name = &symbol.name;
+ let location = &symbol.source.location();
+ symbols_string.push_str(&format!(
+ "{address:04x} {name} {location}\n"
+ ));
+ }
+ match std::fs::write(&symbols_path, symbols_string) {
+ Ok(_) => info!("Saved symbols to {symbols_path:?}"),
+ Err(err) => info!("Could not write symbols to {symbols_path:?}\n{err:?}"),
+ }
+ }
+ }
+
+ let bytes = match format {
+ Format::Raw => bytecode,
+ Format::Clang => format_clang(&bytecode),
+ Format::Source => unreachable!("Source output is handled before full assembly"),
+ };
+ write_bytes_and_exit(&bytes, destination_path.as_ref());
+ }
+ std::process::exit(0);
+}
+
+
+fn write_bytes_and_exit<P: AsRef<Path>>(bytes: &[u8], path: Option<&P>) -> ! {
+ match path {
+ Some(path) => match std::fs::write(path, bytes) {
+ Ok(_) => info!("Wrote output to {:?}", path.as_ref()),
+ Err(err) => fatal!("Could not write to {:?}\n{err:?}", path.as_ref()),
+ }
+ None => match std::io::stdout().write_all(bytes) {
+ Ok(_) => info!("Wrote output to standard output"),
+ Err(err) => fatal!("Could not write to standard output\n{err:?}"),
+ }
+ }
+ std::process::exit(0);
+}
+
+
+fn print_help(invocation: &str) {
+ eprintln!("\
+Usage: {invocation} [source] [destination]
+
+Assembler for the Bedrock computer system.
+
+Usage:
+ To assemble a Bedrock program from a source file and write to an output
+ file, run `br-asm [source] [destination]`, where [source] is the path
+ of the source file and [destination] is the path to write to.
+
+ If [destination] is omitted, the assembled program will be written to
+ standard output. If [source] is omitted, the program source code will
+ be read from standard input.
+
+Environment variables:
+ BEDROCK_LIBS
+ A list of colon-separated paths that will be searched to find Bedrock
+ source code files to use as libraries when assembling a Bedrock program.
+ If a library file resolves an unresolved symbol in the program being
+ assembled, the library file will be merged into the program.
+
+Arguments:
+ [source] Bedrock source code file to assemble.
+ [destination] Destination path for assembler output.
+
+Switches:
+ --dry-run (-n) Assemble and show errors only, don't write any output
+ --extension=<ext> File extension to identify source files (default is 'brc')
+ --format=<fmt> Output format to use for assembled program (default is 'raw')
+ --no-project-libs Don't search for libraries in the source parent folder
+ --no-env-libs Don't search for libraries in the BEDROCK_LIBS path variable
+ --no-libs Combination of --no-project-libs and --no-env-libs
+ --no-truncate Don't remove trailing zero-bytes from the assembled program
+ --tree Show a tree diagram of all included library files
+ --with-symbols Also generate debug symbols file with extension '.sym'
+ --help (-h) Print this help information
+ --verbose, (-v) Print additional information
+ --version Print the program version and exit
+");
+}
diff --git a/src/stages/bytecode.rs b/src/stages/bytecode.rs
index 3d43ef4..02cc739 100644
--- a/src/stages/bytecode.rs
+++ b/src/stages/bytecode.rs
@@ -1,10 +1,10 @@
use crate::*;
-use std::collections::HashMap;
+use indexmap::IndexMap;
/// Doesn't truncate trailing null bytes.
-pub fn generate_bytecode(semantic: &Program) -> AssembledProgram {
+pub fn generate_bytecode(semantic: &Program) -> Result<AssembledProgram, Vec<Tracked<BytecodeError>>> {
let mut generator = BytecodeGenerator::new(&semantic.definitions);
generator.parse(&semantic.tokens, false);
generator.fill_slots();
@@ -14,18 +14,19 @@ pub fn generate_bytecode(semantic: &Program) -> AssembledProgram {
let address = information.address;
symbols.push(AssembledSymbol { name, address, source });
}
- AssembledProgram {
- bytecode: generator.bytecode,
- symbols,
+ match generator.errors.is_empty() {
+ true => Ok(AssembledProgram { bytecode: generator.bytecode, symbols }),
+ false => Err(generator.errors),
}
}
pub struct BytecodeGenerator<'a> {
- definitions: &'a HashMap<String, Tracked<Definition>>,
- labels: HashMap<String, LabelInformation>,
+ definitions: &'a IndexMap<String, Tracked<Definition>>,
+ labels: IndexMap<String, LabelInformation>,
stack: Vec<usize>,
bytecode: Vec<u8>,
+ errors: Vec<Tracked<BytecodeError>>,
}
struct LabelInformation {
@@ -34,13 +35,13 @@ struct LabelInformation {
}
impl<'a> BytecodeGenerator<'a> {
- pub fn new(definitions: &'a HashMap<String, Tracked<Definition>>) -> Self {
- let mut labels = HashMap::new();
+ pub fn new(definitions: &'a IndexMap<String, Tracked<Definition>>) -> Self {
+ let mut labels = IndexMap::new();
for (name, definition) in definitions {
- if let DefinitionKind::LabelDefinition = definition.kind {
- let key = name.to_string();
- let value = LabelInformation { address: 0, slots: Vec::new() };
- labels.insert(key, value);
+ if let DefinitionVariant::LabelDefinition = definition.variant {
+ // Use fake address for now.
+ let information = LabelInformation { address: 0, slots: Vec::new() };
+ labels.insert(name.to_string(), information);
}
}
Self {
@@ -48,74 +49,45 @@ impl<'a> BytecodeGenerator<'a> {
labels,
stack: Vec::new(),
bytecode: Vec::new(),
+ errors: Vec::new(),
}
}
pub fn parse(&mut self, tokens: &[Tracked<SemanticToken>], in_macro: bool) {
macro_rules! byte {
- ($byte:expr) => {
- self.bytecode.push($byte)
- };
+ ($byte:expr) => { self.bytecode.push($byte) };
}
macro_rules! double {
($double:expr) => {{
let [high, low] = u16::to_be_bytes($double);
- self.bytecode.push(high);
- self.bytecode.push(low);
+ self.bytecode.push(high); self.bytecode.push(low);
}};
}
for token in tokens {
let i = self.bytecode.len();
match &token.value {
- SemanticToken::Comment(_) => (),
-
- SemanticToken::LabelDefinition(name) => if in_macro {
- unreachable!("Uncaught label definition in macro");
- } else {
- let information = self.labels.get_mut(name).unwrap();
- information.address = i;
- }
- SemanticToken::MacroDefinition{ .. } => if in_macro {
- unreachable!("Uncaught macro definition in macro");
- }
-
- SemanticToken::RawValue(value) => match value {
+ SemanticToken::Literal(value) => match value {
Value::Byte(byte) => byte!(*byte),
Value::Double(double) => double!(*double),
}
- SemanticToken::Instruction(instruction) => {
- byte!(instruction.value)
- }
- SemanticToken::Invocation(name) => {
- if let Some(definition) = self.definitions.get(name) {
- match &definition.kind {
- DefinitionKind::MacroDefinition(body) => {
- self.parse(body, true);
- }
- DefinitionKind::LabelDefinition => {
- let information = self.labels.get_mut(name).unwrap();
- information.slots.push(i);
- double!(0);
- }
- }
- } else {
- unreachable!("Uncaught undefined symbol '{name}'");
- }
- }
-
- SemanticToken::Padding(value) => {
+ SemanticToken::Pad(value) => {
self.bytecode.resize(i + usize::from(value), 0);
},
SemanticToken::String(bytes) => {
self.bytecode.extend_from_slice(bytes)
},
-
+ SemanticToken::Comment(_) => (),
SemanticToken::BlockOpen(_) => {
self.stack.push(i);
+ // Use a fake index for now.
double!(0);
}
SemanticToken::BlockClose(_) => {
+ if i > 0xFFFF {
+ let error = BytecodeError::InvalidBlockAddress(i);
+ self.errors.push(Tracked::from(error, token.source.clone()));
+ }
let Some(addr) = self.stack.pop() else {
unreachable!("Uncaught unmatched block terminator");
};
@@ -123,10 +95,49 @@ impl<'a> BytecodeGenerator<'a> {
self.bytecode[addr] = high;
self.bytecode[addr+1] = low;
}
+ SemanticToken::Symbol(name) => {
+ if let Some(definition) = self.definitions.get(name) {
+ match &definition.variant {
+ DefinitionVariant::MacroDefinition(body) => {
+ self.parse(body, true);
+ }
+ DefinitionVariant::LabelDefinition => {
+ let information = self.labels.get_mut(name).unwrap();
+ information.slots.push(i);
+ // Use a fake index for now.
+ double!(0);
+ }
+ }
+ } else {
+ unreachable!("Uncaught undefined symbol '{name}'");
+ }
+ }
+ SemanticToken::Instruction(instruction) => {
+ byte!(instruction.value)
+ }
+ SemanticToken::LabelDefinition(name) => if in_macro {
+ unreachable!("Uncaught label definition in macro");
+ } else {
+ if i > 0xFFFF {
+ let error = BytecodeError::InvalidLabelAddress(i);
+ self.errors.push(Tracked::from(error, token.source.clone()));
+ }
+ let information = self.labels.get_mut(name).unwrap();
+ // Replace fake index with real index.
+ information.address = i;
+ }
+ SemanticToken::MacroDefinition{ .. } => if in_macro {
+ unreachable!("Uncaught macro definition in macro");
+ }
}
}
+
+ if !in_macro && !self.stack.is_empty() {
+ unreachable!("Uncaught unterminated block");
+ }
}
+ /// Fill each label slot with a real label address.
pub fn fill_slots(&mut self) {
for information in self.labels.values() {
let [high, low] = (information.address as u16).to_be_bytes();
diff --git a/src/stages/bytecode_tokens.rs b/src/stages/bytecode_tokens.rs
index aef27f9..902fcd7 100644
--- a/src/stages/bytecode_tokens.rs
+++ b/src/stages/bytecode_tokens.rs
@@ -11,3 +11,27 @@ pub struct AssembledSymbol {
pub address: usize,
pub source: SourceSpan,
}
+
+pub enum BytecodeError {
+ InvalidLabelAddress(usize),
+ InvalidBlockAddress(usize),
+}
+
+
+pub fn report_bytecode_errors(errors: &[Tracked<BytecodeError>], source_code: &str) {
+ for error in errors {
+ report_bytecode_error(error, source_code);
+ }
+}
+
+
+fn report_bytecode_error(error: &Tracked<BytecodeError>, source_code: &str) {
+ let context = Context { source_code: &source_code, source: &error.source };
+ let message = match &error.value {
+ BytecodeError::InvalidLabelAddress(address) =>
+ &format!("The label address exceeds 0xFFFF: 0x{address:X}"),
+ BytecodeError::InvalidBlockAddress(address) =>
+ &format!("The block address exceeds 0xFFFF: 0x{address:X}"),
+ };
+ report_source_issue(LogLevel::Error, &context, message);
+}
diff --git a/src/stages/compiler.rs b/src/stages/compiler.rs
index cdeb601..97bf20c 100644
--- a/src/stages/compiler.rs
+++ b/src/stages/compiler.rs
@@ -1,4 +1,5 @@
use crate::*;
+
use assembler::SymbolRole::*;
use assembler::DefinitionType::*;
@@ -12,7 +13,7 @@ pub fn new_compiler() -> Compiler {
pub fn parse_symbols(source_code: &str, path: Option<&Path>) -> Option<Vec<Symbol>> {
let syntactic = match parse_syntactic(source_code, path) {
Ok(syntactic) => syntactic,
- Err(_errors) => return None,
+ Err(_) => return None,
};
Some(SymbolParser::new().parse(&syntactic))
}
@@ -64,7 +65,7 @@ impl SymbolParser {
Definition(MustPrecedeReference),
);
for token in &definition.body {
- if let SyntacticToken::Invocation(name) = &token.value {
+ if let SyntacticToken::Symbol(name) = &token.value {
self.record_symbol(&name, &token.source, Reference);
}
}
@@ -72,7 +73,7 @@ impl SymbolParser {
SyntacticToken::LabelDefinition(name) => {
self.record_symbol(&name, &token.source, Definition(CanFollowReference));
}
- SyntacticToken::Invocation(name) => {
+ SyntacticToken::Symbol(name) => {
self.record_symbol(&name, &token.source, Reference);
}
_ => (),
diff --git a/src/stages/mod.rs b/src/stages/mod.rs
index 65d14d7..76bda0d 100644
--- a/src/stages/mod.rs
+++ b/src/stages/mod.rs
@@ -5,7 +5,6 @@ mod semantic;
mod semantic_tokens;
mod bytecode;
mod bytecode_tokens;
-
pub use compiler::*;
pub use syntactic::*;
pub use syntactic_tokens::*;
diff --git a/src/stages/semantic.rs b/src/stages/semantic.rs
index 8b5f4f4..dc9709e 100644
--- a/src/stages/semantic.rs
+++ b/src/stages/semantic.rs
@@ -1,24 +1,38 @@
use crate::*;
-use std::collections::{HashMap, HashSet};
+use std::str::FromStr;
+
+use indexmap::{IndexMap, IndexSet};
pub fn parse_semantic(syntactic: Vec<Tracked<SyntacticToken>>) -> Result<Program, Vec<Tracked<SemanticError>>> {
+ let mut errors = Vec::new();
+
// Record all label definitions and macro names up front.
- let mut definitions = HashMap::new();
- let mut macro_names = HashSet::new();
+ let mut definitions = IndexMap::new();
+ let mut macro_names = IndexSet::new();
for token in &syntactic {
match &token.value {
SyntacticToken::LabelDefinition(name) => {
- let name = name.clone();
- let definition = Definition::new(0, DefinitionKind::LabelDefinition);
+ // Check if identifier is reserved.
+ if Instruction::from_str(&name).is_ok() {
+ let error = SemanticError::ReservedIdentifier(name.to_string());
+ errors.push(Tracked::from(error, token.source.clone()));
+ }
+ // Use a fake index for now.
+ let definition = Definition::new(0, DefinitionVariant::LabelDefinition);
let tracked = Tracked::from(definition, token.source.clone());
if let Some(_) = definitions.insert(name.clone(), tracked) {
unreachable!("Uncaught duplicate label definition '{name}'");
}
}
SyntacticToken::MacroDefinition(definition) => {
- let name = definition.name.clone();
+ let name = &definition.name;
+ // Check if identifier is reserved.
+ if Instruction::from_str(&name).is_ok() {
+ let error = SemanticError::ReservedIdentifier(name.to_string());
+ errors.push(Tracked::from(error, name.source.clone()));
+ }
if !macro_names.insert(name.clone()) {
unreachable!("Uncaught duplicate macro definition '{name}'")
}
@@ -29,110 +43,103 @@ pub fn parse_semantic(syntactic: Vec<Tracked<SyntacticToken>>) -> Result<Program
// Convert syntactic tokens to semantic tokens.
let mut tokens: Vec<Tracked<SemanticToken>> = Vec::new();
- let mut errors = Vec::new();
let mut stack = Vec::new();
for syn_token in syntactic {
let i = tokens.len();
let sem_token = match syn_token.value {
+ SyntacticToken::Literal(value) => SemanticToken::Literal(value),
+ SyntacticToken::Pad(value) => SemanticToken::Pad(value),
+ SyntacticToken::String(bytes) => SemanticToken::String(bytes),
SyntacticToken::Comment(string) => SemanticToken::Comment(string),
-
+ SyntacticToken::BlockOpen => {
+ stack.push(i);
+ // Use a fake index for now.
+ SemanticToken::BlockOpen(0)
+ }
+ SyntacticToken::BlockClose => {
+ let Some(k) = stack.pop() else {
+ unreachable!("Uncaught unmatched block terminator");
+ };
+ // Replace fake index with real index.
+ tokens[k].value = SemanticToken::BlockOpen(i);
+ SemanticToken::BlockClose(k)
+ }
+ SyntacticToken::Symbol(symbol) => {
+ if let Some(definition) = definitions.get_mut(&symbol) {
+ definition.value.references.push(i);
+ } else if let Some(definition) = macro_names.get(&symbol) {
+ let error = SemanticError::InvocationBeforeDefinition;
+ let source = syn_token.source.wrap(definition.source.clone());
+ errors.push(Tracked::from(error, source));
+ } else {
+ unreachable!("Uncaught undefined symbol '{symbol}'");
+ };
+ SemanticToken::Symbol(symbol)
+ }
+ SyntacticToken::Instruction(instruction) => SemanticToken::Instruction(instruction),
SyntacticToken::LabelDefinition(name) => {
let definition = definitions.get_mut(&name).unwrap();
+ // Replace fake index with real index.
definition.value.definition = i;
SemanticToken::LabelDefinition(name)
}
SyntacticToken::MacroDefinition(definition) => {
- let source = definition.name.source.clone();
let name = definition.name.clone();
-
let mut body: Vec<Tracked<SemanticToken>> = Vec::new();
let mut body_stack = Vec::new();
for syn_token in definition.body {
let j = body.len();
let sem_token = match syn_token.value {
- SyntacticToken::Comment(string) =>
- SemanticToken::Comment(string),
-
- SyntacticToken::LabelDefinition(label) =>
- unreachable!("Uncaught label definition '{label}' in macro '{name}'"),
- SyntacticToken::MacroDefinition(definition) =>
- unreachable!("Uncaught macro definition '{}' in macro '{name}'", definition.name),
-
- SyntacticToken::RawValue(value) => SemanticToken::RawValue(value),
- SyntacticToken::Instruction(instruction) => SemanticToken::Instruction(instruction),
- SyntacticToken::Invocation(symbol) => {
- if let Some(definition) = definitions.get_mut(&symbol) {
- definition.value.deep_references.push((i, j));
- } else if let Some(definition) = macro_names.get(&symbol) {
- let error = SemanticError::InvocationBeforeDefinition;
- let source = syn_token.source.wrap(definition.source.clone());
- errors.push(Tracked::from(error, source));
- } else {
- unreachable!("Uncaught undefined symbol '{symbol}'");
- };
- SemanticToken::Invocation(symbol)
- }
-
- SyntacticToken::Padding(value) => SemanticToken::Padding(value),
+ SyntacticToken::Literal(value) => SemanticToken::Literal(value),
+ SyntacticToken::Pad(value) => SemanticToken::Pad(value),
SyntacticToken::String(bytes) => SemanticToken::String(bytes),
-
+ SyntacticToken::Comment(string) => SemanticToken::Comment(string),
SyntacticToken::BlockOpen => {
body_stack.push(j);
+ // Use a fake index for now.
SemanticToken::BlockOpen(0)
}
SyntacticToken::BlockClose => {
let Some(k) = body_stack.pop() else {
- unreachable!("Uncaught unmatched block terminator in macro {name}");
+ unreachable!("Uncaught unmatched block terminator in macro '{name}'");
};
+ // Replace fake index with real index.
body[k].value = SemanticToken::BlockOpen(j);
SemanticToken::BlockClose(k)
}
+ SyntacticToken::Symbol(symbol) => {
+ if let Some(definition) = definitions.get_mut(&symbol) {
+ definition.value.deep_references.push((i, j));
+ } else if let Some(definition) = macro_names.get(&symbol) {
+ let error = SemanticError::InvocationBeforeDefinition;
+ let source = syn_token.source.wrap(definition.source.clone());
+ errors.push(Tracked::from(error, source));
+ } else {
+ unreachable!("Uncaught undefined symbol '{symbol}' in macro '{name}'");
+ };
+ SemanticToken::Symbol(symbol)
+ }
+ SyntacticToken::Instruction(instruction) => SemanticToken::Instruction(instruction),
+ SyntacticToken::LabelDefinition(label) =>
+ unreachable!("Uncaught label definition '{label}' in macro '{name}'"),
+ SyntacticToken::MacroDefinition(definition) =>
+ unreachable!("Uncaught macro definition '{}' in macro '{name}'", definition.name),
};
body.push(Tracked::from(sem_token, syn_token.source));
}
- let kind = DefinitionKind::MacroDefinition(body);
- let tracked = Tracked::from(Definition::new(i, kind), source);
+ let variant = DefinitionVariant::MacroDefinition(body);
+ let source = definition.name.source.clone();
+ let tracked = Tracked::from(Definition::new(i, variant), source);
if let Some(_) = definitions.insert(name.value.clone(), tracked) {
unreachable!("Uncaught duplicate definition '{name}'")
};
-
if !body_stack.is_empty() {
- unreachable!("Uncaught unterminated block in macro {name}");
+ unreachable!("Uncaught unterminated block in macro '{name}'");
}
SemanticToken::MacroDefinition(name)
}
-
- SyntacticToken::RawValue(value) => SemanticToken::RawValue(value),
- SyntacticToken::Instruction(instruction) => SemanticToken::Instruction(instruction),
- SyntacticToken::Invocation(symbol) => {
- if let Some(definition) = definitions.get_mut(&symbol) {
- definition.value.references.push(i);
- } else if let Some(definition) = macro_names.get(&symbol) {
- let error = SemanticError::InvocationBeforeDefinition;
- let source = syn_token.source.wrap(definition.source.clone());
- errors.push(Tracked::from(error, source));
- } else {
- unreachable!("Uncaught undefined symbol '{symbol}'");
- };
- SemanticToken::Invocation(symbol)
- }
-
- SyntacticToken::Padding(value) => SemanticToken::Padding(value),
- SyntacticToken::String(bytes) => SemanticToken::String(bytes),
-
- SyntacticToken::BlockOpen => {
- stack.push(i);
- SemanticToken::BlockOpen(0)
- }
- SyntacticToken::BlockClose => {
- let Some(k) = stack.pop() else {
- unreachable!("Uncaught unmatched block terminator");
- };
- tokens[k].value = SemanticToken::BlockOpen(i);
- SemanticToken::BlockClose(k)
- }
};
tokens.push(Tracked::from(sem_token, syn_token.source));
}
diff --git a/src/stages/semantic_tokens.rs b/src/stages/semantic_tokens.rs
index 365546f..c735828 100644
--- a/src/stages/semantic_tokens.rs
+++ b/src/stages/semantic_tokens.rs
@@ -1,24 +1,27 @@
use crate::*;
-use std::collections::HashMap;
+use indexmap::IndexMap;
pub struct Program {
- pub definitions: HashMap<String, Tracked<Definition>>,
+ pub definitions: IndexMap<String, Tracked<Definition>>,
pub tokens: Vec<Tracked<SemanticToken>>,
}
pub struct Definition {
- pub kind: DefinitionKind,
+ pub variant: DefinitionVariant,
+ /// Index of definition token.
pub definition: usize,
+ /// Indices of symbols referencing this definition.
pub references: Vec<usize>,
+ /// Indices of references inside other definitions.
pub deep_references: Vec<(usize, usize)>,
}
impl Definition {
- pub fn new(i: usize, kind: DefinitionKind) -> Self {
+ pub fn new(i: usize, variant: DefinitionVariant) -> Self {
Self {
- kind,
+ variant,
definition: i,
references: Vec::new(),
deep_references: Vec::new(),
@@ -26,30 +29,27 @@ impl Definition {
}
}
-pub enum DefinitionKind {
- MacroDefinition(Vec<Tracked<SemanticToken>>),
+pub enum DefinitionVariant {
LabelDefinition,
+ MacroDefinition(Vec<Tracked<SemanticToken>>),
}
pub enum SemanticToken {
+ Literal(Value),
+ Pad(Value),
+ String(Vec<u8>),
Comment(String),
-
+ BlockOpen(usize), // index to matching block-close
+ BlockClose(usize), // index to matching block-open
+ Symbol(String),
+ Instruction(Instruction),
LabelDefinition(String),
MacroDefinition(Tracked<String>),
-
- RawValue(Value),
- Instruction(Instruction),
- Invocation(String),
-
- Padding(Value),
- String(Vec<u8>),
-
- BlockOpen(usize),
- BlockClose(usize),
}
pub enum SemanticError {
InvocationBeforeDefinition,
+ ReservedIdentifier(String),
}
@@ -59,43 +59,39 @@ pub fn report_semantic_errors(errors: &[Tracked<SemanticError>], source_code: &s
}
}
+
fn report_semantic_error(error: &Tracked<SemanticError>, source_code: &str) {
let context = Context { source_code: &source_code, source: &error.source };
let message = match &error.value {
SemanticError::InvocationBeforeDefinition =>
"Macro cannot be invoked before it has been defined",
+ SemanticError::ReservedIdentifier(name) =>
+ &format!("Identifier '{name}' is reserved for a built-in instruction"),
};
-
report_source_issue(LogLevel::Error, &context, message);
}
-pub fn print_semantic_token(i: usize, token: &SemanticToken, definitions: &HashMap<String, Tracked<Definition>>) {
+pub fn print_semantic_token(i: usize, token: &SemanticToken, definitions: &IndexMap<String, Tracked<Definition>>) {
match token {
- SemanticToken::Comment(_) =>
- indent!(i, "Comment"),
-
- SemanticToken::LabelDefinition(name) =>
- indent!(i, "LabelDefinition({name})"),
+ SemanticToken::Literal(value) => indent!(i, "Literal({value})"),
+ SemanticToken::Pad(value) => indent!(i, "Pad({value})"),
+ SemanticToken::String(bytes) => indent!(i, "String({})", String::from_utf8_lossy(bytes)),
+ SemanticToken::Comment(_) => indent!(i, "Comment"),
+ SemanticToken::BlockOpen(pointer) => indent!(i, "BlockOpen(*{pointer})"),
+ SemanticToken::BlockClose(pointer) => indent!(i, "BlockClose(*{pointer})"),
+ SemanticToken::Symbol(name) => indent!(i, "Symbol({name})"),
+ SemanticToken::Instruction(instruction) => indent!(i, "Instruction({instruction})"),
+ SemanticToken::LabelDefinition(name) => indent!(i, "LabelDefinition({name})"),
SemanticToken::MacroDefinition(name) => {
indent!(i, "MacroDefinition({name})");
if let Some(definition) = definitions.get(name.as_str()) {
- if let DefinitionKind::MacroDefinition(body) = &definition.kind {
+ if let DefinitionVariant::MacroDefinition(body) = &definition.variant {
for token in body {
print_semantic_token(i+1, token, definitions);
}
}
}
}
-
- SemanticToken::RawValue(value) => indent!(i, "RawValue({value})"),
- SemanticToken::Instruction(instruction) => indent!(i, "Instruction({instruction})"),
- SemanticToken::Invocation(name) => indent!(i, "Invocation({name})"),
-
- SemanticToken::Padding(value) => indent!(i, "Padding({value})"),
- SemanticToken::String(bytes) => indent!(i, "String({})", String::from_utf8_lossy(bytes)),
-
- SemanticToken::BlockOpen(pointer) => indent!(i, "BlockOpen(*{pointer})"),
- SemanticToken::BlockClose(pointer) => indent!(i, "BlockOpen(*{pointer})"),
}
}
diff --git a/src/stages/syntactic.rs b/src/stages/syntactic.rs
index 674ab08..59b8b95 100644
--- a/src/stages/syntactic.rs
+++ b/src/stages/syntactic.rs
@@ -8,7 +8,7 @@ pub fn parse_syntactic<P: Into<PathBuf>>(source_code: &str, path: Option<P>) ->
}
fn parse_syntactic_from_tokeniser(mut t: Tokeniser, label_name: &str) -> Result<Vec<Tracked<SyntacticToken>>, Vec<Tracked<SyntacticError>>> {
- t.add_delimiters(&['@','&','%',';','{','}','(',')','[',']','#','"','\'']);
+ t.add_delimiters(&['(',')','[',']','{','}',';']);
t.add_terminators(&[':']);
let mut tokens = Vec::new();
let mut errors = Vec::new();
@@ -24,20 +24,41 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser, label_name: &str) -> Result<
}};
}
- macro_rules! is_any {
- ($close:expr) => {
- |t: &mut Tokeniser| { t.eat_char() == Some($close) }
+ macro_rules! check_name {
+ ($name:expr) => {{
+ check_name!($name, t.get_source());
+ }};
+ ($name:expr, $source:expr) => {
+ if $name.chars().count() > 63 {
+ let error = SyntacticError::InvalidIdentifier($name.clone());
+ errors.push(Tracked::from(error, $source.clone()));
+ }
+ };
+ }
+
+ // Eat characters until the end character is found.
+ macro_rules! is_end {
+ ($end:expr) => {
+ |t: &mut Tokeniser| {
+ t.eat_char() == Some($end)
+ }
};
}
loop {
- t.eat_whitespace();
+ // Eat leading whitespace.
+ while let Some(c) = t.peek_char() {
+ match [' ', '\n', '\r', '\t'].contains(&c) {
+ true => t.eat_char(),
+ false => break,
+ };
+ }
t.mark_start();
let Some(c) = t.eat_char() else { break };
let token = match c {
'"' => {
let source = t.get_source();
- match t.track_until(is_any!('"')) {
+ match t.track_until(is_end!('"')) {
Some(string) => {
let mut bytes = string.into_bytes();
bytes.push(0x00);
@@ -48,14 +69,14 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser, label_name: &str) -> Result<
}
'\'' => {
let source = t.get_source();
- match t.track_until(is_any!('\'')) {
+ match t.track_until(is_end!('\'')) {
Some(string) => SyntacticToken::String(string.into_bytes()),
None => err!(SyntacticError::UnterminatedRawString, source),
}
}
'(' => {
let source = t.get_source();
- if let Some(string) = t.track_until(is_any!(')')) {
+ if let Some(string) = t.track_until(is_end!(')')) {
// Check if the comment fills the entire line.
if t.start.position.column == 0 && t.end_of_line() {
if let Some(path) = string.strip_prefix(": ") {
@@ -69,14 +90,13 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser, label_name: &str) -> Result<
err!(SyntacticError::UnterminatedComment, source)
}
}
+ ')' => err!(SyntacticError::UnmatchedCommentTerminator),
'%' => {
- let mut name = t.eat_token();
- if let Some(local) = name.strip_prefix('~') {
- name = format!("{label_name}/{local}");
- }
+ let name = t.eat_token();
let source = t.get_source();
+ check_name!(name, source);
t.mark_child();
- if let Some(_) = t.track_until(is_any!(';')) {
+ if let Some(_) = t.track_until(is_end!(';')) {
let child = t.tokenise_child_span();
match parse_body_from_tokeniser(child, &label_name) {
Ok(body) => {
@@ -93,44 +113,44 @@ fn parse_syntactic_from_tokeniser(mut t: Tokeniser, label_name: &str) -> Result<
err!(SyntacticError::UnterminatedMacroDefinition, source);
}
}
+ ';' => err!(SyntacticError::UnmatchedMacroTerminator),
'{' => SyntacticToken::BlockOpen,
'}' => SyntacticToken::BlockClose,
- '[' => continue,
- ']' => continue,
-
- ')' => err!(SyntacticError::UnmatchedCommentTerminator),
- ';' => err!(SyntacticError::UnmatchedMacroTerminator),
-
+ '['|']' => continue,
'@' => {
label_name = t.eat_token();
+ check_name!(label_name);
SyntacticToken::LabelDefinition(label_name.clone())
}
'&' => {
- let name = t.eat_token();
- SyntacticToken::LabelDefinition(format!("{label_name}/{name}"))
+ let name = format!("{label_name}/{}", t.eat_token());
+ check_name!(name);
+ SyntacticToken::LabelDefinition(name)
}
'~' => {
- let name = t.eat_token();
- SyntacticToken::Invocation(format!("{label_name}/{name}"))
+ let name = format!("{label_name}/{}", t.eat_token());
+ check_name!(name);
+ SyntacticToken::Symbol(name)
}
'#' => {
let token = t.eat_token();
match token.parse::<Value>() {
- Ok(value) => SyntacticToken::Padding(value),
- Err(_) => err!(SyntacticError::InvalidPaddingValue),
+ Ok(value) => SyntacticToken::Pad(value),
+ Err(_) => err!(SyntacticError::InvalidPadValue),
}
},
':' => {
- SyntacticToken::Invocation(String::from(':'))
+ SyntacticToken::Instruction(Instruction { value: 0x21 })
}
c => {
let token = format!("{c}{}", t.eat_token());
if let Ok(value) = token.parse::<Value>() {
- SyntacticToken::RawValue(value)
+ SyntacticToken::Literal(value)
} else if let Ok(instruction) = token.parse::<Instruction>() {
SyntacticToken::Instruction(instruction)
} else {
- SyntacticToken::Invocation(token)
+ check_name!(token);
+ SyntacticToken::Symbol(token)
}
}
};
diff --git a/src/stages/syntactic_tokens.rs b/src/stages/syntactic_tokens.rs
index 4c258c6..35afa80 100644
--- a/src/stages/syntactic_tokens.rs
+++ b/src/stages/syntactic_tokens.rs
@@ -1,20 +1,17 @@
use crate::*;
-pub enum SyntacticToken {
- Comment(String),
-
- LabelDefinition(String),
- MacroDefinition(SyntacticMacroDefinition),
-
- RawValue(Value),
- Instruction(Instruction),
- Invocation(String),
- Padding(Value),
+pub enum SyntacticToken {
+ Literal(Value),
+ Pad(Value),
String(Vec<u8>),
-
+ Comment(String),
BlockOpen,
BlockClose,
+ Symbol(String),
+ Instruction(Instruction),
+ LabelDefinition(String),
+ MacroDefinition(SyntacticMacroDefinition),
}
pub struct SyntacticMacroDefinition {
@@ -28,13 +25,11 @@ pub enum SyntacticError {
UnterminatedRawString,
UnterminatedNullString,
UnterminatedMacroDefinition,
-
UnmatchedBlockTerminator,
UnmatchedCommentTerminator,
UnmatchedMacroTerminator,
-
- InvalidPaddingValue,
-
+ InvalidPadValue,
+ InvalidIdentifier(String),
MacroDefinitionInMacroDefinition,
LabelDefinitionInMacroDefinition,
}
@@ -54,54 +49,46 @@ fn report_syntactic_error(error: &Tracked<SyntacticError>, source_code: &str) {
SyntacticError::UnterminatedComment =>
"Comment was not terminated, add a ')' character to terminate",
SyntacticError::UnterminatedRawString =>
- "Raw string was not terminated, add a ' character to terminate",
+ "String was not terminated, add a ' character to terminate",
SyntacticError::UnterminatedNullString =>
- "Null-terminated string was not terminated, add a '\"' character to terminate",
+ "String was not terminated, add a '\"' character to terminate",
SyntacticError::UnterminatedMacroDefinition =>
"Macro definition was not terminated, add a ';' character to terminate",
-
SyntacticError::UnmatchedBlockTerminator =>
"Attempted to terminate a block, but no block was in progress",
SyntacticError::UnmatchedCommentTerminator =>
"Attempted to terminate a comment, but no comment was in progress",
SyntacticError::UnmatchedMacroTerminator =>
"Attempted to terminate a macro definition, but no macro definition was in progress",
-
- SyntacticError::InvalidPaddingValue =>
- "The padding value must be either two or four hexadecimal digits",
-
+ SyntacticError::InvalidPadValue =>
+ "The pad value must be two or four hexadecimal digits",
+ SyntacticError::InvalidIdentifier(name) =>
+ &format!("An identifier cannot exceed 63 characters in length: {name}"),
SyntacticError::MacroDefinitionInMacroDefinition =>
"A macro cannot be defined inside another macro",
SyntacticError::LabelDefinitionInMacroDefinition =>
"A label cannot be defined inside a macro",
};
-
report_source_issue(LogLevel::Error, &context, message);
}
pub fn print_syntactic_token(i: usize, token: &SyntacticToken) {
match token {
- SyntacticToken::Comment(_) =>
- indent!(i, "Comment"),
-
- SyntacticToken::LabelDefinition(name) =>
- indent!(i, "LabelDefinition({name})"),
+ SyntacticToken::Literal(value) => indent!(i, "Literal({value})"),
+ SyntacticToken::Pad(value) => indent!(i, "Pad({value})"),
+ SyntacticToken::String(bytes) => indent!(i, "String({})", String::from_utf8_lossy(bytes)),
+ SyntacticToken::Comment(_) => indent!(i, "Comment"),
+ SyntacticToken::BlockOpen => indent!(i, "BlockOpen"),
+ SyntacticToken::BlockClose => indent!(i, "BlockClose"),
+ SyntacticToken::Symbol(name) => indent!(i, "Symbol({name})"),
+ SyntacticToken::Instruction(instruction) => indent!(i, "Instruction({instruction})"),
+ SyntacticToken::LabelDefinition(name) => indent!(i, "LabelDefinition({name})"),
SyntacticToken::MacroDefinition(definition) => {
indent!(i, "MacroDefinition({})", definition.name);
for token in &definition.body {
print_syntactic_token(i+1, token);
}
}
-
- SyntacticToken::RawValue(value) => indent!(i, "RawValue({value})"),
- SyntacticToken::Instruction(instruction) => indent!(i, "Instruction({instruction})"),
- SyntacticToken::Invocation(name) => indent!(i, "Invocation({name})"),
-
- SyntacticToken::Padding(value) => indent!(i, "Padding({value})"),
- SyntacticToken::String(bytes) => indent!(i, "String({})", String::from_utf8_lossy(bytes)),
-
- SyntacticToken::BlockOpen => indent!(i, "BlockOpen"),
- SyntacticToken::BlockClose => indent!(i, "BlockOpen"),
}
}
diff --git a/src/types/instruction.rs b/src/types/instruction.rs
index 3ab5eb9..252fc68 100644
--- a/src/types/instruction.rs
+++ b/src/types/instruction.rs
@@ -1,3 +1,5 @@
+use crate::*;
+
use Operation as Op;
@@ -21,104 +23,103 @@ impl Instruction {
}
pub fn return_mode(&self) -> bool {
- self.value & 0x80 != 0
+ self.value & RETURN_MODE != 0
}
- pub fn immediate_mode(&self) -> bool {
- self.value & 0x40 != 0
+ pub fn wide_mode(&self) -> bool {
+ self.value & WIDE_MODE != 0
}
- pub fn wide_mode(&self) -> bool {
- self.value & 0x20 != 0
+ pub fn immediate_mode(&self) -> bool {
+ self.value & IMMEDIATE_MODE != 0
}
}
-
impl std::fmt::Display for Instruction {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
write!(f, "{}", match self.value {
// Stack operators
0x00=>"HLT",0x20=>"NOP" ,0x40=>"DB1" ,0x60=>"DB2" ,0x80=>"DB3" ,0xA0=>"DB4" ,0xC0=>"DB5" ,0xE0=>"DB6" ,
- 0x01=>"PSH",0x21=>"PSH*",0x41=>"PSH:",0x61=>"PSH*:",0x81=>"PSHr",0xA1=>"PSHr*",0xC1=>"PSHr:",0xE1=>"PSHr*:",
- 0x02=>"POP",0x22=>"POP*",0x42=>"POP:",0x62=>"POP*:",0x82=>"POPr",0xA2=>"POPr*",0xC2=>"POPr:",0xE2=>"POPr*:",
- 0x03=>"CPY",0x23=>"CPY*",0x43=>"CPY:",0x63=>"CPY*:",0x83=>"CPYr",0xA3=>"CPYr*",0xC3=>"CPYr:",0xE3=>"CPYr*:",
- 0x04=>"DUP",0x24=>"DUP*",0x44=>"DUP:",0x64=>"DUP*:",0x84=>"DUPr",0xA4=>"DUPr*",0xC4=>"DUPr:",0xE4=>"DUPr*:",
- 0x05=>"OVR",0x25=>"OVR*",0x45=>"OVR:",0x65=>"OVR*:",0x85=>"OVRr",0xA5=>"OVRr*",0xC5=>"OVRr:",0xE5=>"OVRr*:",
- 0x06=>"SWP",0x26=>"SWP*",0x46=>"SWP:",0x66=>"SWP*:",0x86=>"SWPr",0xA6=>"SWPr*",0xC6=>"SWPr:",0xE6=>"SWPr*:",
- 0x07=>"ROT",0x27=>"ROT*",0x47=>"ROT:",0x67=>"ROT*:",0x87=>"ROTr",0xA7=>"ROTr*",0xC7=>"ROTr:",0xE7=>"ROTr*:",
+ 0x01=>"PSH",0x21=>"PSH:",0x41=>"PSH*",0x61=>"PSH*:",0x81=>"PSHr",0xA1=>"PSHr:",0xC1=>"PSHr*",0xE1=>"PSHr*:",
+ 0x02=>"POP",0x22=>"POP:",0x42=>"POP*",0x62=>"POP*:",0x82=>"POPr",0xA2=>"POPr:",0xC2=>"POPr*",0xE2=>"POPr*:",
+ 0x03=>"CPY",0x23=>"CPY:",0x43=>"CPY*",0x63=>"CPY*:",0x83=>"CPYr",0xA3=>"CPYr:",0xC3=>"CPYr*",0xE3=>"CPYr*:",
+ 0x04=>"DUP",0x24=>"DUP:",0x44=>"DUP*",0x64=>"DUP*:",0x84=>"DUPr",0xA4=>"DUPr:",0xC4=>"DUPr*",0xE4=>"DUPr*:",
+ 0x05=>"OVR",0x25=>"OVR:",0x45=>"OVR*",0x65=>"OVR*:",0x85=>"OVRr",0xA5=>"OVRr:",0xC5=>"OVRr*",0xE5=>"OVRr*:",
+ 0x06=>"SWP",0x26=>"SWP:",0x46=>"SWP*",0x66=>"SWP*:",0x86=>"SWPr",0xA6=>"SWPr:",0xC6=>"SWPr*",0xE6=>"SWPr*:",
+ 0x07=>"ROT",0x27=>"ROT:",0x47=>"ROT*",0x67=>"ROT*:",0x87=>"ROTr",0xA7=>"ROTr:",0xC7=>"ROTr*",0xE7=>"ROTr*:",
// Control operators
- 0x08=>"JMP",0x28=>"JMP*",0x48=>"JMP:",0x68=>"JMP*:",0x88=>"JMPr",0xA8=>"JMPr*",0xC8=>"JMPr:",0xE8=>"JMPr*:",
- 0x09=>"JMS",0x29=>"JMS*",0x49=>"JMS:",0x69=>"JMS*:",0x89=>"JMSr",0xA9=>"JMSr*",0xC9=>"JMSr:",0xE9=>"JMSr*:",
- 0x0A=>"JCN",0x2A=>"JCN*",0x4A=>"JCN:",0x6A=>"JCN*:",0x8A=>"JCNr",0xAA=>"JCNr*",0xCA=>"JCNr:",0xEA=>"JCNr*:",
- 0x0B=>"JCS",0x2B=>"JCS*",0x4B=>"JCS:",0x6B=>"JCS*:",0x8B=>"JCSr",0xAB=>"JCSr*",0xCB=>"JCSr:",0xEB=>"JCSr*:",
- 0x0C=>"LDA",0x2C=>"LDA*",0x4C=>"LDA:",0x6C=>"LDA*:",0x8C=>"LDAr",0xAC=>"LDAr*",0xCC=>"LDAr:",0xEC=>"LDAr*:",
- 0x0D=>"STA",0x2D=>"STA*",0x4D=>"STA:",0x6D=>"STA*:",0x8D=>"STAr",0xAD=>"STAr*",0xCD=>"STAr:",0xED=>"STAr*:",
- 0x0E=>"LDD",0x2E=>"LDD*",0x4E=>"LDD:",0x6E=>"LDD*:",0x8E=>"LDDr",0xAE=>"LDDr*",0xCE=>"LDDr:",0xEE=>"LDDr*:",
- 0x0F=>"STD",0x2F=>"STD*",0x4F=>"STD:",0x6F=>"STD*:",0x8F=>"STDr",0xAF=>"STDr*",0xCF=>"STDr:",0xEF=>"STDr*:",
+ 0x08=>"JMP",0x28=>"JMP:",0x48=>"JMP*",0x68=>"JMP*:",0x88=>"JMPr",0xA8=>"JMPr:",0xC8=>"JMPr*",0xE8=>"JMPr*:",
+ 0x09=>"JMS",0x29=>"JMS:",0x49=>"JMS*",0x69=>"JMS*:",0x89=>"JMSr",0xA9=>"JMSr:",0xC9=>"JMSr*",0xE9=>"JMSr*:",
+ 0x0A=>"JCN",0x2A=>"JCN:",0x4A=>"JCN*",0x6A=>"JCN*:",0x8A=>"JCNr",0xAA=>"JCNr:",0xCA=>"JCNr*",0xEA=>"JCNr*:",
+ 0x0B=>"JCS",0x2B=>"JCS:",0x4B=>"JCS*",0x6B=>"JCS*:",0x8B=>"JCSr",0xAB=>"JCSr:",0xCB=>"JCSr*",0xEB=>"JCSr*:",
+ 0x0C=>"LDA",0x2C=>"LDA:",0x4C=>"LDA*",0x6C=>"LDA*:",0x8C=>"LDAr",0xAC=>"LDAr:",0xCC=>"LDAr*",0xEC=>"LDAr*:",
+ 0x0D=>"STA",0x2D=>"STA:",0x4D=>"STA*",0x6D=>"STA*:",0x8D=>"STAr",0xAD=>"STAr:",0xCD=>"STAr*",0xED=>"STAr*:",
+ 0x0E=>"LDD",0x2E=>"LDD:",0x4E=>"LDD*",0x6E=>"LDD*:",0x8E=>"LDDr",0xAE=>"LDDr:",0xCE=>"LDDr*",0xEE=>"LDDr*:",
+ 0x0F=>"STD",0x2F=>"STD:",0x4F=>"STD*",0x6F=>"STD*:",0x8F=>"STDr",0xAF=>"STDr:",0xCF=>"STDr*",0xEF=>"STDr*:",
// Numeric operators
- 0x10=>"ADD",0x30=>"ADD*",0x50=>"ADD:",0x70=>"ADD*:",0x90=>"ADDr",0xB0=>"ADDr*",0xD0=>"ADDr:",0xF0=>"ADDr*:",
- 0x11=>"SUB",0x31=>"SUB*",0x51=>"SUB:",0x71=>"SUB*:",0x91=>"SUBr",0xB1=>"SUBr*",0xD1=>"SUBr:",0xF1=>"SUBr*:",
- 0x12=>"INC",0x32=>"INC*",0x52=>"INC:",0x72=>"INC*:",0x92=>"INCr",0xB2=>"INCr*",0xD2=>"INCr:",0xF2=>"INCr*:",
- 0x13=>"DEC",0x33=>"DEC*",0x53=>"DEC:",0x73=>"DEC*:",0x93=>"DECr",0xB3=>"DECr*",0xD3=>"DECr:",0xF3=>"DECr*:",
- 0x14=>"LTH",0x34=>"LTH*",0x54=>"LTH:",0x74=>"LTH*:",0x94=>"LTHr",0xB4=>"LTHr*",0xD4=>"LTHr:",0xF4=>"LTHr*:",
- 0x15=>"GTH",0x35=>"GTH*",0x55=>"GTH:",0x75=>"GTH*:",0x95=>"GTHr",0xB5=>"GTHr*",0xD5=>"GTHr:",0xF5=>"GTHr*:",
- 0x16=>"EQU",0x36=>"EQU*",0x56=>"EQU:",0x76=>"EQU*:",0x96=>"EQUr",0xB6=>"EQUr*",0xD6=>"EQUr:",0xF6=>"EQUr*:",
- 0x17=>"NQK",0x37=>"NQK*",0x57=>"NQK:",0x77=>"NQK*:",0x97=>"NQKr",0xB7=>"NQKr*",0xD7=>"NQKr:",0xF7=>"NQKr*:",
+ 0x10=>"ADD",0x30=>"ADD:",0x50=>"ADD*",0x70=>"ADD*:",0x90=>"ADDr",0xB0=>"ADDr:",0xD0=>"ADDr*",0xF0=>"ADDr*:",
+ 0x11=>"SUB",0x31=>"SUB:",0x51=>"SUB*",0x71=>"SUB*:",0x91=>"SUBr",0xB1=>"SUBr:",0xD1=>"SUBr*",0xF1=>"SUBr*:",
+ 0x12=>"INC",0x32=>"INC:",0x52=>"INC*",0x72=>"INC*:",0x92=>"INCr",0xB2=>"INCr:",0xD2=>"INCr*",0xF2=>"INCr*:",
+ 0x13=>"DEC",0x33=>"DEC:",0x53=>"DEC*",0x73=>"DEC*:",0x93=>"DECr",0xB3=>"DECr:",0xD3=>"DECr*",0xF3=>"DECr*:",
+ 0x14=>"LTH",0x34=>"LTH:",0x54=>"LTH*",0x74=>"LTH*:",0x94=>"LTHr",0xB4=>"LTHr:",0xD4=>"LTHr*",0xF4=>"LTHr*:",
+ 0x15=>"GTH",0x35=>"GTH:",0x55=>"GTH*",0x75=>"GTH*:",0x95=>"GTHr",0xB5=>"GTHr:",0xD5=>"GTHr*",0xF5=>"GTHr*:",
+ 0x16=>"EQU",0x36=>"EQU:",0x56=>"EQU*",0x76=>"EQU*:",0x96=>"EQUr",0xB6=>"EQUr:",0xD6=>"EQUr*",0xF6=>"EQUr*:",
+ 0x17=>"NQK",0x37=>"NQK:",0x57=>"NQK*",0x77=>"NQK*:",0x97=>"NQKr",0xB7=>"NQKr:",0xD7=>"NQKr*",0xF7=>"NQKr*:",
// Bitwise operators
- 0x18=>"SHL",0x38=>"SHL*",0x58=>"SHL:",0x78=>"SHL*:",0x98=>"SHLr",0xB8=>"SHLr*",0xD8=>"SHLr:",0xF8=>"SHLr*:",
- 0x19=>"SHR",0x39=>"SHR*",0x59=>"SHR:",0x79=>"SHR*:",0x99=>"SHRr",0xB9=>"SHRr*",0xD9=>"SHRr:",0xF9=>"SHRr*:",
- 0x1A=>"ROL",0x3A=>"ROL*",0x5A=>"ROL:",0x7A=>"ROL*:",0x9A=>"ROLr",0xBA=>"ROLr*",0xDA=>"ROLr:",0xFA=>"ROLr*:",
- 0x1B=>"ROR",0x3B=>"ROR*",0x5B=>"ROR:",0x7B=>"ROR*:",0x9B=>"RORr",0xBB=>"RORr*",0xDB=>"RORr:",0xFB=>"RORr*:",
- 0x1C=>"IOR",0x3C=>"IOR*",0x5C=>"IOR:",0x7C=>"IOR*:",0x9C=>"IORr",0xBC=>"IORr*",0xDC=>"IORr:",0xFC=>"IORr*:",
- 0x1D=>"XOR",0x3D=>"XOR*",0x5D=>"XOR:",0x7D=>"XOR*:",0x9D=>"XORr",0xBD=>"XORr*",0xDD=>"XORr:",0xFD=>"XORr*:",
- 0x1E=>"AND",0x3E=>"AND*",0x5E=>"AND:",0x7E=>"AND*:",0x9E=>"ANDr",0xBE=>"ANDr*",0xDE=>"ANDr:",0xFE=>"ANDr*:",
- 0x1F=>"NOT",0x3F=>"NOT*",0x5F=>"NOT:",0x7F=>"NOT*:",0x9F=>"NOTr",0xBF=>"NOTr*",0xDF=>"NOTr:",0xFF=>"NOTr*:",
+ 0x18=>"SHL",0x38=>"SHL:",0x58=>"SHL*",0x78=>"SHL*:",0x98=>"SHLr",0xB8=>"SHLr:",0xD8=>"SHLr*",0xF8=>"SHLr*:",
+ 0x19=>"SHR",0x39=>"SHR:",0x59=>"SHR*",0x79=>"SHR*:",0x99=>"SHRr",0xB9=>"SHRr:",0xD9=>"SHRr*",0xF9=>"SHRr*:",
+ 0x1A=>"ROL",0x3A=>"ROL:",0x5A=>"ROL*",0x7A=>"ROL*:",0x9A=>"ROLr",0xBA=>"ROLr:",0xDA=>"ROLr*",0xFA=>"ROLr*:",
+ 0x1B=>"ROR",0x3B=>"ROR:",0x5B=>"ROR*",0x7B=>"ROR*:",0x9B=>"RORr",0xBB=>"RORr:",0xDB=>"RORr*",0xFB=>"RORr*:",
+ 0x1C=>"IOR",0x3C=>"IOR:",0x5C=>"IOR*",0x7C=>"IOR*:",0x9C=>"IORr",0xBC=>"IORr:",0xDC=>"IORr*",0xFC=>"IORr*:",
+ 0x1D=>"XOR",0x3D=>"XOR:",0x5D=>"XOR*",0x7D=>"XOR*:",0x9D=>"XORr",0xBD=>"XORr:",0xDD=>"XORr*",0xFD=>"XORr*:",
+ 0x1E=>"AND",0x3E=>"AND:",0x5E=>"AND*",0x7E=>"AND*:",0x9E=>"ANDr",0xBE=>"ANDr:",0xDE=>"ANDr*",0xFE=>"ANDr*:",
+ 0x1F=>"NOT",0x3F=>"NOT:",0x5F=>"NOT*",0x7F=>"NOT*:",0x9F=>"NOTr",0xBF=>"NOTr:",0xDF=>"NOTr*",0xFF=>"NOTr*:",
})
}
}
-
impl std::str::FromStr for Instruction {
type Err = ();
fn from_str(token: &str) -> Result<Self, Self::Err> {
Ok( Instruction { value: match token {
// Stack operators
- "HLT"=>0x00,"NOP"=>0x20 ,"DB1"=>0x40 ,"DB2"=>0x60 ,"DB3"=>0x80 ,"DB4"=>0xA0 ,"DB5"=>0xC0 ,"DB6"=>0xE0 ,
- "PSH"=>0x01,"PSH*"=>0x21,"PSH:"=>0x41,"PSH*:"=>0x61,"PSHr"=>0x81,"PSHr*"=>0xA1,"PSHr:"=>0xC1,"PSHr*:"=>0xE1,
- "POP"=>0x02,"POP*"=>0x22,"POP:"=>0x42,"POP*:"=>0x62,"POPr"=>0x82,"POPr*"=>0xA2,"POPr:"=>0xC2,"POPr*:"=>0xE2,
- "CPY"=>0x03,"CPY*"=>0x23,"CPY:"=>0x43,"CPY*:"=>0x63,"CPYr"=>0x83,"CPYr*"=>0xA3,"CPYr:"=>0xC3,"CPYr*:"=>0xE3,
- "DUP"=>0x04,"DUP*"=>0x24,"DUP:"=>0x44,"DUP*:"=>0x64,"DUPr"=>0x84,"DUPr*"=>0xA4,"DUPr:"=>0xC4,"DUPr*:"=>0xE4,
- "OVR"=>0x05,"OVR*"=>0x25,"OVR:"=>0x45,"OVR*:"=>0x65,"OVRr"=>0x85,"OVRr*"=>0xA5,"OVRr:"=>0xC5,"OVRr*:"=>0xE5,
- "SWP"=>0x06,"SWP*"=>0x26,"SWP:"=>0x46,"SWP*:"=>0x66,"SWPr"=>0x86,"SWPr*"=>0xA6,"SWPr:"=>0xC6,"SWPr*:"=>0xE6,
- "ROT"=>0x07,"ROT*"=>0x27,"ROT:"=>0x47,"ROT*:"=>0x67,"ROTr"=>0x87,"ROTr*"=>0xA7,"ROTr:"=>0xC7,"ROTr*:"=>0xE7,
+ "HLT"=>0x00,"NOP" =>0x20,"DB1" =>0x40,"DB2" =>0x60,"DB3" =>0x80,"DB4" =>0xA0,"DB5" =>0xC0,"DB6" =>0xE0,
+ "PSH"=>0x01,"PSH:"=>0x21,"PSH*"=>0x41,"PSH*:"=>0x61,"PSHr"=>0x81,"PSHr:"=>0xA1,"PSHr*"=>0xC1,"PSHr*:"=>0xE1,
+ ":"=>0x21, "*:"=>0x61, "r:"=>0xA1, "r*:"=>0xE1,
+ "POP"=>0x02,"POP:"=>0x22,"POP*"=>0x42,"POP*:"=>0x62,"POPr"=>0x82,"POPr:"=>0xA2,"POPr*"=>0xC2,"POPr*:"=>0xE2,
+ "CPY"=>0x03,"CPY:"=>0x23,"CPY*"=>0x43,"CPY*:"=>0x63,"CPYr"=>0x83,"CPYr:"=>0xA3,"CPYr*"=>0xC3,"CPYr*:"=>0xE3,
+ "DUP"=>0x04,"DUP:"=>0x24,"DUP*"=>0x44,"DUP*:"=>0x64,"DUPr"=>0x84,"DUPr:"=>0xA4,"DUPr*"=>0xC4,"DUPr*:"=>0xE4,
+ "OVR"=>0x05,"OVR:"=>0x25,"OVR*"=>0x45,"OVR*:"=>0x65,"OVRr"=>0x85,"OVRr:"=>0xA5,"OVRr*"=>0xC5,"OVRr*:"=>0xE5,
+ "SWP"=>0x06,"SWP:"=>0x26,"SWP*"=>0x46,"SWP*:"=>0x66,"SWPr"=>0x86,"SWPr:"=>0xA6,"SWPr*"=>0xC6,"SWPr*:"=>0xE6,
+ "ROT"=>0x07,"ROT:"=>0x27,"ROT*"=>0x47,"ROT*:"=>0x67,"ROTr"=>0x87,"ROTr:"=>0xA7,"ROTr*"=>0xC7,"ROTr*:"=>0xE7,
// Control operators
- "JMP"=>0x08,"JMP*"=>0x28,"JMP:"=>0x48,"JMP*:"=>0x68,"JMPr"=>0x88,"JMPr*"=>0xA8,"JMPr:"=>0xC8,"JMPr*:"=>0xE8,
- "JMS"=>0x09,"JMS*"=>0x29,"JMS:"=>0x49,"JMS*:"=>0x69,"JMSr"=>0x89,"JMSr*"=>0xA9,"JMSr:"=>0xC9,"JMSr*:"=>0xE9,
- "JCN"=>0x0A,"JCN*"=>0x2A,"JCN:"=>0x4A,"JCN*:"=>0x6A,"JCNr"=>0x8A,"JCNr*"=>0xAA,"JCNr:"=>0xCA,"JCNr*:"=>0xEA,
- "JCS"=>0x0B,"JCS*"=>0x2B,"JCS:"=>0x4B,"JCS*:"=>0x6B,"JCSr"=>0x8B,"JCSr*"=>0xAB,"JCSr:"=>0xCB,"JCSr*:"=>0xEB,
- "LDA"=>0x0C,"LDA*"=>0x2C,"LDA:"=>0x4C,"LDA*:"=>0x6C,"LDAr"=>0x8C,"LDAr*"=>0xAC,"LDAr:"=>0xCC,"LDAr*:"=>0xEC,
- "STA"=>0x0D,"STA*"=>0x2D,"STA:"=>0x4D,"STA*:"=>0x6D,"STAr"=>0x8D,"STAr*"=>0xAD,"STAr:"=>0xCD,"STAr*:"=>0xED,
- "LDD"=>0x0E,"LDD*"=>0x2E,"LDD:"=>0x4E,"LDD*:"=>0x6E,"LDDr"=>0x8E,"LDDr*"=>0xAE,"LDDr:"=>0xCE,"LDDr*:"=>0xEE,
- "STD"=>0x0F,"STD*"=>0x2F,"STD:"=>0x4F,"STD*:"=>0x6F,"STDr"=>0x8F,"STDr*"=>0xAF,"STDr:"=>0xCF,"STDr*:"=>0xEF,
+ "JMP"=>0x08,"JMP:"=>0x28,"JMP*"=>0x48,"JMP*:"=>0x68,"JMPr"=>0x88,"JMPr:"=>0xA8,"JMPr*"=>0xC8,"JMPr*:"=>0xE8,
+ "JMS"=>0x09,"JMS:"=>0x29,"JMS*"=>0x49,"JMS*:"=>0x69,"JMSr"=>0x89,"JMSr:"=>0xA9,"JMSr*"=>0xC9,"JMSr*:"=>0xE9,
+ "JCN"=>0x0A,"JCN:"=>0x2A,"JCN*"=>0x4A,"JCN*:"=>0x6A,"JCNr"=>0x8A,"JCNr:"=>0xAA,"JCNr*"=>0xCA,"JCNr*:"=>0xEA,
+ "JCS"=>0x0B,"JCS:"=>0x2B,"JCS*"=>0x4B,"JCS*:"=>0x6B,"JCSr"=>0x8B,"JCSr:"=>0xAB,"JCSr*"=>0xCB,"JCSr*:"=>0xEB,
+ "LDA"=>0x0C,"LDA:"=>0x2C,"LDA*"=>0x4C,"LDA*:"=>0x6C,"LDAr"=>0x8C,"LDAr:"=>0xAC,"LDAr*"=>0xCC,"LDAr*:"=>0xEC,
+ "STA"=>0x0D,"STA:"=>0x2D,"STA*"=>0x4D,"STA*:"=>0x6D,"STAr"=>0x8D,"STAr:"=>0xAD,"STAr*"=>0xCD,"STAr*:"=>0xED,
+ "LDD"=>0x0E,"LDD:"=>0x2E,"LDD*"=>0x4E,"LDD*:"=>0x6E,"LDDr"=>0x8E,"LDDr:"=>0xAE,"LDDr*"=>0xCE,"LDDr*:"=>0xEE,
+ "STD"=>0x0F,"STD:"=>0x2F,"STD*"=>0x4F,"STD*:"=>0x6F,"STDr"=>0x8F,"STDr:"=>0xAF,"STDr*"=>0xCF,"STDr*:"=>0xEF,
// Numeric operators
- "ADD"=>0x10,"ADD*"=>0x30,"ADD:"=>0x50,"ADD*:"=>0x70,"ADDr"=>0x90,"ADDr*"=>0xB0,"ADDr:"=>0xD0,"ADDr*:"=>0xF0,
- "SUB"=>0x11,"SUB*"=>0x31,"SUB:"=>0x51,"SUB*:"=>0x71,"SUBr"=>0x91,"SUBr*"=>0xB1,"SUBr:"=>0xD1,"SUBr*:"=>0xF1,
- "INC"=>0x12,"INC*"=>0x32,"INC:"=>0x52,"INC*:"=>0x72,"INCr"=>0x92,"INCr*"=>0xB2,"INCr:"=>0xD2,"INCr*:"=>0xF2,
- "DEC"=>0x13,"DEC*"=>0x33,"DEC:"=>0x53,"DEC*:"=>0x73,"DECr"=>0x93,"DECr*"=>0xB3,"DECr:"=>0xD3,"DECr*:"=>0xF3,
- "LTH"=>0x14,"LTH*"=>0x34,"LTH:"=>0x54,"LTH*:"=>0x74,"LTHr"=>0x94,"LTHr*"=>0xB4,"LTHr:"=>0xD4,"LTHr*:"=>0xF4,
- "GTH"=>0x15,"GTH*"=>0x35,"GTH:"=>0x55,"GTH*:"=>0x75,"GTHr"=>0x95,"GTHr*"=>0xB5,"GTHr:"=>0xD5,"GTHr*:"=>0xF5,
- "EQU"=>0x16,"EQU*"=>0x36,"EQU:"=>0x56,"EQU*:"=>0x76,"EQUr"=>0x96,"EQUr*"=>0xB6,"EQUr:"=>0xD6,"EQUr*:"=>0xF6,
- "NQK"=>0x17,"NQK*"=>0x37,"NQK:"=>0x57,"NQK*:"=>0x77,"NQKr"=>0x97,"NQKr*"=>0xB7,"NQKr:"=>0xD7,"NQKr*:"=>0xF7,
+ "ADD"=>0x10,"ADD:"=>0x30,"ADD*"=>0x50,"ADD*:"=>0x70,"ADDr"=>0x90,"ADDr:"=>0xB0,"ADDr*"=>0xD0,"ADDr*:"=>0xF0,
+ "SUB"=>0x11,"SUB:"=>0x31,"SUB*"=>0x51,"SUB*:"=>0x71,"SUBr"=>0x91,"SUBr:"=>0xB1,"SUBr*"=>0xD1,"SUBr*:"=>0xF1,
+ "INC"=>0x12,"INC:"=>0x32,"INC*"=>0x52,"INC*:"=>0x72,"INCr"=>0x92,"INCr:"=>0xB2,"INCr*"=>0xD2,"INCr*:"=>0xF2,
+ "DEC"=>0x13,"DEC:"=>0x33,"DEC*"=>0x53,"DEC*:"=>0x73,"DECr"=>0x93,"DECr:"=>0xB3,"DECr*"=>0xD3,"DECr*:"=>0xF3,
+ "LTH"=>0x14,"LTH:"=>0x34,"LTH*"=>0x54,"LTH*:"=>0x74,"LTHr"=>0x94,"LTHr:"=>0xB4,"LTHr*"=>0xD4,"LTHr*:"=>0xF4,
+ "GTH"=>0x15,"GTH:"=>0x35,"GTH*"=>0x55,"GTH*:"=>0x75,"GTHr"=>0x95,"GTHr:"=>0xB5,"GTHr*"=>0xD5,"GTHr*:"=>0xF5,
+ "EQU"=>0x16,"EQU:"=>0x36,"EQU*"=>0x56,"EQU*:"=>0x76,"EQUr"=>0x96,"EQUr:"=>0xB6,"EQUr*"=>0xD6,"EQUr*:"=>0xF6,
+ "NQK"=>0x17,"NQK:"=>0x37,"NQK*"=>0x57,"NQK*:"=>0x77,"NQKr"=>0x97,"NQKr:"=>0xB7,"NQKr*"=>0xD7,"NQKr*:"=>0xF7,
// Bitwise operators
- "SHL"=>0x18,"SHL*"=>0x38,"SHL:"=>0x58,"SHL*:"=>0x78,"SHLr"=>0x98,"SHLr*"=>0xB8,"SHLr:"=>0xD8,"SHLr*:"=>0xF8,
- "SHR"=>0x19,"SHR*"=>0x39,"SHR:"=>0x59,"SHR*:"=>0x79,"SHRr"=>0x99,"SHRr*"=>0xB9,"SHRr:"=>0xD9,"SHRr*:"=>0xF9,
- "ROL"=>0x1A,"ROL*"=>0x3A,"ROL:"=>0x5A,"ROL*:"=>0x7A,"ROLr"=>0x9A,"ROLr*"=>0xBA,"ROLr:"=>0xDA,"ROLr*:"=>0xFA,
- "ROR"=>0x1B,"ROR*"=>0x3B,"ROR:"=>0x5B,"ROR*:"=>0x7B,"RORr"=>0x9B,"RORr*"=>0xBB,"RORr:"=>0xDB,"RORr*:"=>0xFB,
- "IOR"=>0x1C,"IOR*"=>0x3C,"IOR:"=>0x5C,"IOR*:"=>0x7C,"IORr"=>0x9C,"IORr*"=>0xBC,"IORr:"=>0xDC,"IORr*:"=>0xFC,
- "XOR"=>0x1D,"XOR*"=>0x3D,"XOR:"=>0x5D,"XOR*:"=>0x7D,"XORr"=>0x9D,"XORr*"=>0xBD,"XORr:"=>0xDD,"XORr*:"=>0xFD,
- "AND"=>0x1E,"AND*"=>0x3E,"AND:"=>0x5E,"AND*:"=>0x7E,"ANDr"=>0x9E,"ANDr*"=>0xBE,"ANDr:"=>0xDE,"ANDr*:"=>0xFE,
- "NOT"=>0x1F,"NOT*"=>0x3F,"NOT:"=>0x5F,"NOT*:"=>0x7F,"NOTr"=>0x9F,"NOTr*"=>0xBF,"NOTr:"=>0xDF,"NOTr*:"=>0xFF,
+ "SHL"=>0x18,"SHL:"=>0x38,"SHL*"=>0x58,"SHL*:"=>0x78,"SHLr"=>0x98,"SHLr:"=>0xB8,"SHLr*"=>0xD8,"SHLr*:"=>0xF8,
+ "SHR"=>0x19,"SHR:"=>0x39,"SHR*"=>0x59,"SHR*:"=>0x79,"SHRr"=>0x99,"SHRr:"=>0xB9,"SHRr*"=>0xD9,"SHRr*:"=>0xF9,
+ "ROL"=>0x1A,"ROL:"=>0x3A,"ROL*"=>0x5A,"ROL*:"=>0x7A,"ROLr"=>0x9A,"ROLr:"=>0xBA,"ROLr*"=>0xDA,"ROLr*:"=>0xFA,
+ "ROR"=>0x1B,"ROR:"=>0x3B,"ROR*"=>0x5B,"ROR*:"=>0x7B,"RORr"=>0x9B,"RORr:"=>0xBB,"RORr*"=>0xDB,"RORr*:"=>0xFB,
+ "IOR"=>0x1C,"IOR:"=>0x3C,"IOR*"=>0x5C,"IOR*:"=>0x7C,"IORr"=>0x9C,"IORr:"=>0xBC,"IORr*"=>0xDC,"IORr*:"=>0xFC,
+ "XOR"=>0x1D,"XOR:"=>0x3D,"XOR*"=>0x5D,"XOR*:"=>0x7D,"XORr"=>0x9D,"XORr:"=>0xBD,"XORr*"=>0xDD,"XORr*:"=>0xFD,
+ "AND"=>0x1E,"AND:"=>0x3E,"AND*"=>0x5E,"AND*:"=>0x7E,"ANDr"=>0x9E,"ANDr:"=>0xBE,"ANDr*"=>0xDE,"ANDr*:"=>0xFE,
+ "NOT"=>0x1F,"NOT:"=>0x3F,"NOT*"=>0x5F,"NOT*:"=>0x7F,"NOTr"=>0x9F,"NOTr:"=>0xBF,"NOTr*"=>0xDF,"NOTr*:"=>0xFF,
_ => return Err(()),
}})
}
@@ -136,7 +137,6 @@ pub enum Operation {
IOR, XOR, AND, NOT,
}
-
impl From<Operation> for u8 {
fn from(operation: Operation) -> Self {
match operation {
@@ -152,7 +152,6 @@ impl From<Operation> for u8 {
}
}
-
impl std::fmt::Display for Operation {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
write!(f, "{}", match self {
diff --git a/src/types/mod.rs b/src/types/mod.rs
index 998bc33..8094cb1 100644
--- a/src/types/mod.rs
+++ b/src/types/mod.rs
@@ -1,5 +1,4 @@
mod instruction;
mod value;
-
pub use instruction::*;
pub use value::*;