+ Inst::Halt | Inst::Nope => (),
+ Inst::Alu2(rd, op, r2) => {
+ self.reg[rd] = match op {
+ Op2::Add => self.reg[rd].wrapping_add(self.reg[r2]),
+ Op2::Sub => self.reg[rd].wrapping_sub(self.reg[r2]),
+ Op2::AddPc => self.pc.wrapping_add(self.reg[r2]),
+ Op2::And => self.reg[rd] & self.reg[r2],
+ Op2::Or => self.reg[rd] | self.reg[r2],
+ Op2::Xor => self.reg[rd] ^ self.reg[r2],
+ }
+ }
+ Inst::Jalr(r) => std::mem::swap(&mut self.pc, &mut self.reg[r]),
+ Inst::Move(rd, r2) => self.reg[rd] = self.reg[r2],
+ Inst::Alu1(rd, op) => {
+ self.reg[rd] = match op {
+ Op1::Inc => self.reg[rd].wrapping_add(1),
+ Op1::Dec => self.reg[rd].wrapping_sub(1),
+ Op1::Neg => 0u16.wrapping_sub(self.reg[rd]), // negate :)
+ Op1::Compl => !self.reg[rd],
+ }
+ }
+ Inst::Mem(LdSt::Ld, r, addr) => {
+ if let Addr::Extended {
+ base,
+ stack: true,
+ size,
+ ..
+ } = addr
+ {
+ self.reg[base] = self.reg[base].wrapping_sub(size.bytes());
+ }
+ let (addr0, size) = self.eval_addr(addr);
+ self.reg[r] = self.load(addr0, size);
+ }
+ Inst::Mem(LdSt::St, r, addr) => {
+ let (addr0, size) = self.eval_addr(addr);
+ self.store(addr0, size, self.reg[r]);
+ if let Addr::Extended {
+ base,
+ stack: true,
+ size,
+ ..
+ } = addr
+ {
+ self.reg[base] = self.reg[base].wrapping_add(size.bytes());
+ }
+ }
+ Inst::Branch(cond, offset) => {
+ if self.eval_cond(cond) {
+ self.pc = self.pc.offset(offset);
+ }
+ }
+ Inst::JumpI(offset) => self.pc = self.pc.offset(offset),
+ Inst::AddI(rd, i) => self.reg[rd] = self.reg[rd].offset(i),
+ Inst::AluCompact(rd, op, i) => {
+ self.reg[rd] = match op {
+ OpC::Lsl => self.reg[rd] << i.get(),
+ OpC::Lsr => self.reg[rd] >> i.get(),
+ OpC::Asr => (self.reg[rd] as i16 >> i.get()) as u16,
+ OpC::Rol => self.reg[rd].rotate_left(i.get() as u32),
+ OpC::Clr => self.reg[rd] & !(1 << i.get()),
+ OpC::Set => self.reg[rd] | (1 << i.get()),
+ OpC::Tog => self.reg[rd] ^ (1 << i.get()),
+ OpC::Ext => self.reg[rd] & (1 << i.get()),
+ }
+ }
+ Inst::LdImm(Half::Low, rd, i) => self.reg[rd] = self.reg[rd] & 0xFF00 | i as u16,
+ Inst::LdImm(Half::High, rd, i) => {
+ self.reg[rd] = self.reg[rd] & 0x00FF | (i as u16) << 8
+ }
+ }
+ }
+
+ /// Resolve an addressing mode to a concrete (byte address, access size) pair.
+ ///
+ /// `Fixed` slots are word-granular, hence the doubling into a byte address;
+ /// `Reg` takes the address straight from a register; `Extended` adds a signed
+ /// displacement to a base register and carries its own access size.
+ fn eval_addr(&self, addr: Addr) -> (u16, Size) {
+ match addr {
+ Addr::Extended { base, size, offset, .. } => {
+ let effective = self.reg[base].offset(offset);
+ (effective, size)
+ }
+ Addr::Reg(r) => (self.reg[r], Size::Word),
+ // NOTE(review): assumes i.get() * 2 cannot overflow u16 — confirm the
+ // width of the Fixed payload against the instruction encoding.
+ Addr::Fixed(i) => (i.get() * 2, Size::Word),
+ }
+ }
+
+ /// Read a value of the given `size` from memory at `addr`.
+ ///
+ /// Words are little-endian; a word read at 0xFFFF wraps and takes its high
+ /// byte from address 0x0000 (the address arithmetic uses `wrapping_add`).
+ /// Byte reads zero-extend into the returned `u16`.
+ fn load(&self, addr: u16, size: Size) -> u16 {
+ let lo = self.memory[addr as usize];
+ match size {
+ Size::Byte => u16::from(lo),
+ Size::Word => {
+ let hi = self.memory[addr.wrapping_add(1) as usize];
+ u16::from_le_bytes([lo, hi])
+ }
+ }
+ }
+
+ /// Write `value` to memory at `addr` with the given `size`.
+ ///
+ /// Words are stored little-endian; a word store at 0xFFFF wraps and places
+ /// its high byte at address 0x0000. Byte stores keep only the low byte of
+ /// `value`, which is exactly `to_le_bytes()[0]`.
+ fn store(&mut self, addr: u16, size: Size, value: u16) {
+ let [lo, hi] = value.to_le_bytes();
+ // The low byte lands at `addr` for both access sizes.
+ self.memory[addr as usize] = lo;
+ if matches!(size, Size::Word) {
+ self.memory[addr.wrapping_add(1) as usize] = hi;
+ }
+ }
+
+ fn eval_cond(&self, cond: Cond) -> bool {
+ match cond {
+ Cond::Eql(a, b) => self.reg[a] == self.reg[b],
+ Cond::Neq(a, b) => self.reg[a] != self.reg[b],
+ Cond::Test(a, b) => self.reg[a] & self.reg[b] != 0,
+ Cond::TestNot(a, b) => self.reg[a] & self.reg[b] == 0,
+ Cond::Lt(a, b) => (self.reg[a] as i16) < self.reg[b] as i16,
+ Cond::Ult(a, b) => self.reg[a] < self.reg[b],