|
@@ -536,6 +536,48 @@ void Compiler::compile_add(Bytecode::Op::Add const& op)
|
|
end.link(m_assembler);
|
|
end.link(m_assembler);
|
|
}
|
|
}
|
|
|
|
|
|
|
|
// Slow-path helper for the Sub bytecode op: runs the full JS subtraction
// semantics via sub(). On a thrown completion, TRY_OR_SET_EXCEPTION stores
// the exception on the VM and returns early with a sentinel value.
static Value cxx_sub(VM& vm, Value lhs, Value rhs)
{
    return TRY_OR_SET_EXCEPTION(sub(vm, lhs, rhs));
}
|
|
|
|
// Emits native code for Bytecode::Op::Sub:
//   - fast path: if both operands are Int32-tagged, do a 32-bit subtract
//     inline and re-tag the result,
//   - slow path (non-Int32 operands or 32-bit overflow): call cxx_sub.
// ARG1 = lhs register, ARG2 = accumulator (rhs); result goes to the accumulator.
void Compiler::compile_sub(Bytecode::Op::Sub const& op)
{
    load_vm_register(ARG1, op.lhs());
    load_accumulator(ARG2);

    Assembler::Label end {};
    Assembler::Label slow_case {};

    branch_if_both_int32(ARG1, ARG2, [&] {
        // GPR0 = ARG1 - ARG2 (32-bit)
        // if (overflow) goto slow_case;
        // (copy first so ARG1 is still intact if we take the slow path)
        m_assembler.mov(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Register(ARG1));
        m_assembler.sub32(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Register(ARG2),
            slow_case);

        // accumulator = GPR0 | SHIFTED_INT32_TAG;
        // i.e. re-box the raw 32-bit result as an Int32-tagged Value.
        m_assembler.mov(
            Assembler::Operand::Register(GPR1),
            Assembler::Operand::Imm(SHIFTED_INT32_TAG));
        m_assembler.bitwise_or(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Register(GPR1));
        store_accumulator(GPR0);
        m_assembler.jump(end);
    });

    // Slow path: full JS subtraction semantics in C++, then check for
    // a pending exception before continuing.
    slow_case.link(m_assembler);
    native_call((void*)cxx_sub);
    store_accumulator(RET);
    check_exception();
    end.link(m_assembler);
}
|
|
|
|
+
|
|
static Value cxx_less_than(VM& vm, Value lhs, Value rhs)
|
|
static Value cxx_less_than(VM& vm, Value lhs, Value rhs)
|
|
{
|
|
{
|
|
return TRY_OR_SET_EXCEPTION(less_than(vm, lhs, rhs));
|
|
return TRY_OR_SET_EXCEPTION(less_than(vm, lhs, rhs));
|