Compiler.cpp

/*
 * Copyright (c) 2023, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/OwnPtr.h>
#include <LibJS/Bytecode/Instruction.h>
#include <LibJS/JIT/Compiler.h>
#include <LibJS/Runtime/ValueInlines.h>
#include <sys/mman.h>
#include <unistd.h>

namespace JS::JIT {
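
// Helpers for moving JS::Value slots between the bytecode VM's storage and native
// general-purpose registers. REGISTER_ARRAY_BASE and LOCALS_ARRAY_BASE hold the base
// addresses of the VM register array and the locals array for the current call
// (see the prologue emitted in Compiler::compile() below).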
void Compiler::store_vm_register(Bytecode::Register dst, Assembler::Reg src)
{
    m_assembler.mov(
        Assembler::Operand::Mem64BaseAndOffset(REGISTER_ARRAY_BASE, dst.index() * sizeof(Value)),
        Assembler::Operand::Register(src));
}

void Compiler::load_vm_register(Assembler::Reg dst, Bytecode::Register src)
{
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Mem64BaseAndOffset(REGISTER_ARRAY_BASE, src.index() * sizeof(Value)));
}

void Compiler::store_vm_local(size_t dst, Assembler::Reg src)
{
    m_assembler.mov(
        Assembler::Operand::Mem64BaseAndOffset(LOCALS_ARRAY_BASE, dst * sizeof(Value)),
        Assembler::Operand::Register(src));
}

void Compiler::load_vm_local(Assembler::Reg dst, size_t src)
{
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Mem64BaseAndOffset(LOCALS_ARRAY_BASE, src * sizeof(Value)));
}
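
// Each compile_* function below lowers one bytecode instruction. Most of them simply
// shuffle Values between the accumulator register and other VM registers/locals via GPR0.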
void Compiler::compile_load_immediate(Bytecode::Op::LoadImmediate const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm64(op.value().encoded()));
    store_vm_register(Bytecode::Register::accumulator(), GPR0);
}

void Compiler::compile_load(Bytecode::Op::Load const& op)
{
    load_vm_register(GPR0, op.src());
    store_vm_register(Bytecode::Register::accumulator(), GPR0);
}

void Compiler::compile_store(Bytecode::Op::Store const& op)
{
    load_vm_register(GPR0, Bytecode::Register::accumulator());
    store_vm_register(op.dst(), GPR0);
}

void Compiler::compile_get_local(Bytecode::Op::GetLocal const& op)
{
    load_vm_local(GPR0, op.index());
    store_vm_register(Bytecode::Register::accumulator(), GPR0);
}

void Compiler::compile_set_local(Bytecode::Op::SetLocal const& op)
{
    load_vm_register(GPR0, Bytecode::Register::accumulator());
    store_vm_local(op.index(), GPR0);
}
void Compiler::compile_jump(Bytecode::Op::Jump const& op)
{
    m_assembler.jump(const_cast<Bytecode::BasicBlock&>(op.true_target()->block()));
}
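
// ToBoolean is compiled with a fast path for Values that already carry the boolean tag
// in their top 16 bits; anything else falls back to a call into the C++ helper below
// via native_call().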
static bool cxx_to_boolean(VM&, Value value)
{
    return value.to_boolean();
}

void Compiler::compile_to_boolean(Assembler::Reg dst, Assembler::Reg src)
{
    // dst = src;
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(src));

    // dst >>= 48;
    m_assembler.shift_right(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Imm8(48));

    // if (dst != BOOLEAN_TAG) goto slow_case;
    auto slow_case = m_assembler.make_label();
    m_assembler.jump_if_not_equal(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Imm32(BOOLEAN_TAG),
        slow_case);

    // Fast path for JS::Value booleans.

    // dst = src;
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(src));

    // dst &= 1;
    m_assembler.bitwise_and(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Imm32(1));

    // goto end;
    auto end = m_assembler.jump();

    // slow_case: // call C++ helper
    slow_case.link(m_assembler);
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Register(src));
    m_assembler.native_call((void*)cxx_to_boolean);
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(RET));

    // end:
    end.link(m_assembler);
}
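
// JumpConditional: materialize the accumulator's truthiness in GPR0, then branch to the
// true or false target block.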
void Compiler::compile_jump_conditional(Bytecode::Op::JumpConditional const& op)
{
    load_vm_register(GPR1, Bytecode::Register::accumulator());
    compile_to_boolean(GPR0, GPR1);

    m_assembler.jump_conditional(GPR0,
        const_cast<Bytecode::BasicBlock&>(op.true_target()->block()),
        const_cast<Bytecode::BasicBlock&>(op.false_target()->block()));
}
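
// Operations without a dedicated fast path are compiled as calls into "slow case" C++
// helpers via native_call(): operands are loaded into the ARG registers and the result
// (RET) is stored back into the accumulator.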
[[maybe_unused]] static Value cxx_less_than(VM& vm, Value lhs, Value rhs)
{
    // FIXME: Handle exceptions!
    return MUST(less_than(vm, lhs, rhs));
}

void Compiler::compile_less_than(Bytecode::Op::LessThan const& op)
{
    load_vm_register(ARG1, op.lhs());
    load_vm_register(ARG2, Bytecode::Register::accumulator());
    m_assembler.native_call((void*)cxx_less_than);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}
[[maybe_unused]] static Value cxx_increment(VM& vm, Value value)
{
    // FIXME: Handle exceptions!
    auto old_value = MUST(value.to_numeric(vm));
    if (old_value.is_number())
        return Value(old_value.as_double() + 1);
    return BigInt::create(vm, old_value.as_bigint().big_integer().plus(Crypto::SignedBigInteger { 1 }));
}

void Compiler::compile_increment(Bytecode::Op::Increment const&)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    m_assembler.native_call((void*)cxx_increment);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}
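
// Compile a whole bytecode executable into native code. Returns nullptr when the JIT is
// disabled (LIBJS_NO_JIT) or when an unsupported opcode is encountered, presumably so the
// caller can fall back to the bytecode interpreter.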
OwnPtr<NativeExecutable> Compiler::compile(Bytecode::Executable const& bytecode_executable)
{
    if (getenv("LIBJS_NO_JIT"))
        return nullptr;

    Compiler compiler;

    compiler.m_assembler.enter();

    // Prologue: stash the incoming register-array and locals-array pointers (ARG1, ARG2)
    // into the dedicated base registers used by the load/store helpers above.
    compiler.m_assembler.mov(
        Assembler::Operand::Register(REGISTER_ARRAY_BASE),
        Assembler::Operand::Register(ARG1));

    compiler.m_assembler.mov(
        Assembler::Operand::Register(LOCALS_ARRAY_BASE),
        Assembler::Operand::Register(ARG2));
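
    // Lower each basic block, one instruction at a time. Hitting an unsupported opcode
    // aborts JIT compilation for the whole executable.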
    for (auto& block : bytecode_executable.basic_blocks) {
        block->offset = compiler.m_output.size();
        auto it = Bytecode::InstructionStreamIterator(block->instruction_stream());
        while (!it.at_end()) {
            auto const& op = *it;
            switch (op.type()) {
            case Bytecode::Instruction::Type::LoadImmediate:
                compiler.compile_load_immediate(static_cast<Bytecode::Op::LoadImmediate const&>(op));
                break;
            case Bytecode::Instruction::Type::Store:
                compiler.compile_store(static_cast<Bytecode::Op::Store const&>(op));
                break;
            case Bytecode::Instruction::Type::Load:
                compiler.compile_load(static_cast<Bytecode::Op::Load const&>(op));
                break;
            case Bytecode::Instruction::Type::GetLocal:
                compiler.compile_get_local(static_cast<Bytecode::Op::GetLocal const&>(op));
                break;
            case Bytecode::Instruction::Type::SetLocal:
                compiler.compile_set_local(static_cast<Bytecode::Op::SetLocal const&>(op));
                break;
            case Bytecode::Instruction::Type::Jump:
                compiler.compile_jump(static_cast<Bytecode::Op::Jump const&>(op));
                break;
            case Bytecode::Instruction::Type::JumpConditional:
                compiler.compile_jump_conditional(static_cast<Bytecode::Op::JumpConditional const&>(op));
                break;
            case Bytecode::Instruction::Type::LessThan:
                compiler.compile_less_than(static_cast<Bytecode::Op::LessThan const&>(op));
                break;
            case Bytecode::Instruction::Type::Increment:
                compiler.compile_increment(static_cast<Bytecode::Op::Increment const&>(op));
                break;
            default:
                dbgln("JIT compilation failed: {}", bytecode_executable.name);
                dbgln("Unsupported bytecode op: {}", op.to_deprecated_string(bytecode_executable));
                return nullptr;
            }
            ++it;
        }
        if (!block->is_terminated())
            compiler.m_assembler.exit();
    }
    // Patch up all the jumps: each entry in jumps_to_here is the offset of a 4-byte
    // displacement field. Now that the block's final output offset is known, fill in the
    // little-endian rel32 displacement (relative to the end of the field).
    for (auto& block : bytecode_executable.basic_blocks) {
        for (auto& jump : block->jumps_to_here) {
            auto offset = block->offset - jump - 4;
            compiler.m_output[jump + 0] = (offset >> 0) & 0xff;
            compiler.m_output[jump + 1] = (offset >> 8) & 0xff;
            compiler.m_output[jump + 2] = (offset >> 16) & 0xff;
            compiler.m_output[jump + 3] = (offset >> 24) & 0xff;
        }
    }
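
    // Dump the raw machine code to stdout (presumably handy for feeding into a disassembler
    // while debugging), then copy it into an anonymous mapping and flip the pages to
    // read + execute before wrapping them in a NativeExecutable.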
    write(STDOUT_FILENO, compiler.m_output.data(), compiler.m_output.size());

    auto* executable_memory = mmap(nullptr, compiler.m_output.size(), PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE, 0, 0);
    if (executable_memory == MAP_FAILED) {
        perror("mmap");
        return nullptr;
    }
    memcpy(executable_memory, compiler.m_output.data(), compiler.m_output.size());
    mprotect(executable_memory, compiler.m_output.size(), PROT_READ | PROT_EXEC);

    return make<NativeExecutable>(executable_memory, compiler.m_output.size());
}

}