// Compiler.cpp
  1. /*
  2. * Copyright (c) 2023, Andreas Kling <kling@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/OwnPtr.h>
  7. #include <LibJS/Bytecode/Instruction.h>
  8. #include <LibJS/Bytecode/Interpreter.h>
  9. #include <LibJS/JIT/Compiler.h>
  10. #include <LibJS/Runtime/VM.h>
  11. #include <LibJS/Runtime/ValueInlines.h>
  12. #include <sys/mman.h>
  13. #include <unistd.h>
// Evaluates `expression` (a fallible ThrowCompletionOr-style value). On error,
// stores the thrown value in the VM's exception register and returns {} from
// the enclosing native helper; on success, yields the unwrapped value.
// NOTE: Requires a variable named `vm` (of type VM&) in scope at the use site,
// and only works inside functions whose `return {}` is valid (e.g. returning Value).
// Implemented as a GNU statement expression so it can be used as an rvalue.
#define TRY_OR_SET_EXCEPTION(expression) \
({ \
/* Ignore -Wshadow to allow nesting the macro. */ \
AK_IGNORE_DIAGNOSTIC("-Wshadow", \
auto&& _temporary_result = (expression)); \
static_assert(!::AK::Detail::IsLvalueReference<decltype(_temporary_result.release_value())>, \
"Do not return a reference from a fallible expression"); \
if (_temporary_result.is_error()) [[unlikely]] { \
vm.bytecode_interpreter().reg(Bytecode::Register::exception()) = _temporary_result.release_error().value().value(); \
return {}; \
} \
_temporary_result.release_value(); \
})
  27. namespace JS::JIT {
  28. void Compiler::store_vm_register(Bytecode::Register dst, Assembler::Reg src)
  29. {
  30. m_assembler.mov(
  31. Assembler::Operand::Mem64BaseAndOffset(REGISTER_ARRAY_BASE, dst.index() * sizeof(Value)),
  32. Assembler::Operand::Register(src));
  33. }
  34. void Compiler::load_vm_register(Assembler::Reg dst, Bytecode::Register src)
  35. {
  36. m_assembler.mov(
  37. Assembler::Operand::Register(dst),
  38. Assembler::Operand::Mem64BaseAndOffset(REGISTER_ARRAY_BASE, src.index() * sizeof(Value)));
  39. }
  40. void Compiler::store_vm_local(size_t dst, Assembler::Reg src)
  41. {
  42. m_assembler.mov(
  43. Assembler::Operand::Mem64BaseAndOffset(LOCALS_ARRAY_BASE, dst * sizeof(Value)),
  44. Assembler::Operand::Register(src));
  45. }
  46. void Compiler::load_vm_local(Assembler::Reg dst, size_t src)
  47. {
  48. m_assembler.mov(
  49. Assembler::Operand::Register(dst),
  50. Assembler::Operand::Mem64BaseAndOffset(LOCALS_ARRAY_BASE, src * sizeof(Value)));
  51. }
  52. void Compiler::compile_load_immediate(Bytecode::Op::LoadImmediate const& op)
  53. {
  54. m_assembler.mov(
  55. Assembler::Operand::Register(GPR0),
  56. Assembler::Operand::Imm64(op.value().encoded()));
  57. store_vm_register(Bytecode::Register::accumulator(), GPR0);
  58. }
  59. void Compiler::compile_load(Bytecode::Op::Load const& op)
  60. {
  61. load_vm_register(GPR0, op.src());
  62. store_vm_register(Bytecode::Register::accumulator(), GPR0);
  63. }
  64. void Compiler::compile_store(Bytecode::Op::Store const& op)
  65. {
  66. load_vm_register(GPR0, Bytecode::Register::accumulator());
  67. store_vm_register(op.dst(), GPR0);
  68. }
  69. void Compiler::compile_get_local(Bytecode::Op::GetLocal const& op)
  70. {
  71. load_vm_local(GPR0, op.index());
  72. store_vm_register(Bytecode::Register::accumulator(), GPR0);
  73. }
  74. void Compiler::compile_set_local(Bytecode::Op::SetLocal const& op)
  75. {
  76. load_vm_register(GPR0, Bytecode::Register::accumulator());
  77. store_vm_local(op.index(), GPR0);
  78. }
  79. void Compiler::compile_jump(Bytecode::Op::Jump const& op)
  80. {
  81. m_assembler.jump(const_cast<Bytecode::BasicBlock&>(op.true_target()->block()));
  82. }
  83. static bool cxx_to_boolean(VM&, Value value)
  84. {
  85. return value.to_boolean();
  86. }
// Emits code that converts the JS::Value in `src` into a 0/1 boolean in `dst`.
// Fast path: if the value's tag (its top 16 bits, exposed by the >> 48) equals
// BOOLEAN_TAG, the result is simply bit 0 of the value. Any other tag falls
// back to the cxx_to_boolean() native helper.
// Clobbers `dst`, and on the slow path whatever native_call/ARG1/RET touch.
void Compiler::compile_to_boolean(Assembler::Reg dst, Assembler::Reg src)
{
    // dst = src;
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(src));

    // dst >>= 48;  (isolate the 16-bit tag)
    m_assembler.shift_right(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Imm8(48));

    // if (dst != BOOLEAN_TAG) goto slow_case;
    auto slow_case = m_assembler.make_label();
    m_assembler.jump_if_not_equal(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Imm32(BOOLEAN_TAG),
        slow_case);

    // Fast path for JS::Value booleans.

    // dst = src;  (re-fetch; dst currently holds the tag)
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(src));

    // dst &= 1;  (the boolean payload is bit 0)
    m_assembler.bitwise_and(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Imm32(1));

    // goto end;
    auto end = m_assembler.jump();

    // slow_case: // call C++ helper
    slow_case.link(m_assembler);
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Register(src));
    m_assembler.native_call((void*)cxx_to_boolean);
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(RET));

    // end:
    end.link(m_assembler);
}
  126. void Compiler::compile_jump_conditional(Bytecode::Op::JumpConditional const& op)
  127. {
  128. load_vm_register(GPR1, Bytecode::Register::accumulator());
  129. compile_to_boolean(GPR0, GPR1);
  130. m_assembler.jump_conditional(GPR0,
  131. const_cast<Bytecode::BasicBlock&>(op.true_target()->block()),
  132. const_cast<Bytecode::BasicBlock&>(op.false_target()->block()));
  133. }
  134. [[maybe_unused]] static Value cxx_less_than(VM& vm, Value lhs, Value rhs)
  135. {
  136. // FIXME: Handle exceptions!
  137. return MUST(less_than(vm, lhs, rhs));
  138. }
  139. void Compiler::compile_less_than(Bytecode::Op::LessThan const& op)
  140. {
  141. load_vm_register(ARG1, op.lhs());
  142. load_vm_register(ARG2, Bytecode::Register::accumulator());
  143. m_assembler.native_call((void*)cxx_less_than);
  144. store_vm_register(Bytecode::Register::accumulator(), RET);
  145. check_exception();
  146. }
  147. [[maybe_unused]] static Value cxx_increment(VM& vm, Value value)
  148. {
  149. auto old_value = TRY_OR_SET_EXCEPTION(value.to_numeric(vm));
  150. if (old_value.is_number())
  151. return Value(old_value.as_double() + 1);
  152. return BigInt::create(vm, old_value.as_bigint().big_integer().plus(Crypto::SignedBigInteger { 1 }));
  153. }
  154. void Compiler::compile_increment(Bytecode::Op::Increment const&)
  155. {
  156. load_vm_register(ARG1, Bytecode::Register::accumulator());
  157. m_assembler.native_call((void*)cxx_increment);
  158. store_vm_register(Bytecode::Register::accumulator(), RET);
  159. check_exception();
  160. }
// Emits the post-instruction exception check. The emitted code:
//   1. If the VM's exception register holds the empty Value, does nothing.
//   2. Otherwise inspects the current unwind context (layout documented in
//      push_unwind_context: { u64 valid; u64 handler; u64 finalizer; }):
//      - context not valid -> exit() the JITted function;
//      - handler present   -> accumulator = exception; exception = empty; jump to handler;
//      - finalizer present -> jump to finalizer;
//      - neither           -> verify_not_reached() (crash; should be impossible).
void Compiler::check_exception()
{
    // if (exception.is_empty()) goto no_exception;
    // (Value().encoded() is the canonical "empty" bit pattern.)
    load_vm_register(GPR0, Bytecode::Register::exception());
    m_assembler.mov(Assembler::Operand::Register(GPR1), Assembler::Operand::Imm64(Value().encoded()));
    auto no_exception = m_assembler.make_label();
    m_assembler.jump_if_equal(Assembler::Operand::Register(GPR0), Assembler::Operand::Register(GPR1), no_exception);

    // We have an exception!

    // if (!unwind_context.valid) return;
    auto handle_exception = m_assembler.make_label();
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 0)); // offset 0: `valid`
    m_assembler.jump_if_not_equal(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm32(0),
        handle_exception);
    m_assembler.exit();

    // handle_exception:
    handle_exception.link(m_assembler);

    // if (unwind_context.handler) {
    //     accumulator = exception;
    //     exception = Value();
    //     goto handler;
    // }
    auto no_handler = m_assembler.make_label();
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 8)); // offset 8: `handler`
    m_assembler.jump_if_equal(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm32(0),
        no_handler);
    load_vm_register(GPR1, Bytecode::Register::exception());
    store_vm_register(Bytecode::Register::accumulator(), GPR1);
    m_assembler.mov(
        Assembler::Operand::Register(GPR1),
        Assembler::Operand::Imm64(Value().encoded()));
    store_vm_register(Bytecode::Register::exception(), GPR1);
    // GPR0 still holds the handler's absolute address; jump to it.
    m_assembler.jump(Assembler::Operand::Register(GPR0));

    // no_handler:
    no_handler.link(m_assembler);

    // if (unwind_context.finalizer) goto finalizer;
    auto no_finalizer = m_assembler.make_label();
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 16)); // offset 16: `finalizer`
    m_assembler.jump_if_equal(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm32(0),
        no_finalizer);
    m_assembler.jump(Assembler::Operand::Register(GPR0));

    // no_finalizer:
    // NOTE: No catch and no finally!? Crash.
    no_finalizer.link(m_assembler);
    m_assembler.verify_not_reached();

    // no_exception:
    no_exception.link(m_assembler);
}
// Emits code that pushes a new unwind context onto the native stack and points
// UNWIND_CONTEXT_BASE at it. `handler`/`finalizer` block addresses are not
// known yet at emission time, so 0 placeholders are pushed and their byte
// positions are recorded in the target blocks' absolute_references_to_here for
// patching in compile(). Total stack cost: 32 bytes (3 x 8-byte fields + 8
// alignment bytes); pop_unwind_context() must undo exactly this.
void Compiler::push_unwind_context(bool valid, Optional<Bytecode::Label> const& handler, Optional<Bytecode::Label> const& finalizer)
{
    // Put this on the stack, and then point UNWIND_CONTEXT_BASE at it.
    // struct {
    //     u64 valid;
    //     u64 handler;
    //     u64 finalizer;
    // };

    // push finalizer (patched later)
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm64(0));
    // The imm64 we just emitted occupies the last 8 bytes of the output;
    // record that position so the finalizer's address can be patched in.
    if (finalizer.has_value())
        const_cast<Bytecode::BasicBlock&>(finalizer.value().block()).absolute_references_to_here.append(m_assembler.m_output.size() - 8);
    m_assembler.push(Assembler::Operand::Register(GPR0));

    // push handler (patched later)
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm64(0));
    if (handler.has_value())
        const_cast<Bytecode::BasicBlock&>(handler.value().block()).absolute_references_to_here.append(m_assembler.m_output.size() - 8);
    m_assembler.push(Assembler::Operand::Register(GPR0));

    // push valid
    m_assembler.push(Assembler::Operand::Imm32(valid));

    // UNWIND_CONTEXT_BASE = STACK_POINTER
    m_assembler.mov(
        Assembler::Operand::Register(UNWIND_CONTEXT_BASE),
        Assembler::Operand::Register(STACK_POINTER));

    // align stack pointer
    m_assembler.sub(Assembler::Operand::Register(STACK_POINTER), Assembler::Operand::Imm8(8));
}
// Emits code that undoes push_unwind_context(): discards the 32 bytes it
// reserved (24-byte struct + 8 alignment bytes) from the native stack.
// NOTE(review): adding 32 to UNWIND_CONTEXT_BASE assumes the previous context
// sits immediately above this one on the stack, with nothing pushed in
// between — confirm this invariant holds for all emission paths.
void Compiler::pop_unwind_context()
{
    m_assembler.add(Assembler::Operand::Register(STACK_POINTER), Assembler::Operand::Imm8(32));
    m_assembler.add(Assembler::Operand::Register(UNWIND_CONTEXT_BASE), Assembler::Operand::Imm8(32));
}
  256. void Compiler::compile_enter_unwind_context(Bytecode::Op::EnterUnwindContext const& op)
  257. {
  258. push_unwind_context(true, op.handler_target(), op.finalizer_target());
  259. m_assembler.jump(const_cast<Bytecode::BasicBlock&>(op.entry_point().block()));
  260. }
  261. void Compiler::compile_leave_unwind_context(Bytecode::Op::LeaveUnwindContext const&)
  262. {
  263. pop_unwind_context();
  264. }
  265. OwnPtr<NativeExecutable> Compiler::compile(Bytecode::Executable const& bytecode_executable)
  266. {
  267. if (getenv("LIBJS_NO_JIT"))
  268. return nullptr;
  269. Compiler compiler;
  270. compiler.m_assembler.enter();
  271. compiler.m_assembler.mov(
  272. Assembler::Operand::Register(REGISTER_ARRAY_BASE),
  273. Assembler::Operand::Register(ARG1));
  274. compiler.m_assembler.mov(
  275. Assembler::Operand::Register(LOCALS_ARRAY_BASE),
  276. Assembler::Operand::Register(ARG2));
  277. compiler.push_unwind_context(false, {}, {});
  278. for (auto& block : bytecode_executable.basic_blocks) {
  279. block->offset = compiler.m_output.size();
  280. auto it = Bytecode::InstructionStreamIterator(block->instruction_stream());
  281. while (!it.at_end()) {
  282. auto const& op = *it;
  283. switch (op.type()) {
  284. case Bytecode::Instruction::Type::LoadImmediate:
  285. compiler.compile_load_immediate(static_cast<Bytecode::Op::LoadImmediate const&>(op));
  286. break;
  287. case Bytecode::Instruction::Type::Store:
  288. compiler.compile_store(static_cast<Bytecode::Op::Store const&>(op));
  289. break;
  290. case Bytecode::Instruction::Type::Load:
  291. compiler.compile_load(static_cast<Bytecode::Op::Load const&>(op));
  292. break;
  293. case Bytecode::Instruction::Type::GetLocal:
  294. compiler.compile_get_local(static_cast<Bytecode::Op::GetLocal const&>(op));
  295. break;
  296. case Bytecode::Instruction::Type::SetLocal:
  297. compiler.compile_set_local(static_cast<Bytecode::Op::SetLocal const&>(op));
  298. break;
  299. case Bytecode::Instruction::Type::Jump:
  300. compiler.compile_jump(static_cast<Bytecode::Op::Jump const&>(op));
  301. break;
  302. case Bytecode::Instruction::Type::JumpConditional:
  303. compiler.compile_jump_conditional(static_cast<Bytecode::Op::JumpConditional const&>(op));
  304. break;
  305. case Bytecode::Instruction::Type::LessThan:
  306. compiler.compile_less_than(static_cast<Bytecode::Op::LessThan const&>(op));
  307. break;
  308. case Bytecode::Instruction::Type::Increment:
  309. compiler.compile_increment(static_cast<Bytecode::Op::Increment const&>(op));
  310. break;
  311. case Bytecode::Instruction::Type::EnterUnwindContext:
  312. compiler.compile_enter_unwind_context(static_cast<Bytecode::Op::EnterUnwindContext const&>(op));
  313. break;
  314. case Bytecode::Instruction::Type::LeaveUnwindContext:
  315. compiler.compile_leave_unwind_context(static_cast<Bytecode::Op::LeaveUnwindContext const&>(op));
  316. break;
  317. default:
  318. dbgln("JIT compilation failed: {}", bytecode_executable.name);
  319. dbgln("Unsupported bytecode op: {}", op.to_deprecated_string(bytecode_executable));
  320. return nullptr;
  321. }
  322. ++it;
  323. }
  324. if (!block->is_terminated())
  325. compiler.m_assembler.exit();
  326. }
  327. auto* executable_memory = mmap(nullptr, compiler.m_output.size(), PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE, 0, 0);
  328. if (executable_memory == MAP_FAILED) {
  329. perror("mmap");
  330. return nullptr;
  331. }
  332. for (auto& block : bytecode_executable.basic_blocks) {
  333. // Patch up all the jumps
  334. for (auto& jump : block->jumps_to_here) {
  335. auto offset = block->offset - jump - 4;
  336. compiler.m_output[jump + 0] = (offset >> 0) & 0xff;
  337. compiler.m_output[jump + 1] = (offset >> 8) & 0xff;
  338. compiler.m_output[jump + 2] = (offset >> 16) & 0xff;
  339. compiler.m_output[jump + 3] = (offset >> 24) & 0xff;
  340. }
  341. // Patch up all the absolute references
  342. for (auto& absolute_reference : block->absolute_references_to_here) {
  343. auto offset = bit_cast<u64>(executable_memory) + block->offset;
  344. compiler.m_output[absolute_reference + 0] = (offset >> 0) & 0xff;
  345. compiler.m_output[absolute_reference + 1] = (offset >> 8) & 0xff;
  346. compiler.m_output[absolute_reference + 2] = (offset >> 16) & 0xff;
  347. compiler.m_output[absolute_reference + 3] = (offset >> 24) & 0xff;
  348. compiler.m_output[absolute_reference + 4] = (offset >> 32) & 0xff;
  349. compiler.m_output[absolute_reference + 5] = (offset >> 40) & 0xff;
  350. compiler.m_output[absolute_reference + 6] = (offset >> 48) & 0xff;
  351. compiler.m_output[absolute_reference + 7] = (offset >> 56) & 0xff;
  352. }
  353. }
  354. write(STDOUT_FILENO, compiler.m_output.data(), compiler.m_output.size());
  355. memcpy(executable_memory, compiler.m_output.data(), compiler.m_output.size());
  356. mprotect(executable_memory, compiler.m_output.size(), PROT_READ | PROT_EXEC);
  357. return make<NativeExecutable>(executable_memory, compiler.m_output.size());
  358. }
  359. }