/*
 * Copyright (c) 2023, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/OwnPtr.h>
#include <AK/Platform.h>
#include <LibJS/Bytecode/CommonImplementations.h>
#include <LibJS/Bytecode/Instruction.h>
#include <LibJS/Bytecode/Interpreter.h>
#include <LibJS/Bytecode/RegexTable.h>
#include <LibJS/JIT/Compiler.h>
#include <LibJS/Runtime/AbstractOperations.h>
#include <LibJS/Runtime/Array.h>
#include <LibJS/Runtime/DeclarativeEnvironment.h>
#include <LibJS/Runtime/ECMAScriptFunctionObject.h>
#include <LibJS/Runtime/VM.h>
#include <LibJS/Runtime/ValueInlines.h>
#include <sys/mman.h>
#include <unistd.h>

#if ARCH(X86_64)

#    define LOG_JIT_SUCCESS 1
#    define LOG_JIT_FAILURE 1
#    define DUMP_JIT_MACHINE_CODE_TO_STDOUT 0
#    define DUMP_JIT_DISASSEMBLY 0

#    define TRY_OR_SET_EXCEPTION(expression)                                                                                        \
        ({                                                                                                                           \
            /* Ignore -Wshadow to allow nesting the macro. */                                                                        \
            AK_IGNORE_DIAGNOSTIC("-Wshadow",                                                                                         \
                auto&& _temporary_result = (expression));                                                                            \
            static_assert(!::AK::Detail::IsLvalueReference<decltype(_temporary_result.release_value())>,                             \
                "Do not return a reference from a fallible expression");                                                             \
            if (_temporary_result.is_error()) [[unlikely]] {                                                                         \
                vm.bytecode_interpreter().reg(Bytecode::Register::exception()) = _temporary_result.release_error().value().value(); \
                return {};                                                                                                           \
            }                                                                                                                        \
            _temporary_result.release_value();                                                                                       \
        })
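
// The cxx_* functions below are plain C++ helpers that the generated machine code
// calls into for anything without an inline fast path. A typical body is just
// `return TRY_OR_SET_EXCEPTION(value.to_numeric(vm));` (see cxx_to_numeric below):
// on error the macro stores the thrown value in the VM's exception register and
// makes the helper return an empty Value instead of propagating a completion.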
namespace JS::JIT {

void Compiler::store_vm_register(Bytecode::Register dst, Assembler::Reg src)
{
    m_assembler.mov(
        Assembler::Operand::Mem64BaseAndOffset(REGISTER_ARRAY_BASE, dst.index() * sizeof(Value)),
        Assembler::Operand::Register(src));
}

void Compiler::load_vm_register(Assembler::Reg dst, Bytecode::Register src)
{
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Mem64BaseAndOffset(REGISTER_ARRAY_BASE, src.index() * sizeof(Value)));
}

void Compiler::store_vm_local(size_t dst, Assembler::Reg src)
{
    m_assembler.mov(
        Assembler::Operand::Mem64BaseAndOffset(LOCALS_ARRAY_BASE, dst * sizeof(Value)),
        Assembler::Operand::Register(src));
}

void Compiler::load_vm_local(Assembler::Reg dst, size_t src)
{
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Mem64BaseAndOffset(LOCALS_ARRAY_BASE, src * sizeof(Value)));
}

void Compiler::compile_load_immediate(Bytecode::Op::LoadImmediate const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm(op.value().encoded()));
    store_vm_register(Bytecode::Register::accumulator(), GPR0);
}

void Compiler::compile_load(Bytecode::Op::Load const& op)
{
    load_vm_register(GPR0, op.src());
    store_vm_register(Bytecode::Register::accumulator(), GPR0);
}

void Compiler::compile_store(Bytecode::Op::Store const& op)
{
    load_vm_register(GPR0, Bytecode::Register::accumulator());
    store_vm_register(op.dst(), GPR0);
}

void Compiler::compile_get_local(Bytecode::Op::GetLocal const& op)
{
    load_vm_local(GPR0, op.index());
    store_vm_register(Bytecode::Register::accumulator(), GPR0);
}

void Compiler::compile_set_local(Bytecode::Op::SetLocal const& op)
{
    load_vm_register(GPR0, Bytecode::Register::accumulator());
    store_vm_local(op.index(), GPR0);
}

static Value cxx_typeof_local(VM& vm, Value value)
{
    return PrimitiveString::create(vm, value.typeof());
}

void Compiler::compile_typeof_local(Bytecode::Op::TypeofLocal const& op)
{
    load_vm_local(ARG1, op.index());
    native_call((void*)cxx_typeof_local);
    store_vm_register(Bytecode::Register::accumulator(), GPR0);
}

void Compiler::compile_jump(Bytecode::Op::Jump const& op)
{
    m_assembler.jump(label_for(op.true_target()->block()));
}

static bool cxx_to_boolean(VM&, Value value)
{
    return value.to_boolean();
}

void Compiler::compile_to_boolean(Assembler::Reg dst, Assembler::Reg src)
{
    // dst = src;
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(src));

    // dst >>= 48;
    m_assembler.shift_right(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Imm(48));

    // if (dst != BOOLEAN_TAG) goto slow_case;
    Assembler::Label slow_case {};
    m_assembler.jump_if(
        Assembler::Operand::Register(dst),
        Assembler::Condition::NotEqualTo,
        Assembler::Operand::Imm(BOOLEAN_TAG),
        slow_case);

    // Fast path for JS::Value booleans.

    // dst = src;
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(src));

    // goto end;
    auto end = m_assembler.jump();

    // slow_case: // call C++ helper
    slow_case.link(m_assembler);

    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Register(src));
    native_call((void*)cxx_to_boolean);
    m_assembler.mov(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Register(RET));

    // end:
    end.link(m_assembler);

    // dst &= 1;
    m_assembler.bitwise_and(
        Assembler::Operand::Register(dst),
        Assembler::Operand::Imm(1));
}
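
// NOTE: JS::Value is NaN-boxed, so the type tag lives in the upper 16 bits of the
//       encoded 64-bit value. Shifting right by 48 isolates that tag (as in the
//       BOOLEAN_TAG check above), and for booleans the low bit of the payload is
//       the actual true/false bit, hence the final `dst &= 1`.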
void Compiler::compile_jump_conditional(Bytecode::Op::JumpConditional const& op)
{
    load_vm_register(GPR1, Bytecode::Register::accumulator());

    compile_to_boolean(GPR0, GPR1);

    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::EqualTo,
        Assembler::Operand::Imm(0),
        label_for(op.false_target()->block()));

    m_assembler.jump(label_for(op.true_target()->block()));
}

void Compiler::compile_jump_nullish(Bytecode::Op::JumpNullish const& op)
{
    load_vm_register(GPR0, Bytecode::Register::accumulator());

    m_assembler.shift_right(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm(48));

    m_assembler.bitwise_and(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm(IS_NULLISH_EXTRACT_PATTERN));

    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::EqualTo,
        Assembler::Operand::Imm(IS_NULLISH_PATTERN),
        label_for(op.true_target()->block()));

    m_assembler.jump(label_for(op.false_target()->block()));
}

[[maybe_unused]] static Value cxx_increment(VM& vm, Value value)
{
    auto old_value = TRY_OR_SET_EXCEPTION(value.to_numeric(vm));
    if (old_value.is_number())
        return Value(old_value.as_double() + 1);
    return BigInt::create(vm, old_value.as_bigint().big_integer().plus(Crypto::SignedBigInteger { 1 }));
}

template<typename Codegen>
void Compiler::branch_if_int32(Assembler::Reg reg, Codegen codegen)
{
    // GPR0 = reg >> 48;
    m_assembler.mov(Assembler::Operand::Register(GPR0), Assembler::Operand::Register(reg));
    m_assembler.shift_right(Assembler::Operand::Register(GPR0), Assembler::Operand::Imm(48));

    Assembler::Label not_int32_case {};
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::NotEqualTo,
        Assembler::Operand::Imm(INT32_TAG),
        not_int32_case);

    codegen();

    not_int32_case.link(m_assembler);
}

template<typename Codegen>
void Compiler::branch_if_both_int32(Assembler::Reg lhs, Assembler::Reg rhs, Codegen codegen)
{
    // GPR0 = lhs >> 48;
    m_assembler.mov(Assembler::Operand::Register(GPR0), Assembler::Operand::Register(lhs));
    m_assembler.shift_right(Assembler::Operand::Register(GPR0), Assembler::Operand::Imm(48));

    // GPR1 = rhs >> 48;
    m_assembler.mov(Assembler::Operand::Register(GPR1), Assembler::Operand::Register(rhs));
    m_assembler.shift_right(Assembler::Operand::Register(GPR1), Assembler::Operand::Imm(48));

    Assembler::Label not_int32_case {};
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::NotEqualTo,
        Assembler::Operand::Imm(INT32_TAG),
        not_int32_case);
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR1),
        Assembler::Condition::NotEqualTo,
        Assembler::Operand::Imm(INT32_TAG),
        not_int32_case);

    codegen();

    not_int32_case.link(m_assembler);
}
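
// The two helpers above implement the guard used by the integer fast paths below:
// a value is treated as an Int32 only if its upper 16 bits equal INT32_TAG, and
// anything else falls through to the generic C++ slow path after the codegen block.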
void Compiler::compile_increment(Bytecode::Op::Increment const&)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());

    Assembler::Label end {};
    Assembler::Label slow_case {};

    branch_if_int32(ARG1, [&] {
        // GPR0 = ARG1 & 0xffffffff;
        m_assembler.mov(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Register(ARG1));
        m_assembler.mov(
            Assembler::Operand::Register(GPR1),
            Assembler::Operand::Imm(0xffffffff));
        m_assembler.bitwise_and(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Register(GPR1));

        // if (GPR0 == 0x7fffffff) goto slow_case;
        m_assembler.jump_if(
            Assembler::Operand::Register(GPR0),
            Assembler::Condition::EqualTo,
            Assembler::Operand::Imm(0x7fffffff),
            slow_case);

        // ARG1 += 1;
        m_assembler.add(
            Assembler::Operand::Register(ARG1),
            Assembler::Operand::Imm(1));

        // accumulator = ARG1;
        store_vm_register(Bytecode::Register::accumulator(), ARG1);
        m_assembler.jump(end);
    });

    slow_case.link(m_assembler);
    native_call((void*)cxx_increment);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();

    end.link(m_assembler);
}

static Value cxx_decrement(VM& vm, Value value)
{
    auto old_value = TRY_OR_SET_EXCEPTION(value.to_numeric(vm));
    if (old_value.is_number())
        return Value(old_value.as_double() - 1);
    return BigInt::create(vm, old_value.as_bigint().big_integer().minus(Crypto::SignedBigInteger { 1 }));
}

void Compiler::compile_decrement(Bytecode::Op::Decrement const&)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    native_call((void*)cxx_decrement);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

void Compiler::check_exception()
{
    // if (!exception.is_empty()) goto m_exception_handler;
    load_vm_register(GPR0, Bytecode::Register::exception());
    m_assembler.mov(Assembler::Operand::Register(GPR1), Assembler::Operand::Imm(Value().encoded()));
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::NotEqualTo,
        Assembler::Operand::Register(GPR1),
        m_exception_handler);
}

void Compiler::handle_exception()
{
    // if (!unwind_context.valid) return;
    Assembler::Label handle_exception {};
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 0));
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::NotEqualTo,
        Assembler::Operand::Imm(0),
        handle_exception);

    jump_to_exit();

    // handle_exception:
    handle_exception.link(m_assembler);

    // if (unwind_context.handler) {
    Assembler::Label no_handler {};
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 8));
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::EqualTo,
        Assembler::Operand::Imm(0),
        no_handler);

    // accumulator = exception;
    load_vm_register(GPR1, Bytecode::Register::exception());
    store_vm_register(Bytecode::Register::accumulator(), GPR1);

    // exception = Value();
    m_assembler.mov(
        Assembler::Operand::Register(GPR1),
        Assembler::Operand::Imm(Value().encoded()));
    store_vm_register(Bytecode::Register::exception(), GPR1);

    // unwind_context.handler = nullptr;
    m_assembler.mov(
        Assembler::Operand::Register(GPR1),
        Assembler::Operand::Imm(0));
    m_assembler.mov(
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 8),
        Assembler::Operand::Register(GPR1));

    // goto handler;
    m_assembler.jump(Assembler::Operand::Register(GPR0));

    // }

    // no_handler:
    no_handler.link(m_assembler);

    // if (unwind_context.finalizer) goto finalizer;
    Assembler::Label no_finalizer {};
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 16));
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::EqualTo,
        Assembler::Operand::Imm(0),
        no_finalizer);

    m_assembler.jump(Assembler::Operand::Register(GPR0));

    // no_finalizer:
    // NOTE: No catch and no finally!? Crash.
    no_finalizer.link(m_assembler);
    m_assembler.verify_not_reached();
}

void Compiler::push_unwind_context(bool valid, Optional<Bytecode::Label> const& handler, Optional<Bytecode::Label> const& finalizer)
{
    // Put this on the stack, and then point UNWIND_CONTEXT_BASE at it.
    // struct {
    //     u64 valid;
    //     u64 handler;
    //     u64 finalizer;
    // };

    if (finalizer.has_value()) {
        // push finalizer (patched later)
        m_assembler.mov(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Imm(0),
            Assembler::Patchable::Yes);
        block_data_for(finalizer.value().block()).absolute_references_to_here.append(m_assembler.m_output.size() - 8);
        m_assembler.push(Assembler::Operand::Register(GPR0));
    } else {
        m_assembler.push(Assembler::Operand::Imm(0));
    }

    if (handler.has_value()) {
        // push handler (patched later)
        m_assembler.mov(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Imm(0),
            Assembler::Patchable::Yes);
        block_data_for(handler.value().block()).absolute_references_to_here.append(m_assembler.m_output.size() - 8);
        m_assembler.push(Assembler::Operand::Register(GPR0));
    } else {
        m_assembler.push(Assembler::Operand::Imm(0));
    }

    // push valid
    m_assembler.push(Assembler::Operand::Imm(valid));

    // UNWIND_CONTEXT_BASE = STACK_POINTER
    m_assembler.mov(
        Assembler::Operand::Register(UNWIND_CONTEXT_BASE),
        Assembler::Operand::Register(STACK_POINTER));

    // align stack pointer
    m_assembler.sub(Assembler::Operand::Register(STACK_POINTER), Assembler::Operand::Imm(8));
}
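
// NOTE: After the three pushes above, `valid` sits at offset 0, `handler` at offset 8
//       and `finalizer` at offset 16 from UNWIND_CONTEXT_BASE, which is the layout
//       handle_exception() reads. The extra 8-byte adjustment is presumably there to
//       keep the stack 16-byte aligned (an x86-64 ABI assumption), which is also why
//       pop_unwind_context() below rewinds 32 bytes rather than 24.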
void Compiler::pop_unwind_context()
{
    m_assembler.add(Assembler::Operand::Register(STACK_POINTER), Assembler::Operand::Imm(32));
    m_assembler.add(Assembler::Operand::Register(UNWIND_CONTEXT_BASE), Assembler::Operand::Imm(32));
}

void Compiler::compile_enter_unwind_context(Bytecode::Op::EnterUnwindContext const& op)
{
    push_unwind_context(true, op.handler_target(), op.finalizer_target());
    m_assembler.jump(label_for(op.entry_point().block()));
}

void Compiler::compile_leave_unwind_context(Bytecode::Op::LeaveUnwindContext const&)
{
    pop_unwind_context();
}

void Compiler::compile_throw(Bytecode::Op::Throw const&)
{
    load_vm_register(GPR0, Bytecode::Register::accumulator());
    store_vm_register(Bytecode::Register::exception(), GPR0);
    check_exception();
}

static ThrowCompletionOr<Value> abstract_inequals(VM& vm, Value src1, Value src2)
{
    return Value(!TRY(is_loosely_equal(vm, src1, src2)));
}

static ThrowCompletionOr<Value> abstract_equals(VM& vm, Value src1, Value src2)
{
    return Value(TRY(is_loosely_equal(vm, src1, src2)));
}

static ThrowCompletionOr<Value> typed_inequals(VM&, Value src1, Value src2)
{
    return Value(!is_strictly_equal(src1, src2));
}

static ThrowCompletionOr<Value> typed_equals(VM&, Value src1, Value src2)
{
    return Value(is_strictly_equal(src1, src2));
}

#    define DO_COMPILE_COMMON_BINARY_OP(TitleCaseName, snake_case_name)                 \
        static Value cxx_##snake_case_name(VM& vm, Value lhs, Value rhs)                \
        {                                                                               \
            return TRY_OR_SET_EXCEPTION(snake_case_name(vm, lhs, rhs));                 \
        }                                                                               \
                                                                                        \
        void Compiler::compile_##snake_case_name(Bytecode::Op::TitleCaseName const& op) \
        {                                                                               \
            load_vm_register(ARG1, op.lhs());                                           \
            load_vm_register(ARG2, Bytecode::Register::accumulator());                  \
            native_call((void*)cxx_##snake_case_name);                                  \
            store_vm_register(Bytecode::Register::accumulator(), RET);                  \
            check_exception();                                                          \
        }

JS_ENUMERATE_COMMON_BINARY_OPS_WITHOUT_FAST_PATH(DO_COMPILE_COMMON_BINARY_OP)
#    undef DO_COMPILE_COMMON_BINARY_OP

static Value cxx_add(VM& vm, Value lhs, Value rhs)
{
    return TRY_OR_SET_EXCEPTION(add(vm, lhs, rhs));
}

void Compiler::compile_add(Bytecode::Op::Add const& op)
{
    load_vm_register(ARG1, op.lhs());
    load_vm_register(ARG2, Bytecode::Register::accumulator());

    Assembler::Label end {};
    Assembler::Label slow_case {};

    branch_if_both_int32(ARG1, ARG2, [&] {
        // GPR0 = ARG1 + ARG2 (32-bit)
        // if (overflow) goto slow_case;
        m_assembler.mov(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Register(ARG1));
        m_assembler.add32(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Register(ARG2),
            slow_case);

        // accumulator = GPR0 | SHIFTED_INT32_TAG;
        m_assembler.mov(
            Assembler::Operand::Register(GPR1),
            Assembler::Operand::Imm(SHIFTED_INT32_TAG));
        m_assembler.bitwise_or(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Register(GPR1));
        store_vm_register(Bytecode::Register::accumulator(), GPR0);
        m_assembler.jump(end);
    });

    slow_case.link(m_assembler);
    native_call((void*)cxx_add);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
    end.link(m_assembler);
}
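
// NOTE: add32() above branches to the slow path on 32-bit overflow, so the fast path
//       only ever produces results that still fit in an Int32; ORing the sum with
//       SHIFTED_INT32_TAG then rebuilds a fully tagged Int32 JS::Value before it is
//       stored back into the accumulator.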
static Value cxx_less_than(VM& vm, Value lhs, Value rhs)
{
    return TRY_OR_SET_EXCEPTION(less_than(vm, lhs, rhs));
}

void Compiler::compile_less_than(Bytecode::Op::LessThan const& op)
{
    load_vm_register(ARG1, op.lhs());
    load_vm_register(ARG2, Bytecode::Register::accumulator());

    Assembler::Label end {};

    branch_if_both_int32(ARG1, ARG2, [&] {
        // if (ARG1 < ARG2) return true;
        // else return false;
        Assembler::Label true_case {};

        m_assembler.sign_extend_32_to_64_bits(ARG1);
        m_assembler.sign_extend_32_to_64_bits(ARG2);

        m_assembler.jump_if(
            Assembler::Operand::Register(ARG1),
            Assembler::Condition::SignedLessThan,
            Assembler::Operand::Register(ARG2),
            true_case);

        m_assembler.mov(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Imm(Value(false).encoded()));
        store_vm_register(Bytecode::Register::accumulator(), GPR0);
        m_assembler.jump(end);

        true_case.link(m_assembler);
        m_assembler.mov(
            Assembler::Operand::Register(GPR0),
            Assembler::Operand::Imm(Value(true).encoded()));
        store_vm_register(Bytecode::Register::accumulator(), GPR0);
        m_assembler.jump(end);
    });

    native_call((void*)cxx_less_than);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
    end.link(m_assembler);
}

static ThrowCompletionOr<Value> not_(VM&, Value value)
{
    return Value(!value.to_boolean());
}

static ThrowCompletionOr<Value> typeof_(VM& vm, Value value)
{
    return PrimitiveString::create(vm, value.typeof());
}

#    define DO_COMPILE_COMMON_UNARY_OP(TitleCaseName, snake_case_name)               \
        static Value cxx_##snake_case_name(VM& vm, Value value)                      \
        {                                                                            \
            return TRY_OR_SET_EXCEPTION(snake_case_name(vm, value));                 \
        }                                                                            \
                                                                                     \
        void Compiler::compile_##snake_case_name(Bytecode::Op::TitleCaseName const&) \
        {                                                                            \
            load_vm_register(ARG1, Bytecode::Register::accumulator());               \
            native_call((void*)cxx_##snake_case_name);                               \
            store_vm_register(Bytecode::Register::accumulator(), RET);               \
            check_exception();                                                       \
        }

JS_ENUMERATE_COMMON_UNARY_OPS(DO_COMPILE_COMMON_UNARY_OP)
#    undef DO_COMPILE_COMMON_UNARY_OP

void Compiler::compile_return(Bytecode::Op::Return const&)
{
    load_vm_register(GPR0, Bytecode::Register::accumulator());

    // check for finalizer
    // if (!unwind_context.valid) goto normal_return;
    Assembler::Label normal_return {};
    m_assembler.mov(
        Assembler::Operand::Register(GPR1),
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 0));
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR1),
        Assembler::Condition::EqualTo,
        Assembler::Operand::Imm(0),
        normal_return);

    // if (!unwind_context.finalizer) goto normal_return;
    m_assembler.mov(
        Assembler::Operand::Register(GPR1),
        Assembler::Operand::Mem64BaseAndOffset(UNWIND_CONTEXT_BASE, 16));
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR1),
        Assembler::Condition::EqualTo,
        Assembler::Operand::Imm(0),
        normal_return);

    store_vm_register(Bytecode::Register::saved_return_value(), GPR0);
    m_assembler.jump(Assembler::Operand::Register(GPR1));

    // normal_return:
    normal_return.link(m_assembler);
    store_vm_register(Bytecode::Register::return_value(), GPR0);
    jump_to_exit();
}
static Value cxx_new_string(VM& vm, DeprecatedString const& string)
{
    return PrimitiveString::create(vm, string);
}

void Compiler::compile_new_string(Bytecode::Op::NewString const& op)
{
    auto const& string = m_bytecode_executable.string_table->get(op.index());
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&string)));
    native_call((void*)cxx_new_string);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}

void Compiler::compile_new_regexp(Bytecode::Op::NewRegExp const& op)
{
    auto const& parsed_regex = m_bytecode_executable.regex_table->get(op.regex_index());
    auto const& pattern = m_bytecode_executable.string_table->get(op.source_index());
    auto const& flags = m_bytecode_executable.string_table->get(op.flags_index());

    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&parsed_regex)));
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(bit_cast<u64>(&pattern)));
    m_assembler.mov(
        Assembler::Operand::Register(ARG3),
        Assembler::Operand::Imm(bit_cast<u64>(&flags)));
    native_call((void*)Bytecode::new_regexp);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}

static Value cxx_new_bigint(VM& vm, Crypto::SignedBigInteger const& bigint)
{
    return BigInt::create(vm, bigint);
}

void Compiler::compile_new_bigint(Bytecode::Op::NewBigInt const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&op.bigint())));
    native_call((void*)cxx_new_bigint);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}

static Value cxx_new_object(VM& vm)
{
    auto& realm = *vm.current_realm();
    return Object::create(realm, realm.intrinsics().object_prototype());
}

void Compiler::compile_new_object(Bytecode::Op::NewObject const&)
{
    native_call((void*)cxx_new_object);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}

static Value cxx_new_array(VM& vm, size_t element_count, u32 first_register_index)
{
    auto& realm = *vm.current_realm();
    auto array = MUST(Array::create(realm, 0));
    for (size_t i = 0; i < element_count; ++i) {
        auto& value = vm.bytecode_interpreter().reg(Bytecode::Register(first_register_index + i));
        array->indexed_properties().put(i, value, default_attributes);
    }
    return array;
}

void Compiler::compile_new_array(Bytecode::Op::NewArray const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(op.element_count()));
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(op.element_count() ? op.start().index() : 0));
    native_call((void*)cxx_new_array);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}

void Compiler::compile_new_function(Bytecode::Op::NewFunction const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&op.function_node())));
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(bit_cast<u64>(&op.lhs_name())));
    m_assembler.mov(
        Assembler::Operand::Register(ARG3),
        Assembler::Operand::Imm(bit_cast<u64>(&op.home_object())));
    native_call((void*)Bytecode::new_function);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}

static Value cxx_new_class(VM& vm, ClassExpression const& class_expression, Optional<Bytecode::IdentifierTableIndex> const& lhs_name)
{
    return TRY_OR_SET_EXCEPTION(Bytecode::new_class(vm, class_expression, lhs_name));
}

void Compiler::compile_new_class(Bytecode::Op::NewClass const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&op.class_expression())));
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(bit_cast<u64>(&op.lhs_name())));
    native_call((void*)cxx_new_class);
    store_vm_register(Bytecode::Register::accumulator(), RET);
}

static Value cxx_get_by_id(VM& vm, Value base, Bytecode::IdentifierTableIndex property, u32 cache_index)
{
    return TRY_OR_SET_EXCEPTION(Bytecode::get_by_id(vm.bytecode_interpreter(), property, base, base, cache_index));
}

void Compiler::compile_get_by_id(Bytecode::Op::GetById const& op)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(op.property().value()));
    m_assembler.mov(
        Assembler::Operand::Register(ARG3),
        Assembler::Operand::Imm(op.cache_index()));
    native_call((void*)cxx_get_by_id);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_get_by_value(VM& vm, Value base, Value property)
{
    return TRY_OR_SET_EXCEPTION(Bytecode::get_by_value(vm.bytecode_interpreter(), base, property));
}

void Compiler::compile_get_by_value(Bytecode::Op::GetByValue const& op)
{
    load_vm_register(ARG1, op.base());
    load_vm_register(ARG2, Bytecode::Register::accumulator());
    native_call((void*)cxx_get_by_value);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_get_global(VM& vm, Bytecode::IdentifierTableIndex identifier, u32 cache_index)
{
    return TRY_OR_SET_EXCEPTION(Bytecode::get_global(vm.bytecode_interpreter(), identifier, cache_index));
}

void Compiler::compile_get_global(Bytecode::Op::GetGlobal const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(op.identifier().value()));
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(op.cache_index()));
    native_call((void*)cxx_get_global);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_get_variable(VM& vm, DeprecatedFlyString const& name, u32 cache_index)
{
    return TRY_OR_SET_EXCEPTION(Bytecode::get_variable(vm.bytecode_interpreter(), name, cache_index));
}

void Compiler::compile_get_variable(Bytecode::Op::GetVariable const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&m_bytecode_executable.get_identifier(op.identifier()))));
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(op.cache_index()));
    native_call((void*)cxx_get_variable);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_get_callee_and_this_from_environment(VM& vm, DeprecatedFlyString const& name, u32 cache_index, Bytecode::Register callee_reg, Bytecode::Register this_reg)
{
    auto& bytecode_interpreter = vm.bytecode_interpreter();
    auto callee_and_this = TRY_OR_SET_EXCEPTION(Bytecode::get_callee_and_this_from_environment(
        bytecode_interpreter,
        name,
        cache_index));
    bytecode_interpreter.reg(callee_reg) = callee_and_this.callee;
    bytecode_interpreter.reg(this_reg) = callee_and_this.this_value;
    return {};
}

void Compiler::compile_get_callee_and_this_from_environment(Bytecode::Op::GetCalleeAndThisFromEnvironment const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&m_bytecode_executable.get_identifier(op.identifier()))));
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(op.cache_index()));
    m_assembler.mov(
        Assembler::Operand::Register(ARG3),
        Assembler::Operand::Imm(op.callee().index()));
    m_assembler.mov(
        Assembler::Operand::Register(ARG4),
        Assembler::Operand::Imm(op.this_().index()));
    native_call((void*)cxx_get_callee_and_this_from_environment);
    check_exception();
}
static Value cxx_to_numeric(VM& vm, Value value)
{
    return TRY_OR_SET_EXCEPTION(value.to_numeric(vm));
}

void Compiler::compile_to_numeric(Bytecode::Op::ToNumeric const&)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    native_call((void*)cxx_to_numeric);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_resolve_this_binding(VM& vm)
{
    auto this_value = TRY_OR_SET_EXCEPTION(vm.resolve_this_binding());
    vm.bytecode_interpreter().reg(Bytecode::Register::this_value()) = this_value;
    return this_value;
}

void Compiler::compile_resolve_this_binding(Bytecode::Op::ResolveThisBinding const&)
{
    // OPTIMIZATION: We cache the `this` value in a special VM register.
    //               So first we check if the cache is non-empty, and if so,
    //               we can avoid calling out to C++ at all. :^)
    load_vm_register(GPR0, Bytecode::Register::this_value());
    m_assembler.mov(
        Assembler::Operand::Register(GPR1),
        Assembler::Operand::Imm(Value().encoded()));

    Assembler::Label slow_case {};
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::EqualTo,
        Assembler::Operand::Register(GPR1),
        slow_case);

    // Fast case: We have a cached `this` value!
    store_vm_register(Bytecode::Register::accumulator(), GPR0);
    auto end = m_assembler.jump();

    slow_case.link(m_assembler);
    native_call((void*)cxx_resolve_this_binding);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();

    end.link(m_assembler);
}

static Value cxx_put_by_id(VM& vm, Value base, Bytecode::IdentifierTableIndex property, Value value, Bytecode::Op::PropertyKind kind)
{
    PropertyKey name = vm.bytecode_interpreter().current_executable().get_identifier(property);
    TRY_OR_SET_EXCEPTION(Bytecode::put_by_property_key(vm, base, base, value, name, kind));
    vm.bytecode_interpreter().accumulator() = value;
    return {};
}

void Compiler::compile_put_by_id(Bytecode::Op::PutById const& op)
{
    load_vm_register(ARG1, op.base());
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(op.property().value()));
    load_vm_register(ARG3, Bytecode::Register::accumulator());
    m_assembler.mov(
        Assembler::Operand::Register(ARG4),
        Assembler::Operand::Imm(to_underlying(op.kind())));
    native_call((void*)cxx_put_by_id);
    check_exception();
}

static Value cxx_put_by_value(VM& vm, Value base, Value property, Value value, Bytecode::Op::PropertyKind kind)
{
    TRY_OR_SET_EXCEPTION(Bytecode::put_by_value(vm, base, property, value, kind));
    vm.bytecode_interpreter().accumulator() = value;
    return {};
}

void Compiler::compile_put_by_value(Bytecode::Op::PutByValue const& op)
{
    load_vm_register(ARG1, op.base());
    load_vm_register(ARG2, op.property());
    load_vm_register(ARG3, Bytecode::Register::accumulator());
    m_assembler.mov(
        Assembler::Operand::Register(ARG4),
        Assembler::Operand::Imm(to_underlying(op.kind())));
    native_call((void*)cxx_put_by_value);
    check_exception();
}

static Value cxx_call(VM& vm, Value callee, u32 first_argument_index, u32 argument_count, Value this_value, Bytecode::Op::CallType call_type, Optional<Bytecode::StringTableIndex> const& expression_string)
{
    TRY_OR_SET_EXCEPTION(throw_if_needed_for_call(vm.bytecode_interpreter(), callee, call_type, expression_string));

    MarkedVector<Value> argument_values(vm.heap());
    argument_values.ensure_capacity(argument_count);
    for (u32 i = 0; i < argument_count; ++i) {
        argument_values.unchecked_append(vm.bytecode_interpreter().reg(Bytecode::Register { first_argument_index + i }));
    }

    return TRY_OR_SET_EXCEPTION(perform_call(vm.bytecode_interpreter(), this_value, call_type, callee, move(argument_values)));
}

void Compiler::compile_call(Bytecode::Op::Call const& op)
{
    load_vm_register(ARG1, op.callee());
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(op.first_argument().index()));
    m_assembler.mov(
        Assembler::Operand::Register(ARG3),
        Assembler::Operand::Imm(op.argument_count()));
    load_vm_register(ARG4, op.this_value());
    m_assembler.mov(
        Assembler::Operand::Register(ARG5),
        Assembler::Operand::Imm(to_underlying(op.call_type())));
    m_assembler.mov(
        Assembler::Operand::Register(GPR0),
        Assembler::Operand::Imm(bit_cast<u64>(&op.expression_string())));
    native_call((void*)cxx_call, { Assembler::Operand::Register(GPR0) });
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_call_with_argument_array(VM& vm, Value callee, Value this_value, Bytecode::Op::CallType call_type, Optional<Bytecode::StringTableIndex> const& expression_string)
{
    TRY_OR_SET_EXCEPTION(throw_if_needed_for_call(vm.bytecode_interpreter(), callee, call_type, expression_string));
    auto argument_values = argument_list_evaluation(vm.bytecode_interpreter());
    return TRY_OR_SET_EXCEPTION(perform_call(vm.bytecode_interpreter(), this_value, call_type, callee, move(argument_values)));
}

void Compiler::compile_call_with_argument_array(Bytecode::Op::CallWithArgumentArray const& op)
{
    load_vm_register(ARG1, op.callee());
    load_vm_register(ARG2, op.this_value());
    m_assembler.mov(
        Assembler::Operand::Register(ARG3),
        Assembler::Operand::Imm(to_underlying(op.call_type())));
    m_assembler.mov(
        Assembler::Operand::Register(ARG4),
        Assembler::Operand::Imm(bit_cast<u64>(&op.expression_string())));
    native_call((void*)cxx_call_with_argument_array);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_typeof_variable(VM& vm, DeprecatedFlyString const& identifier)
{
    return TRY_OR_SET_EXCEPTION(Bytecode::typeof_variable(vm, identifier));
}

void Compiler::compile_typeof_variable(Bytecode::Op::TypeofVariable const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&m_bytecode_executable.get_identifier(op.identifier().value()))));
    native_call((void*)cxx_typeof_variable);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_create_variable(
    VM& vm,
    DeprecatedFlyString const& name,
    Bytecode::Op::EnvironmentMode mode,
    bool is_global,
    bool is_immutable,
    bool is_strict)
{
    TRY_OR_SET_EXCEPTION(Bytecode::create_variable(vm, name, mode, is_global, is_immutable, is_strict));
    return {};
}

void Compiler::compile_create_variable(Bytecode::Op::CreateVariable const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&m_bytecode_executable.get_identifier(op.identifier().value()))));
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(to_underlying(op.mode())));
    m_assembler.mov(
        Assembler::Operand::Register(ARG3),
        Assembler::Operand::Imm(static_cast<u64>(op.is_global())));
    m_assembler.mov(
        Assembler::Operand::Register(ARG4),
        Assembler::Operand::Imm(static_cast<u64>(op.is_immutable())));
    m_assembler.mov(
        Assembler::Operand::Register(ARG5),
        Assembler::Operand::Imm(static_cast<u64>(op.is_strict())));
    native_call((void*)cxx_create_variable);
    check_exception();
}

static Value cxx_set_variable(
    VM& vm,
    DeprecatedFlyString const& identifier,
    Value value,
    Bytecode::Op::EnvironmentMode environment_mode,
    Bytecode::Op::SetVariable::InitializationMode initialization_mode)
{
    TRY_OR_SET_EXCEPTION(Bytecode::set_variable(vm, identifier, value, environment_mode, initialization_mode));
    return {};
}

void Compiler::compile_set_variable(Bytecode::Op::SetVariable const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&m_bytecode_executable.get_identifier(op.identifier().value()))));
    load_vm_register(ARG2, Bytecode::Register::accumulator());
    m_assembler.mov(
        Assembler::Operand::Register(ARG3),
        Assembler::Operand::Imm(to_underlying(op.mode())));
    m_assembler.mov(
        Assembler::Operand::Register(ARG4),
        Assembler::Operand::Imm(to_underlying(op.initialization_mode())));
    native_call((void*)cxx_set_variable);
    check_exception();
}
void Compiler::compile_continue_pending_unwind(Bytecode::Op::ContinuePendingUnwind const& op)
{
    // re-throw the exception if we reached the end of the finally block and there was no catch block to handle it
    check_exception();

    // if (!saved_return_value.is_empty()) goto resume_block;
    load_vm_register(GPR0, Bytecode::Register::saved_return_value());
    m_assembler.mov(Assembler::Operand::Register(GPR1), Assembler::Operand::Imm(Value().encoded()));
    m_assembler.jump_if(
        Assembler::Operand::Register(GPR0),
        Assembler::Condition::NotEqualTo,
        Assembler::Operand::Register(GPR1),
        label_for(op.resume_target().block()));

    // finish the pending return from the try block
    store_vm_register(Bytecode::Register::return_value(), GPR0);
    jump_to_exit();
}

static void cxx_create_lexical_environment(VM& vm)
{
    auto make_and_swap_envs = [&](auto& old_environment) {
        GCPtr<Environment> environment = new_declarative_environment(*old_environment).ptr();
        swap(old_environment, environment);
        return environment;
    };
    vm.bytecode_interpreter().saved_lexical_environment_stack().append(make_and_swap_envs(vm.running_execution_context().lexical_environment));
}

void Compiler::compile_create_lexical_environment(Bytecode::Op::CreateLexicalEnvironment const&)
{
    native_call((void*)cxx_create_lexical_environment);
}

static void cxx_leave_lexical_environment(VM& vm)
{
    vm.running_execution_context().lexical_environment = vm.bytecode_interpreter().saved_lexical_environment_stack().take_last();
}

void Compiler::compile_leave_lexical_environment(Bytecode::Op::LeaveLexicalEnvironment const&)
{
    native_call((void*)cxx_leave_lexical_environment);
}

static Value cxx_concat_string(VM& vm, Value lhs, Value rhs)
{
    auto string = TRY_OR_SET_EXCEPTION(rhs.to_primitive_string(vm));
    return PrimitiveString::create(vm, lhs.as_string(), string);
}

void Compiler::compile_concat_string(Bytecode::Op::ConcatString const& op)
{
    load_vm_register(ARG1, op.lhs());
    load_vm_register(ARG2, Bytecode::Register::accumulator());
    native_call((void*)cxx_concat_string);
    store_vm_register(op.lhs(), RET);
    check_exception();
}

static void cxx_block_declaration_instantiation(VM& vm, ScopeNode const& scope_node)
{
    auto old_environment = vm.running_execution_context().lexical_environment;
    vm.bytecode_interpreter().saved_lexical_environment_stack().append(old_environment);
    vm.running_execution_context().lexical_environment = new_declarative_environment(*old_environment);
    scope_node.block_declaration_instantiation(vm, vm.running_execution_context().lexical_environment);
}

void Compiler::compile_block_declaration_instantiation(Bytecode::Op::BlockDeclarationInstantiation const& op)
{
    m_assembler.mov(
        Assembler::Operand::Register(ARG1),
        Assembler::Operand::Imm(bit_cast<u64>(&op.scope_node())));
    native_call((void*)cxx_block_declaration_instantiation);
}

static Value cxx_super_call_with_argument_array(VM& vm, Value argument_array, bool is_synthetic)
{
    TRY_OR_SET_EXCEPTION(Bytecode::super_call_with_argument_array(vm, argument_array, is_synthetic));
    return {};
}

void Compiler::compile_super_call_with_argument_array(Bytecode::Op::SuperCallWithArgumentArray const& op)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(static_cast<u64>(op.is_synthetic())));
    native_call((void*)cxx_super_call_with_argument_array);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_get_iterator(VM& vm, Value value, IteratorHint hint)
{
    auto iterator = TRY_OR_SET_EXCEPTION(get_iterator(vm, value, hint));
    return Bytecode::iterator_to_object(vm, iterator);
}

void Compiler::compile_get_iterator(Bytecode::Op::GetIterator const& op)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    m_assembler.mov(
        Assembler::Operand::Register(ARG2),
        Assembler::Operand::Imm(to_underlying(op.hint())));
    native_call((void*)cxx_get_iterator);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_iterator_next(VM& vm, Value iterator)
{
    auto iterator_object = TRY_OR_SET_EXCEPTION(iterator.to_object(vm));
    auto iterator_record = Bytecode::object_to_iterator(vm, iterator_object);
    return TRY_OR_SET_EXCEPTION(iterator_next(vm, iterator_record));
}

void Compiler::compile_iterator_next(Bytecode::Op::IteratorNext const&)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    native_call((void*)cxx_iterator_next);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_iterator_result_done(VM& vm, Value iterator)
{
    auto iterator_result = TRY_OR_SET_EXCEPTION(iterator.to_object(vm));
    return Value(TRY_OR_SET_EXCEPTION(iterator_complete(vm, iterator_result)));
}

void Compiler::compile_iterator_result_done(Bytecode::Op::IteratorResultDone const&)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    native_call((void*)cxx_iterator_result_done);
    store_vm_register(Bytecode::Register::accumulator(), RET);
    check_exception();
}

static Value cxx_throw_if_not_object(VM& vm, Value value)
{
    if (!value.is_object())
        TRY_OR_SET_EXCEPTION(vm.throw_completion<TypeError>(ErrorType::NotAnObject, value.to_string_without_side_effects()));
    return {};
}

void Compiler::compile_throw_if_not_object(Bytecode::Op::ThrowIfNotObject const&)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    native_call((void*)cxx_throw_if_not_object);
    check_exception();
}

static Value cxx_throw_if_nullish(VM& vm, Value value)
{
    if (value.is_nullish())
        TRY_OR_SET_EXCEPTION(vm.throw_completion<TypeError>(ErrorType::NotObjectCoercible, value.to_string_without_side_effects()));
    return {};
}

void Compiler::compile_throw_if_nullish(Bytecode::Op::ThrowIfNullish const&)
{
    load_vm_register(ARG1, Bytecode::Register::accumulator());
    native_call((void*)cxx_throw_if_nullish);
    check_exception();
}

void Compiler::jump_to_exit()
{
    m_assembler.jump(m_exit_label);
}
void Compiler::native_call(void* function_address, Vector<Assembler::Operand> const& stack_arguments)
{
    // Make sure we don't clobber the VM&.
    m_assembler.push(Assembler::Operand::Register(ARG0));

    // Align the stack pointer.
    m_assembler.sub(Assembler::Operand::Register(STACK_POINTER), Assembler::Operand::Imm(8));

    // NOTE: We don't preserve caller-saved registers when making a native call.
    //       This means that they may have changed after we return from the call.
    m_assembler.native_call(function_address, stack_arguments);

    // Restore the stack pointer.
    m_assembler.add(Assembler::Operand::Register(STACK_POINTER), Assembler::Operand::Imm(8));

    // Restore our VM&.
    m_assembler.pop(Assembler::Operand::Register(ARG0));
}
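
// NOTE: ARG0 carries the VM& for every cxx_* helper, so native_call() saves and
//       restores it around the call. The push plus the extra 8-byte adjustment add
//       up to 16 bytes, presumably to keep the stack 16-byte aligned at the call
//       site (a System V x86-64 assumption); caller-saved scratch registers must be
//       treated as clobbered afterwards, as the comment above notes.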
OwnPtr<NativeExecutable> Compiler::compile(Bytecode::Executable& bytecode_executable)
{
    if (!getenv("LIBJS_JIT"))
        return nullptr;

    Compiler compiler { bytecode_executable };

    compiler.m_assembler.enter();

    compiler.m_assembler.mov(
        Assembler::Operand::Register(REGISTER_ARRAY_BASE),
        Assembler::Operand::Register(ARG1));

    compiler.m_assembler.mov(
        Assembler::Operand::Register(LOCALS_ARRAY_BASE),
        Assembler::Operand::Register(ARG2));

    compiler.push_unwind_context(false, {}, {});

    for (auto& block : bytecode_executable.basic_blocks) {
        compiler.block_data_for(*block).start_offset = compiler.m_output.size();
        auto it = Bytecode::InstructionStreamIterator(block->instruction_stream());
        while (!it.at_end()) {
            auto const& op = *it;
            switch (op.type()) {
#    define CASE_BYTECODE_OP(OpTitleCase, op_snake_case)                                     \
    case Bytecode::Instruction::Type::OpTitleCase:                                           \
        compiler.compile_##op_snake_case(static_cast<Bytecode::Op::OpTitleCase const&>(op)); \
        break;
                JS_ENUMERATE_IMPLEMENTED_JIT_OPS(CASE_BYTECODE_OP)
#    undef CASE_BYTECODE_OP
            default:
                if constexpr (LOG_JIT_FAILURE) {
                    dbgln("\033[31;1mJIT compilation failed\033[0m: {}", bytecode_executable.name);
                    dbgln("Unsupported bytecode op: {}", op.to_deprecated_string(bytecode_executable));
                }
                return nullptr;
            }

            ++it;
        }
        if (!block->is_terminated())
            compiler.jump_to_exit();
    }

    compiler.m_exit_label.link(compiler.m_assembler);
    compiler.m_assembler.exit();

    if (!compiler.m_exception_handler.jump_slot_offsets_in_instruction_stream.is_empty()) {
        compiler.m_exception_handler.link(compiler.m_assembler);
        compiler.handle_exception();
    }

    auto* executable_memory = mmap(nullptr, compiler.m_output.size(), PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE, 0, 0);
    if (executable_memory == MAP_FAILED) {
        dbgln("mmap: {}", strerror(errno));
        return nullptr;
    }
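
    // NOTE: push_unwind_context() emitted its handler/finalizer movs as Patchable
    //       immediates and recorded their positions in absolute_references_to_here;
    //       the loop below rewrites each reserved 8-byte slot with the block's
    //       absolute address, one byte at a time in little-endian order.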
    for (auto& block : bytecode_executable.basic_blocks) {
        auto& block_data = compiler.block_data_for(*block);

        block_data.label.link_to(compiler.m_assembler, block_data.start_offset);

        // Patch up all the absolute references
        for (auto& absolute_reference : block_data.absolute_references_to_here) {
            auto offset = bit_cast<u64>(executable_memory) + block_data.start_offset;
            compiler.m_output[absolute_reference + 0] = (offset >> 0) & 0xff;
            compiler.m_output[absolute_reference + 1] = (offset >> 8) & 0xff;
            compiler.m_output[absolute_reference + 2] = (offset >> 16) & 0xff;
            compiler.m_output[absolute_reference + 3] = (offset >> 24) & 0xff;
            compiler.m_output[absolute_reference + 4] = (offset >> 32) & 0xff;
            compiler.m_output[absolute_reference + 5] = (offset >> 40) & 0xff;
            compiler.m_output[absolute_reference + 6] = (offset >> 48) & 0xff;
            compiler.m_output[absolute_reference + 7] = (offset >> 56) & 0xff;
        }
    }

    if constexpr (DUMP_JIT_MACHINE_CODE_TO_STDOUT) {
        (void)write(STDOUT_FILENO, compiler.m_output.data(), compiler.m_output.size());
    }

    memcpy(executable_memory, compiler.m_output.data(), compiler.m_output.size());

    if (mprotect(executable_memory, compiler.m_output.size(), PROT_READ | PROT_EXEC) < 0) {
        dbgln("mprotect: {}", strerror(errno));
        return nullptr;
    }
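
    // NOTE: The buffer was mapped read/write, filled via memcpy() above, and only then
    //       flipped to read/execute, so the mapping is never writable and executable
    //       at the same time.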
    if constexpr (LOG_JIT_SUCCESS) {
        dbgln("\033[32;1mJIT compilation succeeded!\033[0m {}", bytecode_executable.name);
    }

    auto executable = make<NativeExecutable>(executable_memory, compiler.m_output.size());
    if constexpr (DUMP_JIT_DISASSEMBLY)
        executable->dump_disassembly();
    return executable;
}

}

#endif