// Generator.cpp — LibJS bytecode generator implementation.
  1. /*
  2. * Copyright (c) 2021-2024, Andreas Kling <kling@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/QuickSort.h>
  7. #include <AK/TemporaryChange.h>
  8. #include <LibJS/AST.h>
  9. #include <LibJS/Bytecode/BasicBlock.h>
  10. #include <LibJS/Bytecode/Generator.h>
  11. #include <LibJS/Bytecode/Instruction.h>
  12. #include <LibJS/Bytecode/Op.h>
  13. #include <LibJS/Bytecode/Register.h>
  14. #include <LibJS/Runtime/ECMAScriptFunctionObject.h>
  15. #include <LibJS/Runtime/VM.h>
  16. namespace JS::Bytecode {
// Constructs a bytecode generator for the given VM: allocates the interning
// tables (strings, identifiers, regexes), a heap-aware constants list, and a
// ScopedOperand wrapping the shared accumulator register.
Generator::Generator(VM& vm)
    : m_vm(vm)
    , m_string_table(make<StringTable>())
    , m_identifier_table(make<IdentifierTable>())
    , m_regex_table(make<RegexTable>())
    , m_constants(vm.heap())
    , m_accumulator(*this, Operand(Register::accumulator()))
{
}
// Emits bytecode implementing FunctionDeclarationInstantiation
// (https://tc39.es/ecma262/#sec-functiondeclarationinstantiation):
// binds parameters (rest / default / destructured forms), sets up the
// `arguments` object, initializes var- and lexically-scoped bindings, and
// instantiates hoisted function declarations.
CodeGenerationErrorOr<void> Generator::emit_function_declaration_instantiation(ECMAScriptFunctionObject const& function)
{
    // Parameter expressions (e.g. default values) get their own lexical
    // environment, separate from the function body's declarations.
    if (function.m_has_parameter_expressions) {
        emit<Op::CreateLexicalEnvironment>();
    }
    // Create an environment binding for every non-local parameter name; with
    // duplicate parameter names each binding is pre-initialized to undefined.
    for (auto const& parameter_name : function.m_parameter_names) {
        if (parameter_name.value == ECMAScriptFunctionObject::ParameterIsLocal::No) {
            auto id = intern_identifier(parameter_name.key);
            emit<Op::CreateVariable>(id, Op::EnvironmentMode::Lexical, false);
            if (function.m_has_duplicates) {
                emit<Op::SetVariable>(id, add_constant(js_undefined()), next_environment_variable_cache(), Op::SetVariable::InitializationMode::Initialize, Op::EnvironmentMode::Lexical);
            }
        }
    }
    // `arguments`: unmapped in strict mode or with a non-simple parameter
    // list, mapped otherwise.
    if (function.m_arguments_object_needed) {
        if (function.m_strict || !function.has_simple_parameter_list()) {
            emit<Op::CreateArguments>(Op::CreateArguments::Kind::Unmapped, function.m_strict);
        } else {
            emit<Op::CreateArguments>(Op::CreateArguments::Kind::Mapped, function.m_strict);
        }
    }
    // Bind each formal parameter to its argument value.
    auto const& formal_parameters = function.formal_parameters();
    for (u32 param_index = 0; param_index < formal_parameters.size(); ++param_index) {
        auto const& parameter = formal_parameters[param_index];
        if (parameter.is_rest) {
            // Rest parameter: gather the remaining arguments.
            auto argument_reg = allocate_register();
            emit<Op::CreateRestParams>(argument_reg.operand(), param_index);
            emit<Op::SetArgument>(param_index, argument_reg.operand());
        } else if (parameter.default_value) {
            // Default value: evaluated only when the argument is undefined.
            auto& if_undefined_block = make_block();
            auto& if_not_undefined_block = make_block();
            auto argument_reg = allocate_register();
            emit<Op::GetArgument>(argument_reg.operand(), param_index);
            emit<Op::JumpUndefined>(
                argument_reg.operand(),
                Label { if_undefined_block },
                Label { if_not_undefined_block });
            switch_to_basic_block(if_undefined_block);
            auto operand = TRY(parameter.default_value->generate_bytecode(*this));
            emit<Op::SetArgument>(param_index, *operand);
            emit<Op::Jump>(Label { if_not_undefined_block });
            switch_to_basic_block(if_not_undefined_block);
        }
        if (auto const* identifier = parameter.binding.get_pointer<NonnullRefPtr<Identifier const>>(); identifier) {
            if ((*identifier)->is_local()) {
                // Local parameter: store straight into its local slot.
                auto local_variable_index = (*identifier)->local_variable_index();
                emit<Op::GetArgument>(local(local_variable_index), param_index);
                set_local_initialized((*identifier)->local_variable_index());
            } else {
                // Environment-bound parameter. With duplicate names the
                // binding was already initialized above, so Set, not Initialize.
                auto id = intern_identifier((*identifier)->string());
                auto init_mode = function.m_has_duplicates ? Op::SetVariable::InitializationMode::Set : Op::SetVariable::InitializationMode::Initialize;
                auto argument_reg = allocate_register();
                emit<Op::GetArgument>(argument_reg.operand(), param_index);
                emit<Op::SetVariable>(id, argument_reg.operand(),
                    next_environment_variable_cache(),
                    init_mode,
                    Op::EnvironmentMode::Lexical);
            }
        } else if (auto const* binding_pattern = parameter.binding.get_pointer<NonnullRefPtr<BindingPattern const>>(); binding_pattern) {
            // Destructuring pattern: delegate to the pattern's own codegen.
            auto input_operand = allocate_register();
            emit<Op::GetArgument>(input_operand.operand(), param_index);
            auto init_mode = function.m_has_duplicates ? Op::SetVariable::InitializationMode::Set : Bytecode::Op::SetVariable::InitializationMode::Initialize;
            TRY((*binding_pattern)->generate_bytecode(*this, init_mode, input_operand, false));
        }
    }
    ScopeNode const* scope_body = nullptr;
    if (is<ScopeNode>(*function.m_ecmascript_code))
        scope_body = static_cast<ScopeNode const*>(function.m_ecmascript_code.ptr());
    if (!function.m_has_parameter_expressions) {
        // No parameter expressions: vars share the current environment, so
        // simply create each var binding initialized to undefined.
        if (scope_body) {
            for (auto const& variable_to_initialize : function.m_var_names_to_initialize_binding) {
                auto const& id = variable_to_initialize.identifier;
                if (id.is_local()) {
                    emit<Op::Mov>(local(id.local_variable_index()), add_constant(js_undefined()));
                } else {
                    auto intern_id = intern_identifier(id.string());
                    emit<Op::CreateVariable>(intern_id, Op::EnvironmentMode::Var, false);
                    emit<Op::SetVariable>(intern_id, add_constant(js_undefined()), next_environment_variable_cache(), Bytecode::Op::SetVariable::InitializationMode::Initialize, Op::EnvironmentMode::Var);
                }
            }
        }
    } else {
        // With parameter expressions, vars get a separate environment; a var
        // matching a parameter binding is seeded with the parameter's value,
        // otherwise with undefined.
        emit<Op::CreateVariableEnvironment>(function.m_var_environment_bindings_count);
        if (scope_body) {
            for (auto const& variable_to_initialize : function.m_var_names_to_initialize_binding) {
                auto const& id = variable_to_initialize.identifier;
                auto initial_value = allocate_register();
                if (!variable_to_initialize.parameter_binding || variable_to_initialize.function_name) {
                    emit<Op::Mov>(initial_value, add_constant(js_undefined()));
                } else {
                    if (id.is_local()) {
                        emit<Op::Mov>(initial_value, local(id.local_variable_index()));
                    } else {
                        emit<Op::GetVariable>(initial_value, intern_identifier(id.string()), next_environment_variable_cache());
                    }
                }
                if (id.is_local()) {
                    emit<Op::Mov>(local(id.local_variable_index()), initial_value);
                } else {
                    auto intern_id = intern_identifier(id.string());
                    emit<Op::CreateVariable>(intern_id, Op::EnvironmentMode::Var, false);
                    emit<Op::SetVariable>(intern_id, initial_value, next_environment_variable_cache(), Op::SetVariable::InitializationMode::Initialize, Op::EnvironmentMode::Var);
                }
            }
        }
    }
    // Sloppy mode: hoisted function names become var bindings initialized to
    // undefined before the actual function objects are created below.
    if (!function.m_strict && scope_body) {
        for (auto const& function_name : function.m_function_names_to_initialize_binding) {
            auto intern_id = intern_identifier(function_name);
            emit<Op::CreateVariable>(intern_id, Op::EnvironmentMode::Var, false);
            emit<Op::SetVariable>(intern_id, add_constant(js_undefined()), next_environment_variable_cache(), Bytecode::Op::SetVariable::InitializationMode::Initialize, Op::EnvironmentMode::Var);
        }
    }
    if (!function.m_strict) {
        // The extra lexical environment can be elided unless the function may
        // call eval directly or has non-local lexical declarations.
        bool can_elide_declarative_environment = !function.m_contains_direct_call_to_eval && (!scope_body || !scope_body->has_non_local_lexical_declarations());
        if (!can_elide_declarative_environment) {
            emit<Op::CreateLexicalEnvironment>(function.m_lex_environment_bindings_count);
        }
    }
    // Create bindings for the body's let/const declarations (locals excluded).
    if (scope_body) {
        MUST(scope_body->for_each_lexically_scoped_declaration([&](Declaration const& declaration) {
            MUST(declaration.for_each_bound_identifier([&](auto const& id) {
                if (id.is_local()) {
                    return;
                }
                emit<Op::CreateVariable>(intern_identifier(id.string()),
                    Op::EnvironmentMode::Lexical,
                    declaration.is_constant_declaration(),
                    false,
                    declaration.is_constant_declaration());
            }));
        }));
    }
    // Instantiate hoisted function declarations. NOTE: the inner `function`
    // register deliberately shadows the `function` parameter.
    for (auto const& declaration : function.m_functions_to_initialize) {
        auto function = allocate_register();
        emit<Op::NewFunction>(function, declaration, OptionalNone {});
        if (declaration.name_identifier()->is_local()) {
            emit<Op::Mov>(local(declaration.name_identifier()->local_variable_index()), function);
        } else {
            emit<Op::SetVariable>(intern_identifier(declaration.name()), function, next_environment_variable_cache(), Op::SetVariable::InitializationMode::Set, Op::EnvironmentMode::Var);
        }
    }
    return {};
}
// Drives a full compilation of `node` to an Executable:
//  - creates a Generator and (when a function object is given) emits
//    FunctionDeclarationInstantiation first,
//  - emits an initial suspend for async/generator functions,
//  - generates bytecode for the body,
//  - flattens the basic blocks into one stream, eliding fall-through jumps
//    and folding JumpIf into JumpTrue/JumpFalse when a target is the next
//    block,
//  - patches label addresses and links exception handler offsets.
CodeGenerationErrorOr<NonnullGCPtr<Executable>> Generator::emit_function_body_bytecode(VM& vm, ASTNode const& node, FunctionKind enclosing_function_kind, GCPtr<ECMAScriptFunctionObject const> function)
{
    Generator generator(vm);
    generator.switch_to_basic_block(generator.make_block());
    SourceLocationScope scope(generator, node);
    generator.m_enclosing_function_kind = enclosing_function_kind;
    if (generator.is_in_async_function() && !generator.is_in_generator_function()) {
        // Immediately yield with no value.
        auto& start_block = generator.make_block();
        generator.emit<Bytecode::Op::Yield>(Label { start_block }, generator.add_constant(js_undefined()));
        generator.switch_to_basic_block(start_block);
        // NOTE: This doesn't have to handle received throw/return completions, as GeneratorObject::resume_abrupt
        // will not enter the generator from the SuspendedStart state and immediately completes the generator.
    }
    if (function)
        TRY(generator.emit_function_declaration_instantiation(*function));
    if (generator.is_in_generator_function()) {
        // Immediately yield with no value.
        auto& start_block = generator.make_block();
        generator.emit<Bytecode::Op::Yield>(Label { start_block }, generator.add_constant(js_undefined()));
        generator.switch_to_basic_block(start_block);
        // NOTE: This doesn't have to handle received throw/return completions, as GeneratorObject::resume_abrupt
        // will not enter the generator from the SuspendedStart state and immediately completes the generator.
    }
    auto last_value = TRY(node.generate_bytecode(generator));
    if (!generator.current_block().is_terminated() && last_value.has_value()) {
        generator.emit<Bytecode::Op::End>(last_value.value());
    }
    if (generator.is_in_generator_or_async_function()) {
        // Terminate all unterminated blocks with yield return
        for (auto& block : generator.m_root_basic_blocks) {
            if (block->is_terminated())
                continue;
            generator.switch_to_basic_block(*block);
            generator.emit<Bytecode::Op::Yield>(nullptr, generator.add_constant(js_undefined()));
        }
    }
    bool is_strict_mode = false;
    if (is<Program>(node))
        is_strict_mode = static_cast<Program const&>(node).is_strict_mode();
    else if (is<FunctionBody>(node))
        is_strict_mode = static_cast<FunctionBody const&>(node).in_strict_mode();
    else if (is<FunctionDeclaration>(node))
        is_strict_mode = static_cast<FunctionDeclaration const&>(node).is_strict_mode();
    // Pre-size the flat bytecode buffer from the per-block sizes.
    size_t size_needed = 0;
    for (auto& block : generator.m_root_basic_blocks) {
        size_needed += block->size();
    }
    Vector<u8> bytecode;
    bytecode.ensure_capacity(size_needed);
    Vector<size_t> basic_block_start_offsets;
    basic_block_start_offsets.ensure_capacity(generator.m_root_basic_blocks.size());
    HashMap<BasicBlock const*, size_t> block_offsets;
    Vector<size_t> label_offsets;
    // Handler/finalizer targets are recorded as block pointers now and
    // resolved to flat-stream offsets after all blocks have been emitted.
    struct UnlinkedExceptionHandlers {
        size_t start_offset;
        size_t end_offset;
        BasicBlock const* handler;
        BasicBlock const* finalizer;
    };
    Vector<UnlinkedExceptionHandlers> unlinked_exception_handlers;
    HashMap<size_t, SourceRecord> source_map;
    for (auto& block : generator.m_root_basic_blocks) {
        basic_block_start_offsets.append(bytecode.size());
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.append({
                .start_offset = bytecode.size(),
                .end_offset = 0,
                .handler = block->handler(),
                .finalizer = block->finalizer(),
            });
        }
        block_offsets.set(block.ptr(), bytecode.size());
        // Rebase the block's source records onto flat-stream offsets.
        for (auto& [offset, source_record] : block->source_map()) {
            source_map.set(bytecode.size() + offset, source_record);
        }
        Bytecode::InstructionStreamIterator it(block->instruction_stream());
        while (!it.at_end()) {
            auto& instruction = const_cast<Instruction&>(*it);
            // OPTIMIZATION: Don't emit jumps that just jump to the next block.
            if (instruction.type() == Instruction::Type::Jump) {
                auto& jump = static_cast<Bytecode::Op::Jump&>(instruction);
                if (jump.target().basic_block_index() == block->index() + 1) {
                    if (basic_block_start_offsets.last() == bytecode.size()) {
                        // This block is empty, just skip it.
                        basic_block_start_offsets.take_last();
                    }
                    ++it;
                    continue;
                }
            }
            // OPTIMIZATION: For `JumpIf` where one of the targets is the very next block,
            // we can emit a `JumpTrue` or `JumpFalse` (to the other block) instead.
            if (instruction.type() == Instruction::Type::JumpIf) {
                auto& jump = static_cast<Bytecode::Op::JumpIf&>(instruction);
                if (jump.true_target().basic_block_index() == block->index() + 1) {
                    Op::JumpFalse jump_false(jump.condition(), Label { jump.false_target() });
                    auto& label = jump_false.target();
                    // Record where the label lands inside the serialized
                    // instruction so the patch pass below can find it.
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_false));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_false), jump_false.length());
                    ++it;
                    continue;
                }
                if (jump.false_target().basic_block_index() == block->index() + 1) {
                    Op::JumpTrue jump_true(jump.condition(), Label { jump.true_target() });
                    auto& label = jump_true.target();
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_true));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_true), jump_true.length());
                    ++it;
                    continue;
                }
            }
            // Remember every embedded Label's offset for the patch pass below.
            instruction.visit_labels([&](Label& label) {
                size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&instruction));
                label_offsets.append(label_offset);
            });
            bytecode.append(reinterpret_cast<u8 const*>(&instruction), instruction.length());
            ++it;
        }
        if (!block->is_terminated()) {
            // Blocks that fall off the end terminate with `End undefined`.
            Op::End end(generator.add_constant(js_undefined()));
            bytecode.append(reinterpret_cast<u8 const*>(&end), end.length());
        }
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.last().end_offset = bytecode.size();
        }
    }
    // Patch pass: rewrite every recorded Label from a basic block index to
    // that block's offset in the flat bytecode stream.
    for (auto label_offset : label_offsets) {
        auto& label = *reinterpret_cast<Label*>(bytecode.data() + label_offset);
        auto* block = generator.m_root_basic_blocks[label.basic_block_index()].ptr();
        label.set_address(block_offsets.get(block).value());
    }
    auto executable = vm.heap().allocate_without_realm<Executable>(
        move(bytecode),
        move(generator.m_identifier_table),
        move(generator.m_string_table),
        move(generator.m_regex_table),
        move(generator.m_constants),
        node.source_code(),
        generator.m_next_property_lookup_cache,
        generator.m_next_global_variable_cache,
        generator.m_next_environment_variable_cache,
        generator.m_next_register,
        is_strict_mode);
    // Resolve handler/finalizer blocks to offsets; keep handlers sorted by
    // start offset so lookup can scan/bisect them.
    Vector<Executable::ExceptionHandlers> linked_exception_handlers;
    for (auto& unlinked_handler : unlinked_exception_handlers) {
        auto start_offset = unlinked_handler.start_offset;
        auto end_offset = unlinked_handler.end_offset;
        auto handler_offset = unlinked_handler.handler ? block_offsets.get(unlinked_handler.handler).value() : Optional<size_t> {};
        auto finalizer_offset = unlinked_handler.finalizer ? block_offsets.get(unlinked_handler.finalizer).value() : Optional<size_t> {};
        linked_exception_handlers.append({ start_offset, end_offset, handler_offset, finalizer_offset });
    }
    quick_sort(linked_exception_handlers, [](auto const& a, auto const& b) {
        return a.start_offset < b.start_offset;
    });
    executable->exception_handlers = move(linked_exception_handlers);
    executable->basic_block_start_offsets = move(basic_block_start_offsets);
    executable->source_map = move(source_map);
    generator.m_finished = true;
    return executable;
}
  333. CodeGenerationErrorOr<NonnullGCPtr<Executable>> Generator::generate_from_ast_node(VM& vm, ASTNode const& node, FunctionKind enclosing_function_kind)
  334. {
  335. return emit_function_body_bytecode(vm, node, enclosing_function_kind, {});
  336. }
  337. CodeGenerationErrorOr<NonnullGCPtr<Executable>> Generator::generate_from_function(VM& vm, ECMAScriptFunctionObject const& function)
  338. {
  339. return emit_function_body_bytecode(vm, function.ecmascript_code(), function.kind(), &function);
  340. }
  341. void Generator::grow(size_t additional_size)
  342. {
  343. VERIFY(m_current_basic_block);
  344. m_current_basic_block->grow(additional_size);
  345. }
  346. ScopedOperand Generator::allocate_register()
  347. {
  348. if (!m_free_registers.is_empty()) {
  349. return ScopedOperand { *this, Operand { m_free_registers.take_last() } };
  350. }
  351. VERIFY(m_next_register != NumericLimits<u32>::max());
  352. return ScopedOperand { *this, Operand { Register { m_next_register++ } } };
  353. }
// Returns a register to the free list so a later allocate_register() call
// can reuse it instead of growing m_next_register.
void Generator::free_register(Register reg)
{
    m_free_registers.append(reg);
}
  358. ScopedOperand Generator::local(u32 local_index)
  359. {
  360. return ScopedOperand { *this, Operand { Operand::Type::Local, static_cast<u32>(local_index) } };
  361. }
// RAII scope that makes `node` the generator's current AST node and restores
// the previous node on destruction.
Generator::SourceLocationScope::SourceLocationScope(Generator& generator, ASTNode const& node)
    : m_generator(generator)
    , m_previous_node(m_generator.m_current_ast_node)
{
    m_generator.m_current_ast_node = &node;
}
// Restores the AST node that was current before this scope was entered.
Generator::SourceLocationScope::~SourceLocationScope()
{
    m_generator.m_current_ast_node = m_previous_node;
}
// Pushes this unwind context (with an optional finalizer label) onto the
// generator's intrusively-linked unwind context chain.
Generator::UnwindContext::UnwindContext(Generator& generator, Optional<Label> finalizer)
    : m_generator(generator)
    , m_finalizer(finalizer)
    , m_previous_context(m_generator.m_current_unwind_context)
{
    m_generator.m_current_unwind_context = this;
}
// Pops this context; unwind contexts must be destroyed in LIFO order.
Generator::UnwindContext::~UnwindContext()
{
    VERIFY(m_generator.m_current_unwind_context == this);
    m_generator.m_current_unwind_context = m_previous_context;
}
  384. Label Generator::nearest_continuable_scope() const
  385. {
  386. return m_continuable_scopes.last().bytecode_target;
  387. }
  388. bool Generator::emit_block_declaration_instantiation(ScopeNode const& scope_node)
  389. {
  390. bool needs_block_declaration_instantiation = false;
  391. MUST(scope_node.for_each_lexically_scoped_declaration([&](Declaration const& declaration) {
  392. if (declaration.is_function_declaration()) {
  393. needs_block_declaration_instantiation = true;
  394. return;
  395. }
  396. MUST(declaration.for_each_bound_identifier([&](auto const& id) {
  397. if (!id.is_local())
  398. needs_block_declaration_instantiation = true;
  399. }));
  400. }));
  401. if (!needs_block_declaration_instantiation)
  402. return false;
  403. // FIXME: Generate the actual bytecode for block declaration instantiation
  404. // and get rid of the BlockDeclarationInstantiation instruction.
  405. start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
  406. emit<Bytecode::Op::BlockDeclarationInstantiation>(scope_node);
  407. return true;
  408. }
// Opens a new lexical environment and records a boundary so that break/
// continue/return codegen knows it must be left.
void Generator::begin_variable_scope()
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::CreateLexicalEnvironment>();
}
  414. void Generator::end_variable_scope()
  415. {
  416. end_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
  417. if (!m_current_basic_block->is_terminated()) {
  418. emit<Bytecode::Op::LeaveLexicalEnvironment>();
  419. }
  420. }
// Pushes a `continue` target (plus its language label set) and opens a
// matching Continue boundary.
void Generator::begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_continuable_scopes.append({ continue_target, language_label_set });
    start_boundary(BlockBoundaryType::Continue);
}
// Pops the innermost `continue` target and closes its boundary.
void Generator::end_continuable_scope()
{
    m_continuable_scopes.take_last();
    end_boundary(BlockBoundaryType::Continue);
}
  431. Label Generator::nearest_breakable_scope() const
  432. {
  433. return m_breakable_scopes.last().bytecode_target;
  434. }
// Pushes a `break` target (plus its language label set) and opens a matching
// Break boundary.
void Generator::begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_breakable_scopes.append({ breakable_target, language_label_set });
    start_boundary(BlockBoundaryType::Break);
}
// Pops the innermost `break` target and closes its boundary.
void Generator::end_breakable_scope()
{
    m_breakable_scopes.take_last();
    end_boundary(BlockBoundaryType::Break);
}
// Emits the shared "super reference" prologue used by both loads from and
// stores to `super.x` / `super[x]`: resolves `this`, optionally evaluates a
// computed property expression, and resolves the super base object.
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_super_reference(MemberExpression const& expression)
{
    VERIFY(is<SuperExpression>(expression.object()));
    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    // 1. Let env be GetThisEnvironment().
    // 2. Let actualThis be ? env.GetThisBinding().
    auto actual_this = allocate_register();
    emit<Bytecode::Op::ResolveThisBinding>(actual_this);
    Optional<ScopedOperand> computed_property_value;
    if (expression.is_computed()) {
        // SuperProperty : super [ Expression ]
        // 3. Let propertyNameReference be ? Evaluation of Expression.
        // 4. Let propertyNameValue be ? GetValue(propertyNameReference).
        computed_property_value = TRY(expression.property().generate_bytecode(*this)).value();
    }
    // 5/7. Return ? MakeSuperPropertyReference(actualThis, propertyKey, strict).
    // https://tc39.es/ecma262/#sec-makesuperpropertyreference
    // 1. Let env be GetThisEnvironment().
    // 2. Assert: env.HasSuperBinding() is true.
    // 3. Let baseValue be ? env.GetSuperBase().
    auto base_value = allocate_register();
    emit<Bytecode::Op::ResolveSuperBase>(base_value);
    // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
    // referenced_name stays empty for the non-computed (`super.name`) case.
    return ReferenceOperands {
        .base = base_value,
        .referenced_name = computed_property_value,
        .this_value = actual_this,
    };
}
// Emits code that loads the value of a reference (an identifier or member
// expression) and returns the operands describing the reference, so callers
// can reuse base/property for a subsequent store (e.g. compound assignment).
// `preferred_dst`, when present, is used as the destination register.
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_load_from_reference(JS::ASTNode const& node, Optional<ScopedOperand> preferred_dst)
{
    if (is<Identifier>(node)) {
        // Plain identifier: the identifier's own codegen resolves the binding.
        auto& identifier = static_cast<Identifier const&>(node);
        auto loaded_value = TRY(identifier.generate_bytecode(*this, preferred_dst)).value();
        return ReferenceOperands {
            .loaded_value = loaded_value,
        };
    }
    if (!is<MemberExpression>(node)) {
        return CodeGenerationError {
            &node,
            "Unimplemented/invalid node used as a reference"sv
        };
    }
    auto& expression = static_cast<MemberExpression const&>(node);
    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    if (is<SuperExpression>(expression.object())) {
        auto super_reference = TRY(emit_super_reference(expression));
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        if (super_reference.referenced_name.has_value()) {
            // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
            // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
            emit<Bytecode::Op::GetByValueWithThis>(dst, *super_reference.base, *super_reference.referenced_name, *super_reference.this_value);
        } else {
            // 3. Let propertyKey be StringValue of IdentifierName.
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit_get_by_id_with_this(dst, *super_reference.base, identifier_table_ref, *super_reference.this_value);
        }
        super_reference.loaded_value = dst;
        return super_reference;
    }
    auto base = TRY(expression.object().generate_bytecode(*this)).value();
    auto base_identifier = intern_identifier_for_expression(expression.object());
    if (expression.is_computed()) {
        // base[property]: the property value is copied into a fresh register
        // before the load. NOTE(review): presumably so the returned
        // referenced_name stays valid for a later store — confirm.
        auto property = TRY(expression.property().generate_bytecode(*this)).value();
        auto saved_property = allocate_register();
        emit<Bytecode::Op::Mov>(saved_property, property);
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit<Bytecode::Op::GetByValue>(dst, base, property, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_name = saved_property,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_identifier()) {
        // base.name: load by interned identifier (inline-cache friendly path).
        auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit_get_by_id(dst, base, identifier_table_ref, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_private_identifier()) {
        // base.#name: private field access.
        auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit<Bytecode::Op::GetPrivateById>(dst, base, identifier_table_ref);
        return ReferenceOperands {
            .base = base,
            .referenced_private_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    return CodeGenerationError {
        &expression,
        "Unimplemented non-computed member expression"sv
    };
}
// Emits bytecode that stores `value` into the reference denoted by `node`.
// Supported forms: plain identifiers, member expressions (computed, named,
// and private), and super.x / super[x]; any other node kind is a codegen error.
CodeGenerationErrorOr<void> Generator::emit_store_to_reference(JS::ASTNode const& node, ScopedOperand value)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        emit_set_variable(identifier, value);
        return {};
    }
    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);
        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));
            // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
            if (super_reference.referenced_name.has_value()) {
                // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
                // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
                emit<Bytecode::Op::PutByValueWithThis>(*super_reference.base, *super_reference.referenced_name, *super_reference.this_value, value);
            } else {
                // 3. Let propertyKey be StringValue of IdentifierName.
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::PutByIdWithThis>(*super_reference.base, *super_reference.this_value, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
            }
        } else {
            // Evaluate the object first, then (for computed access) the property key.
            auto object = TRY(expression.object().generate_bytecode(*this)).value();
            if (expression.is_computed()) {
                // obj[expr] = value
                auto property = TRY(expression.property().generate_bytecode(*this)).value();
                emit<Bytecode::Op::PutByValue>(object, property, value);
            } else if (expression.property().is_identifier()) {
                // obj.name = value
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::PutById>(object, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
            } else if (expression.property().is_private_identifier()) {
                // obj.#name = value
                auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
                emit<Bytecode::Op::PutPrivateById>(object, identifier_table_ref, value);
            } else {
                return CodeGenerationError {
                    &expression,
                    "Unimplemented non-computed member expression"sv
                };
            }
        }
        return {};
    }
    return CodeGenerationError {
        &node,
        "Unimplemented/invalid node used a reference"sv
    };
}
  595. CodeGenerationErrorOr<void> Generator::emit_store_to_reference(ReferenceOperands const& reference, ScopedOperand value)
  596. {
  597. if (reference.referenced_private_identifier.has_value()) {
  598. emit<Bytecode::Op::PutPrivateById>(*reference.base, *reference.referenced_private_identifier, value);
  599. return {};
  600. }
  601. if (reference.referenced_identifier.has_value()) {
  602. if (reference.base == reference.this_value)
  603. emit<Bytecode::Op::PutById>(*reference.base, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  604. else
  605. emit<Bytecode::Op::PutByIdWithThis>(*reference.base, *reference.this_value, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  606. return {};
  607. }
  608. if (reference.base == reference.this_value)
  609. emit<Bytecode::Op::PutByValue>(*reference.base, *reference.referenced_name, value);
  610. else
  611. emit<Bytecode::Op::PutByValueWithThis>(*reference.base, *reference.referenced_name, *reference.this_value, value);
  612. return {};
  613. }
// Emits bytecode for `delete <node>` and returns the operand that holds the
// boolean result. For `delete super.x` an empty Optional is returned instead
// of `dst` — NOTE(review): presumably the result is never observed in that
// case; confirm against the callers.
CodeGenerationErrorOr<Optional<ScopedOperand>> Generator::emit_delete_reference(JS::ASTNode const& node)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        // Locals are not deletable; `delete local` is always false.
        if (identifier.is_local()) {
            return add_constant(Value(false));
        }
        auto dst = allocate_register();
        emit<Bytecode::Op::DeleteVariable>(dst, intern_identifier(identifier.string()));
        return dst;
    }

    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);

        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));
            auto dst = allocate_register();
            if (super_reference.referenced_name.has_value()) {
                emit<Bytecode::Op::DeleteByValueWithThis>(dst, *super_reference.base, *super_reference.this_value, *super_reference.referenced_name);
            } else {
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::DeleteByIdWithThis>(dst, *super_reference.base, *super_reference.this_value, identifier_table_ref);
            }
            return Optional<ScopedOperand> {};
        }

        // Evaluate the object first, then (for computed access) the property key.
        auto object = TRY(expression.object().generate_bytecode(*this)).value();
        auto dst = allocate_register();
        if (expression.is_computed()) {
            auto property = TRY(expression.property().generate_bytecode(*this)).value();
            emit<Bytecode::Op::DeleteByValue>(dst, object, property);
        } else if (expression.property().is_identifier()) {
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit<Bytecode::Op::DeleteById>(dst, object, identifier_table_ref);
        } else {
            // NOTE: Trying to delete a private field generates a SyntaxError in the parser.
            return CodeGenerationError {
                &expression,
                "Unimplemented non-computed member expression"sv
            };
        }
        return dst;
    }

    // Though this will have no deletion effect, we still have to evaluate the node as it can have side effects.
    // For example: delete a(); delete ++c.b; etc.

    // 13.5.1.2 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-delete-operator-runtime-semantics-evaluation
    // 1. Let ref be the result of evaluating UnaryExpression.
    // 2. ReturnIfAbrupt(ref).
    (void)TRY(node.generate_bytecode(*this));

    // 3. If ref is not a Reference Record, return true.
    // NOTE: The rest of the steps are handled by Delete{Variable,ByValue,Id}.
    return add_constant(Value(true));
}
  666. void Generator::emit_set_variable(JS::Identifier const& identifier, ScopedOperand value, Bytecode::Op::SetVariable::InitializationMode initialization_mode, Bytecode::Op::EnvironmentMode mode)
  667. {
  668. if (identifier.is_local()) {
  669. if (value.operand().is_local() && value.operand().index() == identifier.local_variable_index()) {
  670. // Moving a local to itself is a no-op.
  671. return;
  672. }
  673. emit<Bytecode::Op::SetLocal>(identifier.local_variable_index(), value);
  674. } else {
  675. emit<Bytecode::Op::SetVariable>(intern_identifier(identifier.string()), value, next_environment_variable_cache(), initialization_mode, mode);
  676. }
  677. }
  678. static Optional<ByteString> expression_identifier(Expression const& expression)
  679. {
  680. if (expression.is_identifier()) {
  681. auto const& identifier = static_cast<Identifier const&>(expression);
  682. return identifier.string();
  683. }
  684. if (expression.is_numeric_literal()) {
  685. auto const& literal = static_cast<NumericLiteral const&>(expression);
  686. return literal.value().to_string_without_side_effects().to_byte_string();
  687. }
  688. if (expression.is_string_literal()) {
  689. auto const& literal = static_cast<StringLiteral const&>(expression);
  690. return ByteString::formatted("'{}'", literal.value());
  691. }
  692. if (expression.is_member_expression()) {
  693. auto const& member_expression = static_cast<MemberExpression const&>(expression);
  694. StringBuilder builder;
  695. if (auto identifer = expression_identifier(member_expression.object()); identifer.has_value())
  696. builder.append(*identifer);
  697. if (auto identifer = expression_identifier(member_expression.property()); identifer.has_value()) {
  698. if (member_expression.is_computed())
  699. builder.appendff("[{}]", *identifer);
  700. else
  701. builder.appendff(".{}", *identifer);
  702. }
  703. return builder.to_byte_string();
  704. }
  705. return {};
  706. }
  707. Optional<IdentifierTableIndex> Generator::intern_identifier_for_expression(Expression const& expression)
  708. {
  709. if (auto identifer = expression_identifier(expression); identifer.has_value())
  710. return intern_identifier(identifer.release_value());
  711. return {};
  712. }
// Emits the instructions for an unlabelled `break` or `continue`: walks
// m_boundaries from the innermost boundary outward, leaving unwind contexts
// and lexical environments (and scheduling jumps through `finally` blocks)
// as needed, until the nearest breakable/continuable scope is reached.
void Generator::generate_scoped_jump(JumpType type)
{
    // m_current_unwind_context is restored on return; the walk below only
    // simulates leaving contexts for the purposes of emission.
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    bool last_was_finally = false;
    for (size_t i = m_boundaries.size(); i > 0; --i) {
        auto boundary = m_boundaries[i - 1];
        using enum BlockBoundaryType;
        switch (boundary) {
        case Break:
            // Found the target scope of a `break`; jump there and stop.
            if (type == JumpType::Break) {
                emit<Op::Jump>(nearest_breakable_scope());
                return;
            }
            break;
        case Continue:
            // Found the target scope of a `continue`; jump there and stop.
            if (type == JumpType::Continue) {
                emit<Op::Jump>(nearest_continuable_scope());
                return;
            }
            break;
        case Unwind:
            // Leave a try/catch unwind context — unless a ReturnToFinally
            // boundary just advanced past this context for us.
            VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
            if (!last_was_finally) {
                VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                emit<Bytecode::Op::LeaveUnwindContext>();
                m_current_unwind_context = m_current_unwind_context->previous();
            }
            last_was_finally = false;
            break;
        case LeaveLexicalEnvironment:
            emit<Bytecode::Op::LeaveLexicalEnvironment>();
            break;
        case ReturnToFinally: {
            // The jump crosses a `finally`: schedule the jump so the finalizer
            // runs first, then continue emitting in a fresh basic block.
            VERIFY(m_current_unwind_context->finalizer().has_value());
            m_current_unwind_context = m_current_unwind_context->previous();
            auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
            auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
            auto& block = make_block(block_name);
            emit<Op::ScheduleJump>(Label { block });
            switch_to_basic_block(block);
            last_was_finally = true;
            break;
        }
        case LeaveFinally:
            emit<Op::LeaveFinally>();
            break;
        }
    }
    // Every break/continue has a matching scope (enforced by the parser).
    VERIFY_NOT_REACHED();
}
// Emits the instructions for a labelled `break`/`continue`: walks the
// breakable/continuable scopes from innermost outward, emitting the same
// boundary cleanup as generate_scoped_jump() for each scope crossed, until a
// scope whose label set contains `label` is found.
void Generator::generate_labelled_jump(JumpType type, DeprecatedFlyString const& label)
{
    // m_current_unwind_context is restored on return; the walk below only
    // simulates leaving contexts for the purposes of emission.
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    size_t current_boundary = m_boundaries.size();
    bool last_was_finally = false;

    auto const& jumpable_scopes = type == JumpType::Continue ? m_continuable_scopes : m_breakable_scopes;
    for (auto const& jumpable_scope : jumpable_scopes.in_reverse()) {
        // Emit cleanup for the boundaries belonging to this jumpable scope.
        for (; current_boundary > 0; --current_boundary) {
            auto boundary = m_boundaries[current_boundary - 1];
            if (boundary == BlockBoundaryType::Unwind) {
                // Leave a try/catch unwind context — unless ReturnToFinally just did.
                VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
                if (!last_was_finally) {
                    VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                    emit<Bytecode::Op::LeaveUnwindContext>();
                    m_current_unwind_context = m_current_unwind_context->previous();
                }
                last_was_finally = false;
            } else if (boundary == BlockBoundaryType::LeaveLexicalEnvironment) {
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
            } else if (boundary == BlockBoundaryType::ReturnToFinally) {
                // Crossing a `finally`: schedule the jump and continue in a new block.
                VERIFY(m_current_unwind_context->finalizer().has_value());
                m_current_unwind_context = m_current_unwind_context->previous();
                auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
                auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
                auto& block = make_block(block_name);
                emit<Op::ScheduleJump>(Label { block });
                switch_to_basic_block(block);
                last_was_finally = true;
            } else if ((type == JumpType::Continue && boundary == BlockBoundaryType::Continue) || (type == JumpType::Break && boundary == BlockBoundaryType::Break)) {
                // Make sure we don't process this boundary twice if the current jumpable scope doesn't contain the target label.
                --current_boundary;
                break;
            }
        }
        if (jumpable_scope.language_label_set.contains_slow(label)) {
            emit<Op::Jump>(jumpable_scope.bytecode_target);
            return;
        }
    }
    // We must have a jumpable scope available that contains the label, as this should be enforced by the parser.
    VERIFY_NOT_REACHED();
}
  805. void Generator::generate_break()
  806. {
  807. generate_scoped_jump(JumpType::Break);
  808. }
  809. void Generator::generate_break(DeprecatedFlyString const& break_label)
  810. {
  811. generate_labelled_jump(JumpType::Break, break_label);
  812. }
  813. void Generator::generate_continue()
  814. {
  815. generate_scoped_jump(JumpType::Continue);
  816. }
  817. void Generator::generate_continue(DeprecatedFlyString const& continue_label)
  818. {
  819. generate_labelled_jump(JumpType::Continue, continue_label);
  820. }
  821. void Generator::push_home_object(ScopedOperand object)
  822. {
  823. m_home_objects.append(object);
  824. }
  825. void Generator::pop_home_object()
  826. {
  827. m_home_objects.take_last();
  828. }
  829. void Generator::emit_new_function(ScopedOperand dst, FunctionExpression const& function_node, Optional<IdentifierTableIndex> lhs_name)
  830. {
  831. if (m_home_objects.is_empty()) {
  832. emit<Op::NewFunction>(dst, function_node, lhs_name);
  833. } else {
  834. emit<Op::NewFunction>(dst, function_node, lhs_name, m_home_objects.last());
  835. }
  836. }
  837. CodeGenerationErrorOr<Optional<ScopedOperand>> Generator::emit_named_evaluation_if_anonymous_function(Expression const& expression, Optional<IdentifierTableIndex> lhs_name, Optional<ScopedOperand> preferred_dst)
  838. {
  839. if (is<FunctionExpression>(expression)) {
  840. auto const& function_expression = static_cast<FunctionExpression const&>(expression);
  841. if (!function_expression.has_name()) {
  842. return TRY(function_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
  843. }
  844. }
  845. if (is<ClassExpression>(expression)) {
  846. auto const& class_expression = static_cast<ClassExpression const&>(expression);
  847. if (!class_expression.has_name()) {
  848. return TRY(class_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
  849. }
  850. }
  851. return expression.generate_bytecode(*this, preferred_dst);
  852. }
  853. void Generator::emit_get_by_id(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier)
  854. {
  855. emit<Op::GetById>(dst, base, property_identifier, move(base_identifier), m_next_property_lookup_cache++);
  856. }
  857. void Generator::emit_get_by_id_with_this(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex id, ScopedOperand this_value)
  858. {
  859. emit<Op::GetByIdWithThis>(dst, base, id, this_value, m_next_property_lookup_cache++);
  860. }
  861. void Generator::emit_iterator_value(ScopedOperand dst, ScopedOperand result)
  862. {
  863. emit_get_by_id(dst, result, intern_identifier("value"sv));
  864. }
  865. void Generator::emit_iterator_complete(ScopedOperand dst, ScopedOperand result)
  866. {
  867. emit_get_by_id(dst, result, intern_identifier("done"sv));
  868. }
  869. bool Generator::is_local_initialized(u32 local_index) const
  870. {
  871. return m_initialized_locals.find(local_index) != m_initialized_locals.end();
  872. }
  873. void Generator::set_local_initialized(u32 local_index)
  874. {
  875. m_initialized_locals.set(local_index);
  876. }
  877. ScopedOperand Generator::get_this(Optional<ScopedOperand> preferred_dst)
  878. {
  879. if (m_current_basic_block->this_().has_value())
  880. return m_current_basic_block->this_().value();
  881. if (m_root_basic_blocks[0]->this_().has_value()) {
  882. m_current_basic_block->set_this(m_root_basic_blocks[0]->this_().value());
  883. return m_root_basic_blocks[0]->this_().value();
  884. }
  885. auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
  886. emit<Bytecode::Op::ResolveThisBinding>(dst);
  887. m_current_basic_block->set_this(dst);
  888. return dst;
  889. }
  890. ScopedOperand Generator::accumulator()
  891. {
  892. return m_accumulator;
  893. }
// If the previous instruction in the current block is a comparison whose
// destination is `condition`, rewinds the block (dropping the comparison) and
// emits the fused compare-and-jump variant (e.g. JumpLessThan) instead,
// returning true. Returns false if no fusion was possible.
// NOTE: Callers must ensure `condition` has no other dependents (see emit_jump_if()).
bool Generator::fuse_compare_and_jump(ScopedOperand const& condition, Label true_target, Label false_target)
{
    // Peek at the last instruction emitted into this basic block.
    auto& last_instruction = *reinterpret_cast<Instruction const*>(m_current_basic_block->data() + m_current_basic_block->last_instruction_start_offset());

    // Expands to one fusion attempt per comparison opcode.
#define HANDLE_COMPARISON_OP(op_TitleCase, op_snake_case) \
    if (last_instruction.type() == Instruction::Type::op_TitleCase) { \
        auto& comparison = static_cast<Op::op_TitleCase const&>(last_instruction); \
        VERIFY(comparison.dst() == condition); \
        auto lhs = comparison.lhs(); \
        auto rhs = comparison.rhs(); \
        m_current_basic_block->rewind(); \
        emit<Op::Jump##op_TitleCase>(lhs, rhs, true_target, false_target); \
        return true; \
    }

    JS_ENUMERATE_COMPARISON_OPS(HANDLE_COMPARISON_OP);
#undef HANDLE_COMPARISON_OP
    return false;
}
  911. void Generator::emit_jump_if(ScopedOperand const& condition, Label true_target, Label false_target)
  912. {
  913. if (condition.operand().is_constant()) {
  914. auto value = m_constants[condition.operand().index()];
  915. if (value.is_boolean()) {
  916. if (value.as_bool()) {
  917. emit<Op::Jump>(true_target);
  918. } else {
  919. emit<Op::Jump>(false_target);
  920. }
  921. return;
  922. }
  923. }
  924. // NOTE: It's only safe to fuse compare-and-jump if the condition is a temporary with no other dependents.
  925. if (condition.operand().is_register()
  926. && condition.ref_count() == 1
  927. && m_current_basic_block->size() > 0) {
  928. if (fuse_compare_and_jump(condition, true_target, false_target))
  929. return;
  930. }
  931. emit<Op::JumpIf>(condition, true_target, false_target);
  932. }
  933. }