Generator.cpp 52 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243
  1. /*
  2. * Copyright (c) 2021-2024, Andreas Kling <andreas@ladybird.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/QuickSort.h>
  7. #include <AK/TemporaryChange.h>
  8. #include <LibJS/AST.h>
  9. #include <LibJS/Bytecode/BasicBlock.h>
  10. #include <LibJS/Bytecode/Generator.h>
  11. #include <LibJS/Bytecode/Instruction.h>
  12. #include <LibJS/Bytecode/Op.h>
  13. #include <LibJS/Bytecode/Register.h>
  14. #include <LibJS/Runtime/ECMAScriptFunctionObject.h>
  15. #include <LibJS/Runtime/VM.h>
  16. namespace JS::Bytecode {
// Constructs a bytecode Generator for the given VM.
// `function` may be null when compiling top-level (non-function) code.
// `must_propagate_completion` controls whether generated code must preserve
// the completion value of the evaluated node (e.g. for eval/program results).
Generator::Generator(VM& vm, GC::Ptr<ECMAScriptFunctionObject const> function, MustPropagateCompletion must_propagate_completion)
    : m_vm(vm)
    , m_string_table(make<StringTable>())
    , m_identifier_table(make<IdentifierTable>())
    , m_regex_table(make<RegexTable>())
    , m_constants(vm.heap())
    // Pre-wired scoped operands for the two fixed, well-known registers.
    , m_accumulator(*this, Operand(Register::accumulator()))
    , m_this_value(*this, Operand(Register::this_value()))
    , m_must_propagate_completion(must_propagate_completion == MustPropagateCompletion::Yes)
    , m_function(function)
{
}
// Emits bytecode performing FunctionDeclarationInstantiation for `function`:
// creates parameter bindings, the arguments object (if needed), initializes
// formal parameters (rest / default / destructuring), sets up var and lexical
// environments and bindings, and instantiates hoisted function declarations.
// Mirrors https://tc39.es/ecma262/#sec-functiondeclarationinstantiation.
CodeGenerationErrorOr<void> Generator::emit_function_declaration_instantiation(ECMAScriptFunctionObject const& function)
{
    // Parameter expressions (default values etc.) evaluate in their own lexical environment.
    if (function.m_has_parameter_expressions) {
        emit<Op::CreateLexicalEnvironment>();
    }

    // Create a lexical binding for each non-local parameter name.
    for (auto const& parameter_name : function.m_parameter_names) {
        if (parameter_name.value == ECMAScriptFunctionObject::ParameterIsLocal::No) {
            auto id = intern_identifier(parameter_name.key);
            emit<Op::CreateVariable>(id, Op::EnvironmentMode::Lexical, false);
            // With duplicate parameter names, bindings start out initialized to undefined.
            if (function.m_has_duplicates) {
                emit<Op::InitializeLexicalBinding>(id, add_constant(js_undefined()));
            }
        }
    }

    if (function.m_arguments_object_needed) {
        // If "arguments" has a local slot, create the arguments object directly into it.
        Optional<Operand> dst;
        auto local_var_index = function.m_local_variables_names.find_first_index("arguments"sv);
        if (local_var_index.has_value())
            dst = local(local_var_index.value());

        // Mapped arguments objects are only used in sloppy mode with a simple parameter list.
        if (function.m_strict || !function.has_simple_parameter_list()) {
            emit<Op::CreateArguments>(dst, Op::CreateArguments::Kind::Unmapped, function.m_strict);
        } else {
            emit<Op::CreateArguments>(dst, Op::CreateArguments::Kind::Mapped, function.m_strict);
        }
    }

    auto const& formal_parameters = function.formal_parameters();
    for (u32 param_index = 0; param_index < formal_parameters.size(); ++param_index) {
        auto const& parameter = formal_parameters[param_index];

        if (parameter.is_rest) {
            // Collect remaining arguments into an array and store it back as the argument value.
            auto argument_reg = allocate_register();
            emit<Op::CreateRestParams>(argument_reg.operand(), param_index);
            emit<Op::SetArgument>(param_index, argument_reg.operand());
        } else if (parameter.default_value) {
            // If the argument is undefined, evaluate the default value and store it instead.
            auto& if_undefined_block = make_block();
            auto& if_not_undefined_block = make_block();

            auto argument_reg = allocate_register();
            emit<Op::GetArgument>(argument_reg.operand(), param_index);

            emit<Op::JumpUndefined>(
                argument_reg.operand(),
                Label { if_undefined_block },
                Label { if_not_undefined_block });

            switch_to_basic_block(if_undefined_block);
            auto operand = TRY(parameter.default_value->generate_bytecode(*this));
            emit<Op::SetArgument>(param_index, *operand);
            emit<Op::Jump>(Label { if_not_undefined_block });

            switch_to_basic_block(if_not_undefined_block);
        }

        // Bind the (possibly defaulted) argument value to the parameter's target.
        if (auto const* identifier = parameter.binding.get_pointer<NonnullRefPtr<Identifier const>>(); identifier) {
            if ((*identifier)->is_local()) {
                // Local slot: read the argument straight into the local.
                auto local_variable_index = (*identifier)->local_variable_index();
                emit<Op::GetArgument>(local(local_variable_index), param_index);
                set_local_initialized((*identifier)->local_variable_index());
            } else {
                auto id = intern_identifier((*identifier)->string());
                auto argument_reg = allocate_register();
                emit<Op::GetArgument>(argument_reg.operand(), param_index);
                // Duplicate parameters were already initialized above, so only Set here.
                if (function.m_has_duplicates) {
                    emit<Op::SetLexicalBinding>(id, argument_reg.operand());
                } else {
                    emit<Op::InitializeLexicalBinding>(id, argument_reg.operand());
                }
            }
        } else if (auto const* binding_pattern = parameter.binding.get_pointer<NonnullRefPtr<BindingPattern const>>(); binding_pattern) {
            // Destructuring parameter: delegate to the binding pattern's own codegen.
            auto input_operand = allocate_register();
            emit<Op::GetArgument>(input_operand.operand(), param_index);
            auto init_mode = function.m_has_duplicates ? Op::BindingInitializationMode::Set : Bytecode::Op::BindingInitializationMode::Initialize;
            TRY((*binding_pattern)->generate_bytecode(*this, init_mode, input_operand, false));
        }
    }

    ScopeNode const* scope_body = nullptr;
    if (is<ScopeNode>(*function.m_ecmascript_code))
        scope_body = static_cast<ScopeNode const*>(function.m_ecmascript_code.ptr());

    if (!function.m_has_parameter_expressions) {
        // No parameter expressions: vars live in the same environment; just create
        // and initialize the var bindings to undefined.
        if (scope_body) {
            for (auto const& variable_to_initialize : function.m_var_names_to_initialize_binding) {
                auto const& id = variable_to_initialize.identifier;
                if (id.is_local()) {
                    emit<Op::Mov>(local(id.local_variable_index()), add_constant(js_undefined()));
                } else {
                    auto intern_id = intern_identifier(id.string());
                    emit<Op::CreateVariable>(intern_id, Op::EnvironmentMode::Var, false);
                    emit<Op::InitializeVariableBinding>(intern_id, add_constant(js_undefined()));
                }
            }
        }
    } else {
        // Parameter expressions present: vars get a separate variable environment,
        // and same-named parameters' values are copied into the var bindings.
        emit<Op::CreateVariableEnvironment>(function.m_var_environment_bindings_count);

        if (scope_body) {
            for (auto const& variable_to_initialize : function.m_var_names_to_initialize_binding) {
                auto const& id = variable_to_initialize.identifier;
                auto initial_value = allocate_register();
                if (!variable_to_initialize.parameter_binding || variable_to_initialize.function_name) {
                    // Not shadowing a parameter (or shadowed by a function): starts as undefined.
                    emit<Op::Mov>(initial_value, add_constant(js_undefined()));
                } else {
                    // Copy the parameter's current value into the var binding.
                    if (id.is_local()) {
                        emit<Op::Mov>(initial_value, local(id.local_variable_index()));
                    } else {
                        emit<Op::GetBinding>(initial_value, intern_identifier(id.string()));
                    }
                }
                if (id.is_local()) {
                    emit<Op::Mov>(local(id.local_variable_index()), initial_value);
                } else {
                    auto intern_id = intern_identifier(id.string());
                    emit<Op::CreateVariable>(intern_id, Op::EnvironmentMode::Var, false);
                    emit<Op::InitializeVariableBinding>(intern_id, initial_value);
                }
            }
        }
    }

    // Sloppy mode: annex-B style function names get var bindings initialized to undefined.
    if (!function.m_strict && scope_body) {
        for (auto const& function_name : function.m_function_names_to_initialize_binding) {
            auto intern_id = intern_identifier(function_name);
            emit<Op::CreateVariable>(intern_id, Op::EnvironmentMode::Var, false);
            emit<Op::InitializeVariableBinding>(intern_id, add_constant(js_undefined()));
        }
    }

    if (!function.m_strict) {
        // We can skip the top-level declarative environment unless direct eval or
        // non-local lexical declarations might observe it.
        bool can_elide_declarative_environment = !function.m_contains_direct_call_to_eval && (!scope_body || !scope_body->has_non_local_lexical_declarations());
        if (!can_elide_declarative_environment) {
            emit<Op::CreateLexicalEnvironment>(function.m_lex_environment_bindings_count);
        }
    }

    // Create (uninitialized) lexical bindings for let/const/class declarations in the body.
    if (scope_body) {
        MUST(scope_body->for_each_lexically_scoped_declaration([&](Declaration const& declaration) {
            MUST(declaration.for_each_bound_identifier([&](auto const& id) {
                if (id.is_local()) {
                    // Local declarations are hoisted to the top of the scope.
                    return;
                }
                emit<Op::CreateVariable>(intern_identifier(id.string()),
                    Op::EnvironmentMode::Lexical,
                    declaration.is_constant_declaration(),
                    false,
                    declaration.is_constant_declaration());
            }));
        }));
    }

    // Instantiate hoisted function declarations and store them into their bindings.
    for (auto const& declaration : function.m_functions_to_initialize) {
        auto function = allocate_register();
        emit<Op::NewFunction>(function, declaration, OptionalNone {});
        if (declaration.name_identifier()->is_local()) {
            emit<Op::Mov>(local(declaration.name_identifier()->local_variable_index()), function);
        } else {
            emit<Op::SetVariableBinding>(intern_identifier(declaration.name()), function);
        }
    }

    return {};
}
// Compiles `node` into a flat bytecode Executable:
//   1. Generates basic blocks for the AST (including function declaration
//      instantiation and the initial yield for generators/async functions).
//   2. Rewrites operand indices so registers, constants, and locals occupy
//      disjoint index ranges.
//   3. Flattens the basic blocks into one bytecode stream, applying jump
//      peephole optimizations, then patches label addresses and links
//      exception handler tables.
CodeGenerationErrorOr<GC::Ref<Executable>> Generator::compile(VM& vm, ASTNode const& node, FunctionKind enclosing_function_kind, GC::Ptr<ECMAScriptFunctionObject const> function, MustPropagateCompletion must_propagate_completion, Vector<DeprecatedFlyString> local_variable_names)
{
    Generator generator(vm, function, must_propagate_completion);

    generator.switch_to_basic_block(generator.make_block());
    SourceLocationScope scope(generator, node);
    generator.m_enclosing_function_kind = enclosing_function_kind;
    if (generator.is_in_async_function() && !generator.is_in_generator_function()) {
        // Immediately yield with no value.
        auto& start_block = generator.make_block();
        generator.emit<Bytecode::Op::Yield>(Label { start_block }, generator.add_constant(js_undefined()));
        generator.switch_to_basic_block(start_block);
        // NOTE: This doesn't have to handle received throw/return completions, as GeneratorObject::resume_abrupt
        //       will not enter the generator from the SuspendedStart state and immediately completes the generator.
    }
    if (function)
        TRY(generator.emit_function_declaration_instantiation(*function));
    if (generator.is_in_generator_function()) {
        // Immediately yield with no value.
        auto& start_block = generator.make_block();
        generator.emit<Bytecode::Op::Yield>(Label { start_block }, generator.add_constant(js_undefined()));
        generator.switch_to_basic_block(start_block);
        // NOTE: This doesn't have to handle received throw/return completions, as GeneratorObject::resume_abrupt
        //       will not enter the generator from the SuspendedStart state and immediately completes the generator.
    }

    auto last_value = TRY(node.generate_bytecode(generator));

    if (!generator.current_block().is_terminated() && last_value.has_value()) {
        generator.emit<Bytecode::Op::End>(last_value.value());
    }

    if (generator.is_in_generator_or_async_function()) {
        // Terminate all unterminated blocks with yield return
        for (auto& block : generator.m_root_basic_blocks) {
            if (block->is_terminated())
                continue;
            generator.switch_to_basic_block(*block);
            generator.emit_return<Bytecode::Op::Yield>(generator.add_constant(js_undefined()));
        }
    }

    // Determine strict mode from whichever node kind carries it.
    bool is_strict_mode = false;
    if (is<Program>(node))
        is_strict_mode = static_cast<Program const&>(node).is_strict_mode();
    else if (is<FunctionBody>(node))
        is_strict_mode = static_cast<FunctionBody const&>(node).in_strict_mode();
    else if (is<FunctionDeclaration>(node))
        is_strict_mode = static_cast<FunctionDeclaration const&>(node).is_strict_mode();

    size_t size_needed = 0;
    for (auto& block : generator.m_root_basic_blocks) {
        size_needed += block->size();
    }

    Vector<u8> bytecode;
    bytecode.ensure_capacity(size_needed);

    Vector<size_t> basic_block_start_offsets;
    basic_block_start_offsets.ensure_capacity(generator.m_root_basic_blocks.size());

    HashMap<BasicBlock const*, size_t> block_offsets;
    Vector<size_t> label_offsets;

    // Records exception-handler coverage per block; offsets are into the final
    // bytecode stream and get resolved to handler/finalizer addresses below.
    struct UnlinkedExceptionHandlers {
        size_t start_offset;
        size_t end_offset;
        BasicBlock const* handler;
        BasicBlock const* finalizer;
    };
    Vector<UnlinkedExceptionHandlers> unlinked_exception_handlers;

    HashMap<size_t, SourceRecord> source_map;

    Optional<ScopedOperand> undefined_constant;
    for (auto& block : generator.m_root_basic_blocks) {
        if (!block->is_terminated()) {
            // NOTE: We must ensure that the "undefined" constant, which will be used by the not yet
            //       emitted End instruction, is taken into account while shifting local operands by the
            //       number of constants.
            undefined_constant = generator.add_constant(js_undefined());
            break;
        }
    }

    auto number_of_registers = generator.m_next_register;
    auto number_of_constants = generator.m_constants.size();

    // Pass: Rewrite the bytecode to use the correct register and constant indices.
    // Final operand index layout: [registers][constants][locals].
    for (auto& block : generator.m_root_basic_blocks) {
        Bytecode::InstructionStreamIterator it(block->instruction_stream());
        while (!it.at_end()) {
            auto& instruction = const_cast<Instruction&>(*it);
            instruction.visit_operands([number_of_registers, number_of_constants](Operand& operand) {
                switch (operand.type()) {
                case Operand::Type::Register:
                    break;
                case Operand::Type::Local:
                    operand.offset_index_by(number_of_registers + number_of_constants);
                    break;
                case Operand::Type::Constant:
                    operand.offset_index_by(number_of_registers);
                    break;
                default:
                    VERIFY_NOT_REACHED();
                }
            });
            ++it;
        }
    }

    // Also rewrite the `undefined` constant if we have one for inserting End.
    if (undefined_constant.has_value())
        undefined_constant.value().operand().offset_index_by(number_of_registers);

    // Pass: Flatten all basic blocks into one contiguous bytecode stream.
    for (auto& block : generator.m_root_basic_blocks) {
        basic_block_start_offsets.append(bytecode.size());
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.append({
                .start_offset = bytecode.size(),
                .end_offset = 0,
                .handler = block->handler(),
                .finalizer = block->finalizer(),
            });
        }

        block_offsets.set(block.ptr(), bytecode.size());

        for (auto& [offset, source_record] : block->source_map()) {
            source_map.set(bytecode.size() + offset, source_record);
        }

        Bytecode::InstructionStreamIterator it(block->instruction_stream());
        while (!it.at_end()) {
            auto& instruction = const_cast<Instruction&>(*it);

            if (instruction.type() == Instruction::Type::Jump) {
                auto& jump = static_cast<Bytecode::Op::Jump&>(instruction);

                // OPTIMIZATION: Don't emit jumps that just jump to the next block.
                if (jump.target().basic_block_index() == block->index() + 1) {
                    if (basic_block_start_offsets.last() == bytecode.size()) {
                        // This block is empty, just skip it.
                        basic_block_start_offsets.take_last();
                    }
                    ++it;
                    continue;
                }

                // OPTIMIZATION: For jumps to a return-or-end-only block, we can emit a `Return` or `End` directly instead.
                auto& target_block = *generator.m_root_basic_blocks[jump.target().basic_block_index()];
                if (target_block.is_terminated()) {
                    auto target_instruction_iterator = InstructionStreamIterator { target_block.instruction_stream() };
                    auto& target_instruction = *target_instruction_iterator;

                    if (target_instruction.type() == Instruction::Type::Return) {
                        auto& return_instruction = static_cast<Bytecode::Op::Return const&>(target_instruction);
                        Op::Return return_op(return_instruction.value());
                        bytecode.append(reinterpret_cast<u8 const*>(&return_op), return_op.length());
                        ++it;
                        continue;
                    }

                    if (target_instruction.type() == Instruction::Type::End) {
                        auto& return_instruction = static_cast<Bytecode::Op::End const&>(target_instruction);
                        Op::End end_op(return_instruction.value());
                        bytecode.append(reinterpret_cast<u8 const*>(&end_op), end_op.length());
                        ++it;
                        continue;
                    }
                }
            }

            // OPTIMIZATION: For `JumpIf` where one of the targets is the very next block,
            //               we can emit a `JumpTrue` or `JumpFalse` (to the other block) instead.
            if (instruction.type() == Instruction::Type::JumpIf) {
                auto& jump = static_cast<Bytecode::Op::JumpIf&>(instruction);
                if (jump.true_target().basic_block_index() == block->index() + 1) {
                    Op::JumpFalse jump_false(jump.condition(), Label { jump.false_target() });
                    auto& label = jump_false.target();
                    // Remember the label's offset inside the emitted instruction so it can be patched later.
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_false));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_false), jump_false.length());
                    ++it;
                    continue;
                }
                if (jump.false_target().basic_block_index() == block->index() + 1) {
                    Op::JumpTrue jump_true(jump.condition(), Label { jump.true_target() });
                    auto& label = jump_true.target();
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_true));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_true), jump_true.length());
                    ++it;
                    continue;
                }
            }

            // Record every embedded label's final byte offset for the patch pass below.
            instruction.visit_labels([&](Label& label) {
                size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&instruction));
                label_offsets.append(label_offset);
            });
            bytecode.append(reinterpret_cast<u8 const*>(&instruction), instruction.length());
            ++it;
        }
        if (!block->is_terminated()) {
            // Fall-off-the-end blocks get an implicit `End undefined`.
            Op::End end(*undefined_constant);
            bytecode.append(reinterpret_cast<u8 const*>(&end), end.length());
        }
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.last().end_offset = bytecode.size();
        }
    }

    // Patch pass: resolve every recorded label to its block's final bytecode address.
    for (auto label_offset : label_offsets) {
        auto& label = *reinterpret_cast<Label*>(bytecode.data() + label_offset);
        auto* block = generator.m_root_basic_blocks[label.basic_block_index()].ptr();
        label.set_address(block_offsets.get(block).value());
    }

    auto executable = vm.heap().allocate<Executable>(
        move(bytecode),
        move(generator.m_identifier_table),
        move(generator.m_string_table),
        move(generator.m_regex_table),
        move(generator.m_constants),
        node.source_code(),
        generator.m_next_property_lookup_cache,
        generator.m_next_global_variable_cache,
        generator.m_next_register,
        is_strict_mode);

    // Resolve exception handler/finalizer blocks to bytecode offsets and sort by start.
    Vector<Executable::ExceptionHandlers> linked_exception_handlers;

    for (auto& unlinked_handler : unlinked_exception_handlers) {
        auto start_offset = unlinked_handler.start_offset;
        auto end_offset = unlinked_handler.end_offset;
        auto handler_offset = unlinked_handler.handler ? block_offsets.get(unlinked_handler.handler).value() : Optional<size_t> {};
        auto finalizer_offset = unlinked_handler.finalizer ? block_offsets.get(unlinked_handler.finalizer).value() : Optional<size_t> {};
        linked_exception_handlers.append({ start_offset, end_offset, handler_offset, finalizer_offset });
    }

    quick_sort(linked_exception_handlers, [](auto const& a, auto const& b) {
        return a.start_offset < b.start_offset;
    });

    executable->exception_handlers = move(linked_exception_handlers);
    executable->basic_block_start_offsets = move(basic_block_start_offsets);
    executable->source_map = move(source_map);
    executable->local_variable_names = move(local_variable_names);
    // Locals are addressed after all registers and constants (see rewrite pass above).
    executable->local_index_base = number_of_registers + number_of_constants;

    executable->length_identifier = generator.m_length_identifier;

    generator.m_finished = true;

    return executable;
}
  399. CodeGenerationErrorOr<GC::Ref<Executable>> Generator::generate_from_ast_node(VM& vm, ASTNode const& node, FunctionKind enclosing_function_kind)
  400. {
  401. Vector<DeprecatedFlyString> local_variable_names;
  402. if (is<ScopeNode>(node))
  403. local_variable_names = static_cast<ScopeNode const&>(node).local_variables_names();
  404. return compile(vm, node, enclosing_function_kind, {}, MustPropagateCompletion::Yes, move(local_variable_names));
  405. }
  406. CodeGenerationErrorOr<GC::Ref<Executable>> Generator::generate_from_function(VM& vm, ECMAScriptFunctionObject const& function)
  407. {
  408. return compile(vm, function.ecmascript_code(), function.kind(), &function, MustPropagateCompletion::No, function.local_variables_names());
  409. }
  410. void Generator::grow(size_t additional_size)
  411. {
  412. VERIFY(m_current_basic_block);
  413. m_current_basic_block->grow(additional_size);
  414. }
  415. ScopedOperand Generator::allocate_register()
  416. {
  417. if (!m_free_registers.is_empty()) {
  418. return ScopedOperand { *this, Operand { m_free_registers.take_last() } };
  419. }
  420. VERIFY(m_next_register != NumericLimits<u32>::max());
  421. return ScopedOperand { *this, Operand { Register { m_next_register++ } } };
  422. }
// Returns `reg` to the free pool so allocate_register() can reuse it.
void Generator::free_register(Register reg)
{
    m_free_registers.append(reg);
}
  427. ScopedOperand Generator::local(u32 local_index)
  428. {
  429. return ScopedOperand { *this, Operand { Operand::Type::Local, static_cast<u32>(local_index) } };
  430. }
// RAII scope that makes `node` the generator's current AST node (used to attach
// source positions to emitted instructions); the previous node is restored on
// destruction.
Generator::SourceLocationScope::SourceLocationScope(Generator& generator, ASTNode const& node)
    : m_generator(generator)
    , m_previous_node(m_generator.m_current_ast_node)
{
    m_generator.m_current_ast_node = &node;
}
// Restores the AST node that was current before this scope was entered.
Generator::SourceLocationScope::~SourceLocationScope()
{
    m_generator.m_current_ast_node = m_previous_node;
}
// RAII unwind context: pushes itself onto the generator's unwind-context chain,
// recording an optional finalizer label for try/finally codegen.
Generator::UnwindContext::UnwindContext(Generator& generator, Optional<Label> finalizer)
    : m_generator(generator)
    , m_finalizer(finalizer)
    , m_previous_context(m_generator.m_current_unwind_context)
{
    m_generator.m_current_unwind_context = this;
}
// Pops this unwind context; contexts must be destroyed strictly LIFO.
Generator::UnwindContext::~UnwindContext()
{
    VERIFY(m_generator.m_current_unwind_context == this);
    m_generator.m_current_unwind_context = m_previous_context;
}
// Returns the jump target of the innermost active continuable scope (a loop body).
// Precondition: at least one continuable scope is active.
Label Generator::nearest_continuable_scope() const
{
    return m_continuable_scopes.last().bytecode_target;
}
  457. bool Generator::emit_block_declaration_instantiation(ScopeNode const& scope_node)
  458. {
  459. bool needs_block_declaration_instantiation = false;
  460. MUST(scope_node.for_each_lexically_scoped_declaration([&](Declaration const& declaration) {
  461. if (declaration.is_function_declaration()) {
  462. needs_block_declaration_instantiation = true;
  463. return;
  464. }
  465. MUST(declaration.for_each_bound_identifier([&](auto const& id) {
  466. if (!id.is_local())
  467. needs_block_declaration_instantiation = true;
  468. }));
  469. }));
  470. if (!needs_block_declaration_instantiation)
  471. return false;
  472. // FIXME: Generate the actual bytecode for block declaration instantiation
  473. // and get rid of the BlockDeclarationInstantiation instruction.
  474. start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
  475. emit<Bytecode::Op::BlockDeclarationInstantiation>(scope_node);
  476. return true;
  477. }
// Opens a new lexical environment and records the matching block boundary so
// break/continue codegen knows to leave it.
void Generator::begin_variable_scope()
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::CreateLexicalEnvironment>();
}
// Closes the lexical environment opened by begin_variable_scope().
// Skips the Leave instruction if the current block already terminated
// (e.g. via return/throw), since it would be unreachable.
void Generator::end_variable_scope()
{
    end_boundary(BlockBoundaryType::LeaveLexicalEnvironment);

    if (!m_current_basic_block->is_terminated()) {
        emit<Bytecode::Op::LeaveLexicalEnvironment>();
    }
}
// Pushes a continue target (with its `label:` names) for the enclosing loop and
// records the Continue block boundary.
void Generator::begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_continuable_scopes.append({ continue_target, language_label_set });
    start_boundary(BlockBoundaryType::Continue);
}
// Pops the innermost continue target; must pair with begin_continuable_scope().
void Generator::end_continuable_scope()
{
    m_continuable_scopes.take_last();
    end_boundary(BlockBoundaryType::Continue);
}
// Returns the jump target of the innermost active breakable scope (loop/switch).
// Precondition: at least one breakable scope is active.
Label Generator::nearest_breakable_scope() const
{
    return m_breakable_scopes.last().bytecode_target;
}
// Pushes a break target (with its `label:` names) and records the Break boundary.
void Generator::begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_breakable_scopes.append({ breakable_target, language_label_set });
    start_boundary(BlockBoundaryType::Break);
}
// Pops the innermost break target; must pair with begin_breakable_scope().
void Generator::end_breakable_scope()
{
    m_breakable_scopes.take_last();
    end_boundary(BlockBoundaryType::Break);
}
// Emits the operands making up a `super.prop` / `super[expr]` reference record:
// the super base, the (computed) property value if any, and the actual `this`.
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_super_reference(MemberExpression const& expression)
{
    VERIFY(is<SuperExpression>(expression.object()));

    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    // 1. Let env be GetThisEnvironment().
    // 2. Let actualThis be ? env.GetThisBinding().
    auto actual_this = get_this();

    Optional<ScopedOperand> computed_property_value;

    if (expression.is_computed()) {
        // SuperProperty : super [ Expression ]
        // 3. Let propertyNameReference be ? Evaluation of Expression.
        // 4. Let propertyNameValue be ? GetValue(propertyNameReference).
        computed_property_value = TRY(expression.property().generate_bytecode(*this)).value();
    }

    // 5/7. Return ? MakeSuperPropertyReference(actualThis, propertyKey, strict).

    // https://tc39.es/ecma262/#sec-makesuperpropertyreference
    // 1. Let env be GetThisEnvironment().
    // 2. Assert: env.HasSuperBinding() is true.
    // 3. Let baseValue be ? env.GetSuperBase().
    auto base_value = allocate_register();
    emit<Bytecode::Op::ResolveSuperBase>(base_value);

    // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
    // NOTE: For the non-computed case, referenced_name stays empty; the caller
    //       interns the identifier itself.
    return ReferenceOperands {
        .base = base_value,
        .referenced_name = computed_property_value,
        .this_value = actual_this,
    };
}
// Emits a load through a reference-producing node (Identifier or MemberExpression)
// and returns the ReferenceOperands describing base/name/this/loaded value so the
// caller can reuse them (e.g. for compound assignment or delete).
// `preferred_dst`, when provided, is used as the destination of the loaded value.
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_load_from_reference(JS::ASTNode const& node, Optional<ScopedOperand> preferred_dst)
{
    if (is<Identifier>(node)) {
        // Plain identifier: just evaluate it; no base/this to report.
        auto& identifier = static_cast<Identifier const&>(node);
        auto loaded_value = TRY(identifier.generate_bytecode(*this, preferred_dst)).value();
        return ReferenceOperands {
            .loaded_value = loaded_value,
        };
    }
    if (!is<MemberExpression>(node)) {
        return CodeGenerationError {
            &node,
            "Unimplemented/invalid node used as a reference"sv
        };
    }
    auto& expression = static_cast<MemberExpression const&>(node);

    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    if (is<SuperExpression>(expression.object())) {
        auto super_reference = TRY(emit_super_reference(expression));
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();

        if (super_reference.referenced_name.has_value()) {
            // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
            // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
            emit<Bytecode::Op::GetByValueWithThis>(dst, *super_reference.base, *super_reference.referenced_name, *super_reference.this_value);
        } else {
            // 3. Let propertyKey be StringValue of IdentifierName.
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit_get_by_id_with_this(dst, *super_reference.base, identifier_table_ref, *super_reference.this_value);
        }

        super_reference.loaded_value = dst;
        return super_reference;
    }

    auto base = TRY(expression.object().generate_bytecode(*this)).value();
    auto base_identifier = intern_identifier_for_expression(expression.object());

    if (expression.is_computed()) {
        // obj[expr]: keep a copy of the evaluated property so the caller can reuse it.
        auto property = TRY(expression.property().generate_bytecode(*this)).value();
        auto saved_property = allocate_register();
        emit<Bytecode::Op::Mov>(saved_property, property);
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit<Bytecode::Op::GetByValue>(dst, base, property, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_name = saved_property,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_identifier()) {
        // obj.name: identifier lookup goes through the (cached) GetById path.
        auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit_get_by_id(dst, base, identifier_table_ref, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_private_identifier()) {
        // obj.#name: private member access.
        auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit<Bytecode::Op::GetPrivateById>(dst, base, identifier_table_ref);
        return ReferenceOperands {
            .base = base,
            .referenced_private_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    return CodeGenerationError {
        &expression,
        "Unimplemented non-computed member expression"sv
    };
}
  616. CodeGenerationErrorOr<void> Generator::emit_store_to_reference(JS::ASTNode const& node, ScopedOperand value)
  617. {
  618. if (is<Identifier>(node)) {
  619. auto& identifier = static_cast<Identifier const&>(node);
  620. emit_set_variable(identifier, value);
  621. return {};
  622. }
  623. if (is<MemberExpression>(node)) {
  624. auto& expression = static_cast<MemberExpression const&>(node);
  625. // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
  626. if (is<SuperExpression>(expression.object())) {
  627. auto super_reference = TRY(emit_super_reference(expression));
  628. // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
  629. if (super_reference.referenced_name.has_value()) {
  630. // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
  631. // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
  632. emit<Bytecode::Op::PutByValueWithThis>(*super_reference.base, *super_reference.referenced_name, *super_reference.this_value, value);
  633. } else {
  634. // 3. Let propertyKey be StringValue of IdentifierName.
  635. auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
  636. emit<Bytecode::Op::PutByIdWithThis>(*super_reference.base, *super_reference.this_value, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  637. }
  638. } else {
  639. auto object = TRY(expression.object().generate_bytecode(*this)).value();
  640. if (expression.is_computed()) {
  641. auto property = TRY(expression.property().generate_bytecode(*this)).value();
  642. emit<Bytecode::Op::PutByValue>(object, property, value);
  643. } else if (expression.property().is_identifier()) {
  644. auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
  645. emit<Bytecode::Op::PutById>(object, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  646. } else if (expression.property().is_private_identifier()) {
  647. auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
  648. emit<Bytecode::Op::PutPrivateById>(object, identifier_table_ref, value);
  649. } else {
  650. return CodeGenerationError {
  651. &expression,
  652. "Unimplemented non-computed member expression"sv
  653. };
  654. }
  655. }
  656. return {};
  657. }
  658. return CodeGenerationError {
  659. &node,
  660. "Unimplemented/invalid node used a reference"sv
  661. };
  662. }
  663. CodeGenerationErrorOr<void> Generator::emit_store_to_reference(ReferenceOperands const& reference, ScopedOperand value)
  664. {
  665. if (reference.referenced_private_identifier.has_value()) {
  666. emit<Bytecode::Op::PutPrivateById>(*reference.base, *reference.referenced_private_identifier, value);
  667. return {};
  668. }
  669. if (reference.referenced_identifier.has_value()) {
  670. if (reference.base == reference.this_value)
  671. emit<Bytecode::Op::PutById>(*reference.base, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  672. else
  673. emit<Bytecode::Op::PutByIdWithThis>(*reference.base, *reference.this_value, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  674. return {};
  675. }
  676. if (reference.base == reference.this_value)
  677. emit<Bytecode::Op::PutByValue>(*reference.base, *reference.referenced_name, value);
  678. else
  679. emit<Bytecode::Op::PutByValueWithThis>(*reference.base, *reference.referenced_name, *reference.this_value, value);
  680. return {};
  681. }
// Emits bytecode for `delete <node>`. Returns an operand holding the boolean
// result of the deletion, or an empty Optional for the super-reference path.
CodeGenerationErrorOr<Optional<ScopedOperand>> Generator::emit_delete_reference(JS::ASTNode const& node)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        // Locals are not configurable bindings, so deleting one always yields false.
        if (identifier.is_local()) {
            return add_constant(Value(false));
        }
        auto dst = allocate_register();
        emit<Bytecode::Op::DeleteVariable>(dst, intern_identifier(identifier.string()));
        return dst;
    }
    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);
        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));
            auto dst = allocate_register();
            if (super_reference.referenced_name.has_value()) {
                emit<Bytecode::Op::DeleteByValueWithThis>(dst, *super_reference.base, *super_reference.this_value, *super_reference.referenced_name);
            } else {
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::DeleteByIdWithThis>(dst, *super_reference.base, *super_reference.this_value, identifier_table_ref);
            }
            // NOTE(review): `dst` receives the result, but an empty Optional is
            // returned here while the sibling paths return `dst` — presumably
            // because this delete always fails at runtime; confirm intent.
            return Optional<ScopedOperand> {};
        }
        auto object = TRY(expression.object().generate_bytecode(*this)).value();
        auto dst = allocate_register();
        if (expression.is_computed()) {
            auto property = TRY(expression.property().generate_bytecode(*this)).value();
            emit<Bytecode::Op::DeleteByValue>(dst, object, property);
        } else if (expression.property().is_identifier()) {
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit<Bytecode::Op::DeleteById>(dst, object, identifier_table_ref);
        } else {
            // NOTE: Trying to delete a private field generates a SyntaxError in the parser.
            return CodeGenerationError {
                &expression,
                "Unimplemented non-computed member expression"sv
            };
        }
        return dst;
    }
    // Though this will have no deletion effect, we still have to evaluate the node as it can have side effects.
    // For example: delete a(); delete ++c.b; etc.
    // 13.5.1.2 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-delete-operator-runtime-semantics-evaluation
    // 1. Let ref be the result of evaluating UnaryExpression.
    // 2. ReturnIfAbrupt(ref).
    (void)TRY(node.generate_bytecode(*this));
    // 3. If ref is not a Reference Record, return true.
    // NOTE: The rest of the steps are handled by Delete{Variable,ByValue,Id}.
    return add_constant(Value(true));
}
  734. void Generator::emit_set_variable(JS::Identifier const& identifier, ScopedOperand value, Bytecode::Op::BindingInitializationMode initialization_mode, Bytecode::Op::EnvironmentMode environment_mode)
  735. {
  736. if (identifier.is_local()) {
  737. if (value.operand().is_local() && value.operand().index() == identifier.local_variable_index()) {
  738. // Moving a local to itself is a no-op.
  739. return;
  740. }
  741. emit<Bytecode::Op::Mov>(local(identifier.local_variable_index()), value);
  742. } else {
  743. auto identifier_index = intern_identifier(identifier.string());
  744. if (environment_mode == Bytecode::Op::EnvironmentMode::Lexical) {
  745. if (initialization_mode == Bytecode::Op::BindingInitializationMode::Initialize) {
  746. emit<Bytecode::Op::InitializeLexicalBinding>(identifier_index, value);
  747. } else if (initialization_mode == Bytecode::Op::BindingInitializationMode::Set) {
  748. emit<Bytecode::Op::SetLexicalBinding>(identifier_index, value);
  749. }
  750. } else if (environment_mode == Bytecode::Op::EnvironmentMode::Var) {
  751. if (initialization_mode == Bytecode::Op::BindingInitializationMode::Initialize) {
  752. emit<Bytecode::Op::InitializeVariableBinding>(identifier_index, value);
  753. } else if (initialization_mode == Bytecode::Op::BindingInitializationMode::Set) {
  754. emit<Bytecode::Op::SetVariableBinding>(identifier_index, value);
  755. }
  756. } else {
  757. VERIFY_NOT_REACHED();
  758. }
  759. }
  760. }
  761. static Optional<ByteString> expression_identifier(Expression const& expression)
  762. {
  763. if (expression.is_identifier()) {
  764. auto const& identifier = static_cast<Identifier const&>(expression);
  765. return identifier.string();
  766. }
  767. if (expression.is_numeric_literal()) {
  768. auto const& literal = static_cast<NumericLiteral const&>(expression);
  769. return literal.value().to_string_without_side_effects().to_byte_string();
  770. }
  771. if (expression.is_string_literal()) {
  772. auto const& literal = static_cast<StringLiteral const&>(expression);
  773. return ByteString::formatted("'{}'", literal.value());
  774. }
  775. if (expression.is_member_expression()) {
  776. auto const& member_expression = static_cast<MemberExpression const&>(expression);
  777. StringBuilder builder;
  778. if (auto identifer = expression_identifier(member_expression.object()); identifer.has_value())
  779. builder.append(*identifer);
  780. if (auto identifer = expression_identifier(member_expression.property()); identifer.has_value()) {
  781. if (member_expression.is_computed())
  782. builder.appendff("[{}]", *identifer);
  783. else
  784. builder.appendff(".{}", *identifer);
  785. }
  786. return builder.to_byte_string();
  787. }
  788. return {};
  789. }
  790. Optional<IdentifierTableIndex> Generator::intern_identifier_for_expression(Expression const& expression)
  791. {
  792. if (auto identifer = expression_identifier(expression); identifer.has_value())
  793. return intern_identifier(identifer.release_value());
  794. return {};
  795. }
// Emits the bytecode for an unlabelled `break`/`continue`: walks the
// block-boundary stack from innermost outwards, emitting the cleanup each
// boundary requires (leaving unwind contexts and lexical environments,
// routing through pending `finally` blocks), then jumps to the nearest
// breakable/continuable scope.
void Generator::generate_scoped_jump(JumpType type)
{
    // m_current_unwind_context is mutated while walking; restore it on exit.
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    bool last_was_finally = false;
    for (size_t i = m_boundaries.size(); i > 0; --i) {
        auto boundary = m_boundaries[i - 1];
        using enum BlockBoundaryType;
        switch (boundary) {
        case Break:
            if (type == JumpType::Break) {
                emit<Op::Jump>(nearest_breakable_scope());
                return;
            }
            break;
        case Continue:
            if (type == JumpType::Continue) {
                emit<Op::Jump>(nearest_continuable_scope());
                return;
            }
            break;
        case Unwind:
            // If the previous boundary was a finalizer, ReturnToFinally already
            // popped this unwind context for us.
            if (!last_was_finally) {
                VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                emit<Bytecode::Op::LeaveUnwindContext>();
                m_current_unwind_context = m_current_unwind_context->previous();
            }
            last_was_finally = false;
            break;
        case LeaveLexicalEnvironment:
            emit<Bytecode::Op::LeaveLexicalEnvironment>();
            break;
        case ReturnToFinally: {
            // Route the jump through the pending `finally`: schedule a jump to a
            // fresh continuation block and keep emitting cleanup there.
            VERIFY(m_current_unwind_context->finalizer().has_value());
            m_current_unwind_context = m_current_unwind_context->previous();
            auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
            auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
            auto& block = make_block(block_name);
            emit<Op::ScheduleJump>(Label { block });
            switch_to_basic_block(block);
            last_was_finally = true;
            break;
        }
        case LeaveFinally:
            emit<Op::LeaveFinally>();
            break;
        }
    }
    // The parser only allows break/continue inside a matching scope.
    VERIFY_NOT_REACHED();
}
// Emits the bytecode for a labelled `break`/`continue`: walks the jumpable
// scopes (breakable or continuable, depending on `type`) from innermost
// outwards until one contains `label`, emitting the cleanup required by each
// block boundary crossed along the way.
void Generator::generate_labelled_jump(JumpType type, DeprecatedFlyString const& label)
{
    // m_current_unwind_context is mutated while walking; restore it on exit.
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    size_t current_boundary = m_boundaries.size();
    bool last_was_finally = false;
    auto const& jumpable_scopes = type == JumpType::Continue ? m_continuable_scopes : m_breakable_scopes;
    for (auto const& jumpable_scope : jumpable_scopes.in_reverse()) {
        // Consume boundaries up to (and including) the one owned by this scope.
        for (; current_boundary > 0; --current_boundary) {
            auto boundary = m_boundaries[current_boundary - 1];
            if (boundary == BlockBoundaryType::Unwind) {
                // If the previous boundary was a finalizer, ReturnToFinally already
                // popped this unwind context for us.
                if (!last_was_finally) {
                    VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                    emit<Bytecode::Op::LeaveUnwindContext>();
                    m_current_unwind_context = m_current_unwind_context->previous();
                }
                last_was_finally = false;
            } else if (boundary == BlockBoundaryType::LeaveLexicalEnvironment) {
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
            } else if (boundary == BlockBoundaryType::ReturnToFinally) {
                // Route the jump through the pending `finally` block.
                VERIFY(m_current_unwind_context->finalizer().has_value());
                m_current_unwind_context = m_current_unwind_context->previous();
                auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
                auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
                auto& block = make_block(block_name);
                emit<Op::ScheduleJump>(Label { block });
                switch_to_basic_block(block);
                last_was_finally = true;
            } else if ((type == JumpType::Continue && boundary == BlockBoundaryType::Continue) || (type == JumpType::Break && boundary == BlockBoundaryType::Break)) {
                // Make sure we don't process this boundary twice if the current jumpable scope doesn't contain the target label.
                --current_boundary;
                break;
            }
        }
        if (jumpable_scope.language_label_set.contains_slow(label)) {
            emit<Op::Jump>(jumpable_scope.bytecode_target);
            return;
        }
    }
    // We must have a jumpable scope available that contains the label, as this should be enforced by the parser.
    VERIFY_NOT_REACHED();
}
// Emits an unlabelled `break` (jumps to the nearest breakable scope with cleanup).
void Generator::generate_break()
{
    generate_scoped_jump(JumpType::Break);
}
// Emits a labelled `break` targeting the scope carrying `break_label`.
void Generator::generate_break(DeprecatedFlyString const& break_label)
{
    generate_labelled_jump(JumpType::Break, break_label);
}
// Emits an unlabelled `continue` (jumps to the nearest continuable scope with cleanup).
void Generator::generate_continue()
{
    generate_scoped_jump(JumpType::Continue);
}
// Emits a labelled `continue` targeting the scope carrying `continue_label`.
void Generator::generate_continue(DeprecatedFlyString const& continue_label)
{
    generate_labelled_jump(JumpType::Continue, continue_label);
}
// Pushes `object` as the innermost home object (paired with pop_home_object).
void Generator::push_home_object(ScopedOperand object)
{
    m_home_objects.append(object);
}
// Pops the innermost home object pushed by push_home_object.
void Generator::pop_home_object()
{
    m_home_objects.take_last();
}
  910. void Generator::emit_new_function(ScopedOperand dst, FunctionExpression const& function_node, Optional<IdentifierTableIndex> lhs_name)
  911. {
  912. if (m_home_objects.is_empty()) {
  913. emit<Op::NewFunction>(dst, function_node, lhs_name);
  914. } else {
  915. emit<Op::NewFunction>(dst, function_node, lhs_name, m_home_objects.last());
  916. }
  917. }
  918. CodeGenerationErrorOr<Optional<ScopedOperand>> Generator::emit_named_evaluation_if_anonymous_function(Expression const& expression, Optional<IdentifierTableIndex> lhs_name, Optional<ScopedOperand> preferred_dst)
  919. {
  920. if (is<FunctionExpression>(expression)) {
  921. auto const& function_expression = static_cast<FunctionExpression const&>(expression);
  922. if (!function_expression.has_name()) {
  923. return TRY(function_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
  924. }
  925. }
  926. if (is<ClassExpression>(expression)) {
  927. auto const& class_expression = static_cast<ClassExpression const&>(expression);
  928. if (!class_expression.has_name()) {
  929. return TRY(class_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
  930. }
  931. }
  932. return expression.generate_bytecode(*this, preferred_dst);
  933. }
  934. void Generator::emit_get_by_id(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier)
  935. {
  936. if (m_identifier_table->get(property_identifier) == "length"sv) {
  937. m_length_identifier = property_identifier;
  938. emit<Op::GetLength>(dst, base, move(base_identifier), m_next_property_lookup_cache++);
  939. return;
  940. }
  941. emit<Op::GetById>(dst, base, property_identifier, move(base_identifier), m_next_property_lookup_cache++);
  942. }
  943. void Generator::emit_get_by_id_with_this(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex id, ScopedOperand this_value)
  944. {
  945. if (m_identifier_table->get(id) == "length"sv) {
  946. emit<Op::GetLengthWithThis>(dst, base, this_value, m_next_property_lookup_cache++);
  947. return;
  948. }
  949. emit<Op::GetByIdWithThis>(dst, base, id, this_value, m_next_property_lookup_cache++);
  950. }
// Loads iterator-result.value into dst (iterator protocol helper).
void Generator::emit_iterator_value(ScopedOperand dst, ScopedOperand result)
{
    emit_get_by_id(dst, result, intern_identifier("value"sv));
}
// Loads iterator-result.done into dst (iterator protocol helper).
void Generator::emit_iterator_complete(ScopedOperand dst, ScopedOperand result)
{
    emit_get_by_id(dst, result, intern_identifier("done"sv));
}
  959. bool Generator::is_local_initialized(u32 local_index) const
  960. {
  961. return m_initialized_locals.find(local_index) != m_initialized_locals.end();
  962. }
// Marks the local at `local_index` as initialized (see is_local_initialized).
void Generator::set_local_initialized(u32 local_index)
{
    m_initialized_locals.set(local_index);
}
  967. ScopedOperand Generator::get_this(Optional<ScopedOperand> preferred_dst)
  968. {
  969. if (m_current_basic_block->has_resolved_this())
  970. return this_value();
  971. if (m_root_basic_blocks[0]->has_resolved_this()) {
  972. m_current_basic_block->set_has_resolved_this();
  973. return this_value();
  974. }
  975. // OPTIMIZATION: If we're compiling a function that doesn't allocate a FunctionEnvironment,
  976. // it will always have the same `this` value as the outer function,
  977. // and so the `this` value is already in the `this` register!
  978. if (m_function && !m_function->allocates_function_environment())
  979. return this_value();
  980. auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
  981. emit<Bytecode::Op::ResolveThisBinding>();
  982. m_current_basic_block->set_has_resolved_this();
  983. return this_value();
  984. }
// Returns the operand for the dedicated accumulator register.
ScopedOperand Generator::accumulator()
{
    return m_accumulator;
}
// Returns the operand for the dedicated `this` register.
ScopedOperand Generator::this_value()
{
    return m_this_value;
}
// Peephole optimization: if the last emitted instruction is a comparison whose
// destination is `condition`, rewind the block and replace the compare+jump
// pair with a single fused Jump<Comparison> instruction. Returns true if the
// fusion happened. Callers must guarantee `condition` has no other users
// (checked by emit_jump_if before calling in here).
bool Generator::fuse_compare_and_jump(ScopedOperand const& condition, Label true_target, Label false_target)
{
    // Reinterpret the tail of the block's buffer as the last instruction.
    auto& last_instruction = *reinterpret_cast<Instruction const*>(m_current_basic_block->data() + m_current_basic_block->last_instruction_start_offset());
#define HANDLE_COMPARISON_OP(op_TitleCase, op_snake_case, numeric_operator)        \
    if (last_instruction.type() == Instruction::Type::op_TitleCase) {              \
        auto& comparison = static_cast<Op::op_TitleCase const&>(last_instruction); \
        VERIFY(comparison.dst() == condition);                                     \
        auto lhs = comparison.lhs();                                               \
        auto rhs = comparison.rhs();                                               \
        m_current_basic_block->rewind();                                           \
        emit<Op::Jump##op_TitleCase>(lhs, rhs, true_target, false_target);         \
        return true;                                                               \
    }
    JS_ENUMERATE_COMPARISON_OPS(HANDLE_COMPARISON_OP);
#undef HANDLE_COMPARISON_OP
    return false;
}
  1010. void Generator::emit_jump_if(ScopedOperand const& condition, Label true_target, Label false_target)
  1011. {
  1012. if (condition.operand().is_constant()) {
  1013. auto value = m_constants[condition.operand().index()];
  1014. if (value.is_boolean()) {
  1015. if (value.as_bool()) {
  1016. emit<Op::Jump>(true_target);
  1017. } else {
  1018. emit<Op::Jump>(false_target);
  1019. }
  1020. return;
  1021. }
  1022. }
  1023. // NOTE: It's only safe to fuse compare-and-jump if the condition is a temporary with no other dependents.
  1024. if (condition.operand().is_register()
  1025. && condition.ref_count() == 1
  1026. && m_current_basic_block->size() > 0) {
  1027. if (fuse_compare_and_jump(condition, true_target, false_target))
  1028. return;
  1029. }
  1030. emit<Op::JumpIf>(condition, true_target, false_target);
  1031. }
  1032. ScopedOperand Generator::copy_if_needed_to_preserve_evaluation_order(ScopedOperand const& operand)
  1033. {
  1034. if (!operand.operand().is_local())
  1035. return operand;
  1036. auto new_register = allocate_register();
  1037. emit<Bytecode::Op::Mov>(new_register, operand);
  1038. return new_register;
  1039. }
// Adds `value` to the constant table and returns an operand referring to it.
// Singleton values (true/false/undefined/null/empty) and int32 values are
// cached so repeated use shares a single constant-table slot; everything else
// gets a fresh entry.
ScopedOperand Generator::add_constant(Value value)
{
    auto append_new_constant = [&] {
        m_constants.append(value);
        return ScopedOperand { *this, Operand(Operand::Type::Constant, m_constants.size() - 1) };
    };
    if (value.is_boolean()) {
        if (value.as_bool()) {
            if (!m_true_constant.has_value())
                m_true_constant = append_new_constant();
            return m_true_constant.value();
        } else {
            if (!m_false_constant.has_value())
                m_false_constant = append_new_constant();
            return m_false_constant.value();
        }
    }
    if (value.is_undefined()) {
        if (!m_undefined_constant.has_value())
            m_undefined_constant = append_new_constant();
        return m_undefined_constant.value();
    }
    if (value.is_null()) {
        if (!m_null_constant.has_value())
            m_null_constant = append_new_constant();
        return m_null_constant.value();
    }
    if (value.is_empty()) {
        if (!m_empty_constant.has_value())
            m_empty_constant = append_new_constant();
        return m_empty_constant.value();
    }
    if (value.is_int32()) {
        // Int32 constants are deduplicated via a hash map keyed on the value.
        auto as_int32 = value.as_i32();
        return m_int32_constants.ensure(as_int32, [&] {
            return append_new_constant();
        });
    }
    return append_new_constant();
}
  1080. }