Generator.cpp 34 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841
  1. /*
  2. * Copyright (c) 2021-2024, Andreas Kling <kling@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/QuickSort.h>
  7. #include <AK/TemporaryChange.h>
  8. #include <LibJS/AST.h>
  9. #include <LibJS/Bytecode/BasicBlock.h>
  10. #include <LibJS/Bytecode/Generator.h>
  11. #include <LibJS/Bytecode/Instruction.h>
  12. #include <LibJS/Bytecode/Op.h>
  13. #include <LibJS/Bytecode/Register.h>
  14. #include <LibJS/Runtime/VM.h>
  15. namespace JS::Bytecode {
// Sets up the per-compilation side tables (strings, identifiers, regexes),
// the GC-aware constants pool (hence vm.heap()), and a ScopedOperand that is
// permanently pinned to the accumulator register.
Generator::Generator(VM& vm)
    : m_vm(vm)
    , m_string_table(make<StringTable>())
    , m_identifier_table(make<IdentifierTable>())
    , m_regex_table(make<RegexTable>())
    , m_constants(vm.heap())
    , m_accumulator(*this, Operand(Register::accumulator()))
{
}
// Compiles `node` into a flat Executable:
//   1) generates bytecode into a list of basic blocks,
//   2) lays the blocks out contiguously (eliding jumps that merely fall
//      through to the next block),
//   3) patches every embedded Label with its final byte offset, and
//   4) links exception-handler/finalizer ranges and source-map entries.
CodeGenerationErrorOr<NonnullGCPtr<Executable>> Generator::generate(VM& vm, ASTNode const& node, ReadonlySpan<FunctionParameter> parameters, FunctionKind enclosing_function_kind)
{
    Generator generator(vm);

    // Parameters bound to locals are initialized on entry.
    for (auto const& parameter : parameters) {
        if (auto const* identifier = parameter.binding.get_pointer<NonnullRefPtr<Identifier const>>();
            identifier && (*identifier)->is_local()) {
            generator.set_local_initialized((*identifier)->local_variable_index());
        }
    }

    generator.switch_to_basic_block(generator.make_block());
    SourceLocationScope scope(generator, node);
    generator.m_enclosing_function_kind = enclosing_function_kind;
    if (generator.is_in_generator_or_async_function()) {
        // Immediately yield with no value.
        auto& start_block = generator.make_block();
        generator.emit<Bytecode::Op::Yield>(Label { start_block }, generator.add_constant(js_undefined()));
        generator.switch_to_basic_block(start_block);
        // NOTE: This doesn't have to handle received throw/return completions, as GeneratorObject::resume_abrupt
        //       will not enter the generator from the SuspendedStart state and immediately completes the generator.
    }

    auto last_value = TRY(node.generate_bytecode(generator));

    if (!generator.current_block().is_terminated() && last_value.has_value()) {
        generator.emit<Bytecode::Op::End>(last_value.value());
    }

    if (generator.is_in_generator_or_async_function()) {
        // Terminate all unterminated blocks with yield return
        for (auto& block : generator.m_root_basic_blocks) {
            if (block->is_terminated())
                continue;
            generator.switch_to_basic_block(*block);
            generator.emit<Bytecode::Op::Yield>(nullptr, generator.add_constant(js_undefined()));
        }
    }

    // Strict mode is a property of the specific node kind being compiled.
    bool is_strict_mode = false;
    if (is<Program>(node))
        is_strict_mode = static_cast<Program const&>(node).is_strict_mode();
    else if (is<FunctionBody>(node))
        is_strict_mode = static_cast<FunctionBody const&>(node).in_strict_mode();
    else if (is<FunctionDeclaration>(node))
        is_strict_mode = static_cast<FunctionDeclaration const&>(node).is_strict_mode();
    else if (is<FunctionExpression>(node))
        is_strict_mode = static_cast<FunctionExpression const&>(node).is_strict_mode();

    // Upper bound for the final stream; the jump elision below may shrink it.
    size_t size_needed = 0;
    for (auto& block : generator.m_root_basic_blocks) {
        size_needed += block->size();
    }

    Vector<u8> bytecode;
    bytecode.ensure_capacity(size_needed);

    Vector<size_t> basic_block_start_offsets;
    basic_block_start_offsets.ensure_capacity(generator.m_root_basic_blocks.size());

    HashMap<BasicBlock const*, size_t> block_offsets;

    // Byte offsets (into `bytecode`) of every copied Label that still needs
    // its final address written once all block offsets are known.
    Vector<size_t> label_offsets;

    // Handler/finalizer ranges recorded by block pointer during layout;
    // resolved to byte offsets afterwards.
    struct UnlinkedExceptionHandlers {
        size_t start_offset;
        size_t end_offset;
        BasicBlock const* handler;
        BasicBlock const* finalizer;
    };
    Vector<UnlinkedExceptionHandlers> unlinked_exception_handlers;

    HashMap<size_t, SourceRecord> source_map;

    for (auto& block : generator.m_root_basic_blocks) {
        basic_block_start_offsets.append(bytecode.size());
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.append({
                .start_offset = bytecode.size(),
                .end_offset = 0, // Filled in when the block's bytes are fully emitted.
                .handler = block->handler(),
                .finalizer = block->finalizer(),
            });
        }

        block_offsets.set(block.ptr(), bytecode.size());

        // Rebase the block-relative source records onto the final stream.
        for (auto& [offset, source_record] : block->source_map()) {
            source_map.set(bytecode.size() + offset, source_record);
        }

        Bytecode::InstructionStreamIterator it(block->instruction_stream());
        while (!it.at_end()) {
            auto& instruction = const_cast<Instruction&>(*it);

            // OPTIMIZATION: Don't emit jumps that just jump to the next block.
            if (instruction.type() == Instruction::Type::Jump) {
                auto& jump = static_cast<Bytecode::Op::Jump&>(instruction);
                if (jump.target().basic_block_index() == block->index() + 1) {
                    if (basic_block_start_offsets.last() == bytecode.size()) {
                        // This block is empty, just skip it.
                        basic_block_start_offsets.take_last();
                    }
                    ++it;
                    continue;
                }
            }

            // OPTIMIZATION: For `JumpIf` where one of the targets is the very next block,
            //               we can emit a `JumpTrue` or `JumpFalse` (to the other block) instead.
            if (instruction.type() == Instruction::Type::JumpIf) {
                auto& jump = static_cast<Bytecode::Op::JumpIf&>(instruction);
                if (jump.true_target().basic_block_index() == block->index() + 1) {
                    Op::JumpFalse jump_false(jump.condition(), Label { jump.false_target() });
                    auto& label = jump_false.target();
                    // Record where the Label lands inside the copied instruction bytes.
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_false));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_false), jump_false.length());
                    ++it;
                    continue;
                }
                if (jump.false_target().basic_block_index() == block->index() + 1) {
                    Op::JumpTrue jump_true(jump.condition(), Label { jump.true_target() });
                    auto& label = jump_true.target();
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_true));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_true), jump_true.length());
                    ++it;
                    continue;
                }
            }

            // Remember every Label embedded in this instruction for patching below.
            instruction.visit_labels([&](Label& label) {
                size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&instruction));
                label_offsets.append(label_offset);
            });
            bytecode.append(reinterpret_cast<u8 const*>(&instruction), instruction.length());
            ++it;
        }
        if (!block->is_terminated()) {
            // Blocks that fall off the end implicitly return undefined.
            Op::End end(generator.add_constant(js_undefined()));
            bytecode.append(reinterpret_cast<u8 const*>(&end), end.length());
        }
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.last().end_offset = bytecode.size();
        }
    }

    // All block offsets are now final; patch the recorded Labels in place.
    for (auto label_offset : label_offsets) {
        auto& label = *reinterpret_cast<Label*>(bytecode.data() + label_offset);
        auto* block = generator.m_root_basic_blocks[label.basic_block_index()].ptr();
        label.set_address(block_offsets.get(block).value());
    }

    auto executable = vm.heap().allocate_without_realm<Executable>(
        move(bytecode),
        move(generator.m_identifier_table),
        move(generator.m_string_table),
        move(generator.m_regex_table),
        move(generator.m_constants),
        node.source_code(),
        generator.m_next_property_lookup_cache,
        generator.m_next_global_variable_cache,
        generator.m_next_environment_variable_cache,
        generator.m_next_register,
        is_strict_mode);

    // Resolve handler/finalizer block pointers into byte offsets.
    Vector<Executable::ExceptionHandlers> linked_exception_handlers;
    for (auto& unlinked_handler : unlinked_exception_handlers) {
        auto start_offset = unlinked_handler.start_offset;
        auto end_offset = unlinked_handler.end_offset;
        auto handler_offset = unlinked_handler.handler ? block_offsets.get(unlinked_handler.handler).value() : Optional<size_t> {};
        auto finalizer_offset = unlinked_handler.finalizer ? block_offsets.get(unlinked_handler.finalizer).value() : Optional<size_t> {};
        linked_exception_handlers.append({ start_offset, end_offset, handler_offset, finalizer_offset });
    }

    // Keep the handler table ordered by start offset.
    quick_sort(linked_exception_handlers, [](auto const& a, auto const& b) {
        return a.start_offset < b.start_offset;
    });

    executable->exception_handlers = move(linked_exception_handlers);
    executable->basic_block_start_offsets = move(basic_block_start_offsets);
    executable->source_map = move(source_map);

    generator.m_finished = true;

    return executable;
}
// Reserves `additional_size` more bytes in the current basic block's
// instruction buffer. Only valid while a block is active.
void Generator::grow(size_t additional_size)
{
    VERIFY(m_current_basic_block);
    m_current_basic_block->grow(additional_size);
}

// Hands out a brand-new register index (never reuses a freed one).
// The VERIFY guards against exhausting the u32 register namespace.
ScopedOperand Generator::allocate_sequential_register()
{
    VERIFY(m_next_register != NumericLimits<u32>::max());
    return ScopedOperand { *this, Operand { Register { m_next_register++ } } };
}
  196. ScopedOperand Generator::allocate_register()
  197. {
  198. if (!m_free_registers.is_empty()) {
  199. return ScopedOperand { *this, Operand { m_free_registers.take_last() } };
  200. }
  201. return allocate_sequential_register();
  202. }
// Returns `reg` to the free list so allocate_register() can reuse it.
void Generator::free_register(Register reg)
{
    m_free_registers.append(reg);
}
  207. ScopedOperand Generator::local(u32 local_index)
  208. {
  209. return ScopedOperand { *this, Operand { Operand::Type::Local, static_cast<u32>(local_index) } };
  210. }
// RAII helper: while alive, the generator attributes emitted bytecode to
// `node`; the previously current AST node is restored on destruction.
Generator::SourceLocationScope::SourceLocationScope(Generator& generator, ASTNode const& node)
    : m_generator(generator)
    , m_previous_node(m_generator.m_current_ast_node)
{
    m_generator.m_current_ast_node = &node;
}

Generator::SourceLocationScope::~SourceLocationScope()
{
    // Restore whichever node was current before this scope was entered.
    m_generator.m_current_ast_node = m_previous_node;
}
// RAII node in the stack of active unwind contexts (try/catch/finally).
// Construction links this context in front of the previous one; destruction
// unlinks it and verifies the stack discipline was respected.
Generator::UnwindContext::UnwindContext(Generator& generator, Optional<Label> finalizer)
    : m_generator(generator)
    , m_finalizer(finalizer)
    , m_previous_context(m_generator.m_current_unwind_context)
{
    m_generator.m_current_unwind_context = this;
}

Generator::UnwindContext::~UnwindContext()
{
    // Contexts must be destroyed strictly LIFO.
    VERIFY(m_generator.m_current_unwind_context == this);
    m_generator.m_current_unwind_context = m_previous_context;
}
// Jump target of the innermost `continue`-able construct.
Label Generator::nearest_continuable_scope() const
{
    return m_continuable_scopes.last().bytecode_target;
}
// Emits block declaration instantiation for `scope_node` and records that the
// lexical environment it creates must be left when control exits this region
// (via end_boundary / scoped jumps).
void Generator::block_declaration_instantiation(ScopeNode const& scope_node)
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::BlockDeclarationInstantiation>(scope_node);
}

// Opens a fresh lexical environment and marks the matching boundary so
// cleanup code knows to leave it again.
void Generator::begin_variable_scope()
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::CreateLexicalEnvironment>();
}
  247. void Generator::end_variable_scope()
  248. {
  249. end_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
  250. if (!m_current_basic_block->is_terminated()) {
  251. emit<Bytecode::Op::LeaveLexicalEnvironment>();
  252. }
  253. }
// Registers a new `continue` target (with the statement labels that name it)
// and marks the Continue boundary for scoped-jump cleanup.
void Generator::begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_continuable_scopes.append({ continue_target, language_label_set });
    start_boundary(BlockBoundaryType::Continue);
}

void Generator::end_continuable_scope()
{
    m_continuable_scopes.take_last();
    end_boundary(BlockBoundaryType::Continue);
}

// Jump target of the innermost `break`-able construct.
Label Generator::nearest_breakable_scope() const
{
    return m_breakable_scopes.last().bytecode_target;
}

// Mirror of begin_continuable_scope() for `break` targets.
void Generator::begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_breakable_scopes.append({ breakable_target, language_label_set });
    start_boundary(BlockBoundaryType::Break);
}

void Generator::end_breakable_scope()
{
    m_breakable_scopes.take_last();
    end_boundary(BlockBoundaryType::Break);
}
// Materializes the operands of a `super.x` / `super[x]` reference: the `this`
// binding, the super base object, and (for computed access) the property
// value. The referenced_name stays empty for non-computed access; callers
// intern the identifier themselves.
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_super_reference(MemberExpression const& expression)
{
    VERIFY(is<SuperExpression>(expression.object()));

    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    // 1. Let env be GetThisEnvironment().
    // 2. Let actualThis be ? env.GetThisBinding().
    auto actual_this = allocate_register();
    emit<Bytecode::Op::ResolveThisBinding>(actual_this);

    Optional<ScopedOperand> computed_property_value;
    if (expression.is_computed()) {
        // SuperProperty : super [ Expression ]
        // 3. Let propertyNameReference be ? Evaluation of Expression.
        // 4. Let propertyNameValue be ? GetValue(propertyNameReference).
        computed_property_value = TRY(expression.property().generate_bytecode(*this)).value();
    }

    // 5/7. Return ? MakeSuperPropertyReference(actualThis, propertyKey, strict).

    // https://tc39.es/ecma262/#sec-makesuperpropertyreference
    // 1. Let env be GetThisEnvironment().
    // 2. Assert: env.HasSuperBinding() is true.
    // 3. Let baseValue be ? env.GetSuperBase().
    auto base_value = allocate_register();
    emit<Bytecode::Op::ResolveSuperBase>(base_value);

    // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
    return ReferenceOperands {
        .base = base_value,
        .referenced_name = computed_property_value,
        .this_value = actual_this,
    };
}
// Loads the value behind a reference-producing AST node (Identifier or
// MemberExpression) and returns the operands describing the reference so
// callers (compound assignment, method calls, ...) can reuse base/this/key.
// `preferred_dst`, when provided, receives the loaded value.
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_load_from_reference(JS::ASTNode const& node, Optional<ScopedOperand> preferred_dst)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        auto loaded_value = TRY(identifier.generate_bytecode(*this, preferred_dst)).value();
        return ReferenceOperands {
            .loaded_value = loaded_value,
        };
    }
    if (!is<MemberExpression>(node)) {
        return CodeGenerationError {
            &node,
            "Unimplemented/invalid node used as a reference"sv
        };
    }
    auto& expression = static_cast<MemberExpression const&>(node);

    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    if (is<SuperExpression>(expression.object())) {
        auto super_reference = TRY(emit_super_reference(expression));
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();

        if (super_reference.referenced_name.has_value()) {
            // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
            // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
            emit<Bytecode::Op::GetByValueWithThis>(dst, *super_reference.base, *super_reference.referenced_name, *super_reference.this_value);
        } else {
            // 3. Let propertyKey be StringValue of IdentifierName.
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit_get_by_id_with_this(dst, *super_reference.base, identifier_table_ref, *super_reference.this_value);
        }

        super_reference.loaded_value = dst;
        return super_reference;
    }

    auto base = TRY(expression.object().generate_bytecode(*this)).value();
    // Best-effort textual name of the base expression (used by GetByValue/GetById).
    auto base_identifier = intern_identifier_for_expression(expression.object());

    if (expression.is_computed()) {
        auto property = TRY(expression.property().generate_bytecode(*this)).value();
        auto saved_property = allocate_register();
        // NOTE(review): the key is copied into its own register and the copy is
        // returned as referenced_name — presumably so the key survives for
        // callers that reuse the reference; confirm against call sites.
        emit<Bytecode::Op::Mov>(saved_property, property);
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit<Bytecode::Op::GetByValue>(dst, base, property, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_name = saved_property,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_identifier()) {
        auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit_get_by_id(dst, base, identifier_table_ref, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_private_identifier()) {
        auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit<Bytecode::Op::GetPrivateById>(dst, base, identifier_table_ref);
        return ReferenceOperands {
            .base = base,
            .referenced_private_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    return CodeGenerationError {
        &expression,
        "Unimplemented non-computed member expression"sv
    };
}
  381. CodeGenerationErrorOr<void> Generator::emit_store_to_reference(JS::ASTNode const& node, ScopedOperand value)
  382. {
  383. if (is<Identifier>(node)) {
  384. auto& identifier = static_cast<Identifier const&>(node);
  385. emit_set_variable(identifier, value);
  386. return {};
  387. }
  388. if (is<MemberExpression>(node)) {
  389. auto& expression = static_cast<MemberExpression const&>(node);
  390. // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
  391. if (is<SuperExpression>(expression.object())) {
  392. auto super_reference = TRY(emit_super_reference(expression));
  393. // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
  394. if (super_reference.referenced_name.has_value()) {
  395. // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
  396. // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
  397. emit<Bytecode::Op::PutByValueWithThis>(*super_reference.base, *super_reference.referenced_name, *super_reference.this_value, value);
  398. } else {
  399. // 3. Let propertyKey be StringValue of IdentifierName.
  400. auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
  401. emit<Bytecode::Op::PutByIdWithThis>(*super_reference.base, *super_reference.this_value, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  402. }
  403. } else {
  404. auto object = TRY(expression.object().generate_bytecode(*this)).value();
  405. if (expression.is_computed()) {
  406. auto property = TRY(expression.property().generate_bytecode(*this)).value();
  407. emit<Bytecode::Op::PutByValue>(object, property, value);
  408. } else if (expression.property().is_identifier()) {
  409. auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
  410. emit<Bytecode::Op::PutById>(object, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  411. } else if (expression.property().is_private_identifier()) {
  412. auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
  413. emit<Bytecode::Op::PutPrivateById>(object, identifier_table_ref, value);
  414. } else {
  415. return CodeGenerationError {
  416. &expression,
  417. "Unimplemented non-computed member expression"sv
  418. };
  419. }
  420. }
  421. return {};
  422. }
  423. return CodeGenerationError {
  424. &node,
  425. "Unimplemented/invalid node used a reference"sv
  426. };
  427. }
// Stores `value` through an already-materialized reference, picking the Put*
// opcode that matches how the reference was produced (private identifier,
// interned identifier, or computed key), and the *WithThis variant when the
// reference's this_value differs from its base (super references).
CodeGenerationErrorOr<void> Generator::emit_store_to_reference(ReferenceOperands const& reference, ScopedOperand value)
{
    if (reference.referenced_private_identifier.has_value()) {
        emit<Bytecode::Op::PutPrivateById>(*reference.base, *reference.referenced_private_identifier, value);
        return {};
    }
    if (reference.referenced_identifier.has_value()) {
        if (reference.base == reference.this_value)
            emit<Bytecode::Op::PutById>(*reference.base, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
        else
            emit<Bytecode::Op::PutByIdWithThis>(*reference.base, *reference.this_value, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
        return {};
    }
    // Computed key: referenced_name holds the saved property value.
    if (reference.base == reference.this_value)
        emit<Bytecode::Op::PutByValue>(*reference.base, *reference.referenced_name, value);
    else
        emit<Bytecode::Op::PutByValueWithThis>(*reference.base, *reference.referenced_name, *reference.this_value, value);
    return {};
}
// Compiles `delete <node>`.
// - `delete local` is always false (locals are not deletable).
// - `delete identifier` → DeleteVariable.
// - `delete obj.prop` / `delete obj[expr]` → the matching DeleteBy* opcode.
// - Any other node is evaluated for side effects only and yields true.
// Returns the operand holding the boolean result, or an empty Optional on the
// super-property path.
CodeGenerationErrorOr<Optional<ScopedOperand>> Generator::emit_delete_reference(JS::ASTNode const& node)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        if (identifier.is_local()) {
            return add_constant(Value(false));
        }
        auto dst = allocate_register();
        emit<Bytecode::Op::DeleteVariable>(dst, intern_identifier(identifier.string()));
        return dst;
    }
    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);

        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));
            auto dst = Operand(allocate_register());
            if (super_reference.referenced_name.has_value()) {
                emit<Bytecode::Op::DeleteByValueWithThis>(dst, *super_reference.base, *super_reference.this_value, *super_reference.referenced_name);
            } else {
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::DeleteByIdWithThis>(dst, *super_reference.base, *super_reference.this_value, identifier_table_ref);
            }
            // NOTE(review): `dst` is computed but not returned here — the super
            // path deliberately yields an empty Optional; confirm callers don't
            // rely on the delete result in this case.
            return Optional<ScopedOperand> {};
        }

        auto object = TRY(expression.object().generate_bytecode(*this)).value();
        auto dst = allocate_register();
        if (expression.is_computed()) {
            auto property = TRY(expression.property().generate_bytecode(*this)).value();
            emit<Bytecode::Op::DeleteByValue>(dst, object, property);
        } else if (expression.property().is_identifier()) {
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit<Bytecode::Op::DeleteById>(dst, object, identifier_table_ref);
        } else {
            // NOTE: Trying to delete a private field generates a SyntaxError in the parser.
            return CodeGenerationError {
                &expression,
                "Unimplemented non-computed member expression"sv
            };
        }
        return dst;
    }

    // Though this will have no deletion effect, we still have to evaluate the node as it can have side effects.
    // For example: delete a(); delete ++c.b; etc.

    // 13.5.1.2 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-delete-operator-runtime-semantics-evaluation
    // 1. Let ref be the result of evaluating UnaryExpression.
    // 2. ReturnIfAbrupt(ref).
    (void)TRY(node.generate_bytecode(*this));

    // 3. If ref is not a Reference Record, return true.
    // NOTE: The rest of the steps are handled by Delete{Variable,ByValue,Id}.
    return add_constant(Value(true));
}
  499. void Generator::emit_set_variable(JS::Identifier const& identifier, ScopedOperand value, Bytecode::Op::SetVariable::InitializationMode initialization_mode, Bytecode::Op::EnvironmentMode mode)
  500. {
  501. if (identifier.is_local()) {
  502. if (value.operand().is_local() && value.operand().index() == identifier.local_variable_index()) {
  503. // Moving a local to itself is a no-op.
  504. return;
  505. }
  506. emit<Bytecode::Op::SetLocal>(identifier.local_variable_index(), value);
  507. } else {
  508. emit<Bytecode::Op::SetVariable>(intern_identifier(identifier.string()), value, next_environment_variable_cache(), initialization_mode, mode);
  509. }
  510. }
  511. static Optional<ByteString> expression_identifier(Expression const& expression)
  512. {
  513. if (expression.is_identifier()) {
  514. auto const& identifier = static_cast<Identifier const&>(expression);
  515. return identifier.string();
  516. }
  517. if (expression.is_numeric_literal()) {
  518. auto const& literal = static_cast<NumericLiteral const&>(expression);
  519. return literal.value().to_string_without_side_effects().to_byte_string();
  520. }
  521. if (expression.is_string_literal()) {
  522. auto const& literal = static_cast<StringLiteral const&>(expression);
  523. return ByteString::formatted("'{}'", literal.value());
  524. }
  525. if (expression.is_member_expression()) {
  526. auto const& member_expression = static_cast<MemberExpression const&>(expression);
  527. StringBuilder builder;
  528. if (auto identifer = expression_identifier(member_expression.object()); identifer.has_value())
  529. builder.append(*identifer);
  530. if (auto identifer = expression_identifier(member_expression.property()); identifer.has_value()) {
  531. if (member_expression.is_computed())
  532. builder.appendff("[{}]", *identifer);
  533. else
  534. builder.appendff(".{}", *identifer);
  535. }
  536. return builder.to_byte_string();
  537. }
  538. return {};
  539. }
  540. Optional<IdentifierTableIndex> Generator::intern_identifier_for_expression(Expression const& expression)
  541. {
  542. if (auto identifer = expression_identifier(expression); identifer.has_value())
  543. return intern_identifier(identifer.release_value());
  544. return {};
  545. }
// Emits the bytecode for an unlabelled `break`/`continue`: walks the block
// boundary stack outward, emitting the cleanup each boundary requires
// (leaving unwind contexts and lexical environments, routing through
// `finally` blocks) until the nearest matching scope's jump is emitted.
void Generator::generate_scoped_jump(JumpType type)
{
    // Restore m_current_unwind_context on exit; the walk below mutates it.
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    bool last_was_finally = false;
    // Walk boundaries innermost-first; the loop exits via `return` when the
    // target scope is reached.
    for (size_t i = m_boundaries.size(); i > 0; --i) {
        auto boundary = m_boundaries[i - 1];
        using enum BlockBoundaryType;
        switch (boundary) {
        case Break:
            if (type == JumpType::Break) {
                emit<Op::Jump>(nearest_breakable_scope());
                return;
            }
            break;
        case Continue:
            if (type == JumpType::Continue) {
                emit<Op::Jump>(nearest_continuable_scope());
                return;
            }
            break;
        case Unwind:
            // If we just routed through a finalizer, ReturnToFinally below
            // already popped the context for us.
            VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
            if (!last_was_finally) {
                VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                emit<Bytecode::Op::LeaveUnwindContext>();
                m_current_unwind_context = m_current_unwind_context->previous();
            }
            last_was_finally = false;
            break;
        case LeaveLexicalEnvironment:
            emit<Bytecode::Op::LeaveLexicalEnvironment>();
            break;
        case ReturnToFinally: {
            // Route control through the `finally` block: schedule a jump into a
            // fresh continuation block and keep emitting cleanup from there.
            VERIFY(m_current_unwind_context->finalizer().has_value());
            m_current_unwind_context = m_current_unwind_context->previous();
            auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
            auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
            auto& block = make_block(block_name);
            emit<Op::ScheduleJump>(Label { block });
            switch_to_basic_block(block);
            last_was_finally = true;
            break;
        }
        case LeaveFinally:
            emit<Op::LeaveFinally>();
            break;
        }
    }
    // The parser guarantees break/continue only appear inside a matching scope.
    VERIFY_NOT_REACHED();
}
// Emits the bytecode for a labelled `break`/`continue`: walks outward through
// the jumpable scopes (break or continue targets, innermost first), emitting
// the cleanup required to cross each intervening block boundary, until it
// finds the scope whose statement-label set contains `label`.
void Generator::generate_labelled_jump(JumpType type, DeprecatedFlyString const& label)
{
    // Restore m_current_unwind_context on exit; the walk below mutates it.
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    size_t current_boundary = m_boundaries.size();
    bool last_was_finally = false;

    auto const& jumpable_scopes = type == JumpType::Continue ? m_continuable_scopes : m_breakable_scopes;

    for (auto const& jumpable_scope : jumpable_scopes.in_reverse()) {
        for (; current_boundary > 0; --current_boundary) {
            auto boundary = m_boundaries[current_boundary - 1];
            if (boundary == BlockBoundaryType::Unwind) {
                // If we just routed through a finalizer, the unwind context was
                // already popped by the ReturnToFinally branch below.
                VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
                if (!last_was_finally) {
                    VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                    emit<Bytecode::Op::LeaveUnwindContext>();
                    m_current_unwind_context = m_current_unwind_context->previous();
                }
                last_was_finally = false;
            } else if (boundary == BlockBoundaryType::LeaveLexicalEnvironment) {
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
            } else if (boundary == BlockBoundaryType::ReturnToFinally) {
                // Route control through the `finally` block before continuing outward.
                VERIFY(m_current_unwind_context->finalizer().has_value());
                m_current_unwind_context = m_current_unwind_context->previous();
                auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
                auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
                auto& block = make_block(block_name);
                emit<Op::ScheduleJump>(Label { block });
                switch_to_basic_block(block);
                last_was_finally = true;
            } else if ((type == JumpType::Continue && boundary == BlockBoundaryType::Continue) || (type == JumpType::Break && boundary == BlockBoundaryType::Break)) {
                // Make sure we don't process this boundary twice if the current jumpable scope doesn't contain the target label.
                --current_boundary;
                break;
            }
        }
        if (jumpable_scope.language_label_set.contains_slow(label)) {
            emit<Op::Jump>(jumpable_scope.bytecode_target);
            return;
        }
    }
    // We must have a jumpable scope available that contains the label, as this should be enforced by the parser.
    VERIFY_NOT_REACHED();
}
// Emits an unlabelled `break`: jumps to the innermost breakable scope.
void Generator::generate_break()
{
    generate_scoped_jump(JumpType::Break);
}
// Emits a labelled `break`: jumps to the breakable scope carrying `break_label`.
void Generator::generate_break(DeprecatedFlyString const& break_label)
{
    generate_labelled_jump(JumpType::Break, break_label);
}
// Emits an unlabelled `continue`: jumps to the innermost continuable scope.
void Generator::generate_continue()
{
    generate_scoped_jump(JumpType::Continue);
}
// Emits a labelled `continue`: jumps to the continuable scope carrying `continue_label`.
void Generator::generate_continue(DeprecatedFlyString const& continue_label)
{
    generate_labelled_jump(JumpType::Continue, continue_label);
}
// Pushes `object` as the current home object (used by emit_new_function for
// functions that need a [[HomeObject]], e.g. for `super` resolution).
void Generator::push_home_object(ScopedOperand object)
{
    m_home_objects.append(object);
}
// Pops the most recently pushed home object. Must pair with push_home_object().
void Generator::pop_home_object()
{
    m_home_objects.take_last();
}
  662. void Generator::emit_new_function(ScopedOperand dst, FunctionExpression const& function_node, Optional<IdentifierTableIndex> lhs_name)
  663. {
  664. if (m_home_objects.is_empty()) {
  665. emit<Op::NewFunction>(dst, function_node, lhs_name);
  666. } else {
  667. emit<Op::NewFunction>(dst, function_node, lhs_name, m_home_objects.last());
  668. }
  669. }
  670. CodeGenerationErrorOr<Optional<ScopedOperand>> Generator::emit_named_evaluation_if_anonymous_function(Expression const& expression, Optional<IdentifierTableIndex> lhs_name, Optional<ScopedOperand> preferred_dst)
  671. {
  672. if (is<FunctionExpression>(expression)) {
  673. auto const& function_expression = static_cast<FunctionExpression const&>(expression);
  674. if (!function_expression.has_name()) {
  675. return TRY(function_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
  676. }
  677. }
  678. if (is<ClassExpression>(expression)) {
  679. auto const& class_expression = static_cast<ClassExpression const&>(expression);
  680. if (!class_expression.has_name()) {
  681. return TRY(class_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
  682. }
  683. }
  684. return expression.generate_bytecode(*this, preferred_dst);
  685. }
  686. void Generator::emit_get_by_id(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier)
  687. {
  688. emit<Op::GetById>(dst, base, property_identifier, move(base_identifier), m_next_property_lookup_cache++);
  689. }
  690. void Generator::emit_get_by_id_with_this(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex id, ScopedOperand this_value)
  691. {
  692. emit<Op::GetByIdWithThis>(dst, base, id, this_value, m_next_property_lookup_cache++);
  693. }
// Loads `result.value` into `dst` (iterator-result "value" accessor).
void Generator::emit_iterator_value(ScopedOperand dst, ScopedOperand result)
{
    emit_get_by_id(dst, result, intern_identifier("value"sv));
}
// Loads `result.done` into `dst` (iterator-result "done" accessor).
void Generator::emit_iterator_complete(ScopedOperand dst, ScopedOperand result)
{
    emit_get_by_id(dst, result, intern_identifier("done"sv));
}
  702. bool Generator::is_local_initialized(u32 local_index) const
  703. {
  704. return m_initialized_locals.find(local_index) != m_initialized_locals.end();
  705. }
// Marks the local at `local_index` as initialized so later reads can skip the
// TDZ check (see is_local_initialized).
void Generator::set_local_initialized(u32 local_index)
{
    m_initialized_locals.set(local_index);
}
  710. ScopedOperand Generator::get_this(Optional<ScopedOperand> preferred_dst)
  711. {
  712. if (m_current_basic_block->this_().has_value())
  713. return m_current_basic_block->this_().value();
  714. if (m_root_basic_blocks[0]->this_().has_value()) {
  715. m_current_basic_block->set_this(m_root_basic_blocks[0]->this_().value());
  716. return m_root_basic_blocks[0]->this_().value();
  717. }
  718. auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
  719. emit<Bytecode::Op::ResolveThisBinding>(dst);
  720. m_current_basic_block->set_this(dst);
  721. return dst;
  722. }
// Returns the generator's accumulator operand.
ScopedOperand Generator::accumulator()
{
    return m_accumulator;
}
  727. }