Generator.cpp 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882
  1. /*
  2. * Copyright (c) 2021-2024, Andreas Kling <kling@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/QuickSort.h>
  7. #include <AK/TemporaryChange.h>
  8. #include <LibJS/AST.h>
  9. #include <LibJS/Bytecode/BasicBlock.h>
  10. #include <LibJS/Bytecode/Generator.h>
  11. #include <LibJS/Bytecode/Instruction.h>
  12. #include <LibJS/Bytecode/Op.h>
  13. #include <LibJS/Bytecode/Register.h>
  14. #include <LibJS/Runtime/VM.h>
  15. namespace JS::Bytecode {
// Sets up a fresh bytecode generator for the given VM: empty string, identifier
// and regex tables, a constants pool constructed with the VM's heap, and a
// ScopedOperand wrapping the shared accumulator register.
Generator::Generator(VM& vm)
    : m_vm(vm)
    , m_string_table(make<StringTable>())
    , m_identifier_table(make<IdentifierTable>())
    , m_regex_table(make<RegexTable>())
    , m_constants(vm.heap())
    , m_accumulator(*this, Operand(Register::accumulator()))
{
}
// Compiles `node` into an Executable:
//   1. Marks parameters bound to locals as initialized.
//   2. Emits a suspend-immediately prologue for generator/async functions.
//   3. Generates bytecode for `node` into a graph of basic blocks.
//   4. Flattens the blocks into one byte stream, dropping jumps that merely
//      fall through to the next block and rewriting JumpIf into JumpTrue /
//      JumpFalse when one target is the next block.
//   5. Patches label addresses and links exception handler/finalizer offsets.
CodeGenerationErrorOr<NonnullGCPtr<Executable>> Generator::generate(VM& vm, ASTNode const& node, ReadonlySpan<FunctionParameter> parameters, FunctionKind enclosing_function_kind)
{
    Generator generator(vm);
    // Parameters that bind directly to local slots are initialized by the caller,
    // so record them as such up front.
    for (auto const& parameter : parameters) {
        if (auto const* identifier = parameter.binding.get_pointer<NonnullRefPtr<Identifier const>>();
            identifier && (*identifier)->is_local()) {
            generator.set_local_initialized((*identifier)->local_variable_index());
        }
    }
    generator.switch_to_basic_block(generator.make_block());
    SourceLocationScope scope(generator, node);
    generator.m_enclosing_function_kind = enclosing_function_kind;
    if (generator.is_in_generator_or_async_function()) {
        // Immediately yield with no value.
        auto& start_block = generator.make_block();
        generator.emit<Bytecode::Op::Yield>(Label { start_block }, generator.add_constant(js_undefined()));
        generator.switch_to_basic_block(start_block);
        // NOTE: This doesn't have to handle received throw/return completions, as GeneratorObject::resume_abrupt
        // will not enter the generator from the SuspendedStart state and immediately completes the generator.
    }
    auto last_value = TRY(node.generate_bytecode(generator));
    // If control can still fall off the end, end execution with the last computed value.
    if (!generator.current_block().is_terminated() && last_value.has_value()) {
        generator.emit<Bytecode::Op::End>(last_value.value());
    }
    if (generator.is_in_generator_or_async_function()) {
        // Terminate all unterminated blocks with yield return
        for (auto& block : generator.m_root_basic_blocks) {
            if (block->is_terminated())
                continue;
            generator.switch_to_basic_block(*block);
            generator.emit<Bytecode::Op::Yield>(nullptr, generator.add_constant(js_undefined()));
        }
    }
    // Determine strict mode from whichever node type we were handed.
    bool is_strict_mode = false;
    if (is<Program>(node))
        is_strict_mode = static_cast<Program const&>(node).is_strict_mode();
    else if (is<FunctionBody>(node))
        is_strict_mode = static_cast<FunctionBody const&>(node).in_strict_mode();
    else if (is<FunctionDeclaration>(node))
        is_strict_mode = static_cast<FunctionDeclaration const&>(node).is_strict_mode();
    else if (is<FunctionExpression>(node))
        is_strict_mode = static_cast<FunctionExpression const&>(node).is_strict_mode();
    // Pre-size the output buffer; the optimizations below can only shrink it.
    size_t size_needed = 0;
    for (auto& block : generator.m_root_basic_blocks) {
        size_needed += block->size();
    }
    Vector<u8> bytecode;
    bytecode.ensure_capacity(size_needed);
    Vector<size_t> basic_block_start_offsets;
    basic_block_start_offsets.ensure_capacity(generator.m_root_basic_blocks.size());
    // Maps each basic block to its final offset in the flattened stream.
    HashMap<BasicBlock const*, size_t> block_offsets;
    // Offsets (into `bytecode`) of embedded Labels that still hold block indices
    // and must be patched to absolute addresses after flattening.
    Vector<size_t> label_offsets;
    struct UnlinkedExceptionHandlers {
        size_t start_offset;
        size_t end_offset;
        BasicBlock const* handler;
        BasicBlock const* finalizer;
    };
    Vector<UnlinkedExceptionHandlers> unlinked_exception_handlers;
    HashMap<size_t, SourceRecord> source_map;
    for (auto& block : generator.m_root_basic_blocks) {
        basic_block_start_offsets.append(bytecode.size());
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.append({
                .start_offset = bytecode.size(),
                .end_offset = 0, // Patched once this block's instructions have been emitted.
                .handler = block->handler(),
                .finalizer = block->finalizer(),
            });
        }
        block_offsets.set(block.ptr(), bytecode.size());
        // Rebase the block-relative source records onto stream offsets.
        for (auto& [offset, source_record] : block->source_map()) {
            source_map.set(bytecode.size() + offset, source_record);
        }
        Bytecode::InstructionStreamIterator it(block->instruction_stream());
        while (!it.at_end()) {
            auto& instruction = const_cast<Instruction&>(*it);
            // OPTIMIZATION: Don't emit jumps that just jump to the next block.
            if (instruction.type() == Instruction::Type::Jump) {
                auto& jump = static_cast<Bytecode::Op::Jump&>(instruction);
                if (jump.target().basic_block_index() == block->index() + 1) {
                    if (basic_block_start_offsets.last() == bytecode.size()) {
                        // This block is empty, just skip it.
                        basic_block_start_offsets.take_last();
                    }
                    ++it;
                    continue;
                }
            }
            // OPTIMIZATION: For `JumpIf` where one of the targets is the very next block,
            // we can emit a `JumpTrue` or `JumpFalse` (to the other block) instead.
            if (instruction.type() == Instruction::Type::JumpIf) {
                auto& jump = static_cast<Bytecode::Op::JumpIf&>(instruction);
                if (jump.true_target().basic_block_index() == block->index() + 1) {
                    Op::JumpFalse jump_false(jump.condition(), Label { jump.false_target() });
                    auto& label = jump_false.target();
                    // Record the label's byte offset within the stream so it can be patched later.
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_false));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_false), jump_false.length());
                    ++it;
                    continue;
                }
                if (jump.false_target().basic_block_index() == block->index() + 1) {
                    Op::JumpTrue jump_true(jump.condition(), Label { jump.true_target() });
                    auto& label = jump_true.target();
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_true));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_true), jump_true.length());
                    ++it;
                    continue;
                }
            }
            // Generic case: remember where every embedded Label lands in the stream.
            instruction.visit_labels([&](Label& label) {
                size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&instruction));
                label_offsets.append(label_offset);
            });
            bytecode.append(reinterpret_cast<u8 const*>(&instruction), instruction.length());
            ++it;
        }
        // Blocks without a terminator get an implicit `End undefined`.
        if (!block->is_terminated()) {
            Op::End end(generator.add_constant(js_undefined()));
            bytecode.append(reinterpret_cast<u8 const*>(&end), end.length());
        }
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.last().end_offset = bytecode.size();
        }
    }
    // Second pass: rewrite every recorded Label from a block index to a stream address.
    for (auto label_offset : label_offsets) {
        auto& label = *reinterpret_cast<Label*>(bytecode.data() + label_offset);
        auto* block = generator.m_root_basic_blocks[label.basic_block_index()].ptr();
        label.set_address(block_offsets.get(block).value());
    }
    auto executable = vm.heap().allocate_without_realm<Executable>(
        move(bytecode),
        move(generator.m_identifier_table),
        move(generator.m_string_table),
        move(generator.m_regex_table),
        move(generator.m_constants),
        node.source_code(),
        generator.m_next_property_lookup_cache,
        generator.m_next_global_variable_cache,
        generator.m_next_environment_variable_cache,
        generator.m_next_register,
        is_strict_mode);
    // Resolve handler/finalizer blocks to stream offsets and sort ranges by start.
    Vector<Executable::ExceptionHandlers> linked_exception_handlers;
    for (auto& unlinked_handler : unlinked_exception_handlers) {
        auto start_offset = unlinked_handler.start_offset;
        auto end_offset = unlinked_handler.end_offset;
        auto handler_offset = unlinked_handler.handler ? block_offsets.get(unlinked_handler.handler).value() : Optional<size_t> {};
        auto finalizer_offset = unlinked_handler.finalizer ? block_offsets.get(unlinked_handler.finalizer).value() : Optional<size_t> {};
        linked_exception_handlers.append({ start_offset, end_offset, handler_offset, finalizer_offset });
    }
    quick_sort(linked_exception_handlers, [](auto const& a, auto const& b) {
        return a.start_offset < b.start_offset;
    });
    executable->exception_handlers = move(linked_exception_handlers);
    executable->basic_block_start_offsets = move(basic_block_start_offsets);
    executable->source_map = move(source_map);
    generator.m_finished = true;
    return executable;
}
// Grows the current basic block's instruction buffer by `additional_size` bytes.
// Must only be called while a basic block is active.
void Generator::grow(size_t additional_size)
{
    VERIFY(m_current_basic_block);
    m_current_basic_block->grow(additional_size);
}
  191. ScopedOperand Generator::allocate_register()
  192. {
  193. if (!m_free_registers.is_empty()) {
  194. return ScopedOperand { *this, Operand { m_free_registers.take_last() } };
  195. }
  196. VERIFY(m_next_register != NumericLimits<u32>::max());
  197. return ScopedOperand { *this, Operand { Register { m_next_register++ } } };
  198. }
// Returns `reg` to the free list so allocate_register() can recycle it.
void Generator::free_register(Register reg)
{
    m_free_registers.append(reg);
}
  203. ScopedOperand Generator::local(u32 local_index)
  204. {
  205. return ScopedOperand { *this, Operand { Operand::Type::Local, static_cast<u32>(local_index) } };
  206. }
// RAII helper: makes `node` the generator's current AST node (used for source
// locations) for the lifetime of the scope, restoring the previous node on exit.
Generator::SourceLocationScope::SourceLocationScope(Generator& generator, ASTNode const& node)
    : m_generator(generator)
    , m_previous_node(m_generator.m_current_ast_node)
{
    m_generator.m_current_ast_node = &node;
}
Generator::SourceLocationScope::~SourceLocationScope()
{
    m_generator.m_current_ast_node = m_previous_node;
}
// RAII helper: pushes this context onto the generator's unwind-context chain,
// remembering an optional finalizer label; pops itself on destruction.
Generator::UnwindContext::UnwindContext(Generator& generator, Optional<Label> finalizer)
    : m_generator(generator)
    , m_finalizer(finalizer)
    , m_previous_context(m_generator.m_current_unwind_context)
{
    m_generator.m_current_unwind_context = this;
}
Generator::UnwindContext::~UnwindContext()
{
    // Unwind contexts must be destroyed strictly LIFO.
    VERIFY(m_generator.m_current_unwind_context == this);
    m_generator.m_current_unwind_context = m_previous_context;
}
// Returns the jump target of the innermost enclosing continuable scope
// (i.e. where a plain `continue` should go).
Label Generator::nearest_continuable_scope() const
{
    return m_continuable_scopes.last().bytecode_target;
}
// Opens a lexical-environment boundary and emits the instruction that
// instantiates `scope_node`'s block-scoped declarations.
void Generator::block_declaration_instantiation(ScopeNode const& scope_node)
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::BlockDeclarationInstantiation>(scope_node);
}
// Opens a new lexical environment; paired with end_variable_scope().
void Generator::begin_variable_scope()
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::CreateLexicalEnvironment>();
}
  243. void Generator::end_variable_scope()
  244. {
  245. end_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
  246. if (!m_current_basic_block->is_terminated()) {
  247. emit<Bytecode::Op::LeaveLexicalEnvironment>();
  248. }
  249. }
// Pushes a continuable scope (target for `continue`, plus the labels that may
// name it) and its matching block boundary.
void Generator::begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_continuable_scopes.append({ continue_target, language_label_set });
    start_boundary(BlockBoundaryType::Continue);
}
void Generator::end_continuable_scope()
{
    m_continuable_scopes.take_last();
    end_boundary(BlockBoundaryType::Continue);
}
// Returns the jump target of the innermost enclosing breakable scope
// (i.e. where a plain `break` should go).
Label Generator::nearest_breakable_scope() const
{
    return m_breakable_scopes.last().bytecode_target;
}
// Pushes a breakable scope (target for `break`, plus the labels that may
// name it) and its matching block boundary.
void Generator::begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_breakable_scopes.append({ breakable_target, language_label_set });
    start_boundary(BlockBoundaryType::Break);
}
void Generator::end_breakable_scope()
{
    m_breakable_scopes.take_last();
    end_boundary(BlockBoundaryType::Break);
}
// Emits the evaluation of a `super.x` / `super[x]` reference per the spec:
// resolves `this`, evaluates the computed property (if any), resolves the
// super base, and returns the pieces as ReferenceOperands. `referenced_name`
// is only set for computed accesses; named accesses are interned by the caller.
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_super_reference(MemberExpression const& expression)
{
    VERIFY(is<SuperExpression>(expression.object()));
    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    // 1. Let env be GetThisEnvironment().
    // 2. Let actualThis be ? env.GetThisBinding().
    auto actual_this = allocate_register();
    emit<Bytecode::Op::ResolveThisBinding>(actual_this);
    Optional<ScopedOperand> computed_property_value;
    if (expression.is_computed()) {
        // SuperProperty : super [ Expression ]
        // 3. Let propertyNameReference be ? Evaluation of Expression.
        // 4. Let propertyNameValue be ? GetValue(propertyNameReference).
        computed_property_value = TRY(expression.property().generate_bytecode(*this)).value();
    }
    // 5/7. Return ? MakeSuperPropertyReference(actualThis, propertyKey, strict).
    // https://tc39.es/ecma262/#sec-makesuperpropertyreference
    // 1. Let env be GetThisEnvironment().
    // 2. Assert: env.HasSuperBinding() is true.
    // 3. Let baseValue be ? env.GetSuperBase().
    auto base_value = allocate_register();
    emit<Bytecode::Op::ResolveSuperBase>(base_value);
    // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
    return ReferenceOperands {
        .base = base_value,
        .referenced_name = computed_property_value,
        .this_value = actual_this,
    };
}
// Emits a load through the reference denoted by `node` (an Identifier or a
// MemberExpression) and returns the operands making up that reference, with
// `loaded_value` holding the result. `preferred_dst`, when provided, is used
// as the destination for the loaded value instead of a fresh register.
// Any other node type yields a CodeGenerationError.
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_load_from_reference(JS::ASTNode const& node, Optional<ScopedOperand> preferred_dst)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        auto loaded_value = TRY(identifier.generate_bytecode(*this, preferred_dst)).value();
        return ReferenceOperands {
            .loaded_value = loaded_value,
        };
    }
    if (!is<MemberExpression>(node)) {
        return CodeGenerationError {
            &node,
            "Unimplemented/invalid node used as a reference"sv
        };
    }
    auto& expression = static_cast<MemberExpression const&>(node);
    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    if (is<SuperExpression>(expression.object())) {
        auto super_reference = TRY(emit_super_reference(expression));
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        if (super_reference.referenced_name.has_value()) {
            // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
            // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
            emit<Bytecode::Op::GetByValueWithThis>(dst, *super_reference.base, *super_reference.referenced_name, *super_reference.this_value);
        } else {
            // 3. Let propertyKey be StringValue of IdentifierName.
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit_get_by_id_with_this(dst, *super_reference.base, identifier_table_ref, *super_reference.this_value);
        }
        super_reference.loaded_value = dst;
        return super_reference;
    }
    auto base = TRY(expression.object().generate_bytecode(*this)).value();
    // Used for diagnostics / named lookups on the base expression, if it has a simple name.
    auto base_identifier = intern_identifier_for_expression(expression.object());
    if (expression.is_computed()) {
        auto property = TRY(expression.property().generate_bytecode(*this)).value();
        // Keep a copy of the property key so the returned reference stays valid
        // even if `property`'s register is reused afterwards.
        auto saved_property = allocate_register();
        emit<Bytecode::Op::Mov>(saved_property, property);
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit<Bytecode::Op::GetByValue>(dst, base, property, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_name = saved_property,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_identifier()) {
        auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit_get_by_id(dst, base, identifier_table_ref, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_private_identifier()) {
        auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
        emit<Bytecode::Op::GetPrivateById>(dst, base, identifier_table_ref);
        return ReferenceOperands {
            .base = base,
            .referenced_private_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    return CodeGenerationError {
        &expression,
        "Unimplemented non-computed member expression"sv
    };
}
  377. CodeGenerationErrorOr<void> Generator::emit_store_to_reference(JS::ASTNode const& node, ScopedOperand value)
  378. {
  379. if (is<Identifier>(node)) {
  380. auto& identifier = static_cast<Identifier const&>(node);
  381. emit_set_variable(identifier, value);
  382. return {};
  383. }
  384. if (is<MemberExpression>(node)) {
  385. auto& expression = static_cast<MemberExpression const&>(node);
  386. // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
  387. if (is<SuperExpression>(expression.object())) {
  388. auto super_reference = TRY(emit_super_reference(expression));
  389. // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
  390. if (super_reference.referenced_name.has_value()) {
  391. // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
  392. // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
  393. emit<Bytecode::Op::PutByValueWithThis>(*super_reference.base, *super_reference.referenced_name, *super_reference.this_value, value);
  394. } else {
  395. // 3. Let propertyKey be StringValue of IdentifierName.
  396. auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
  397. emit<Bytecode::Op::PutByIdWithThis>(*super_reference.base, *super_reference.this_value, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  398. }
  399. } else {
  400. auto object = TRY(expression.object().generate_bytecode(*this)).value();
  401. if (expression.is_computed()) {
  402. auto property = TRY(expression.property().generate_bytecode(*this)).value();
  403. emit<Bytecode::Op::PutByValue>(object, property, value);
  404. } else if (expression.property().is_identifier()) {
  405. auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
  406. emit<Bytecode::Op::PutById>(object, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  407. } else if (expression.property().is_private_identifier()) {
  408. auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
  409. emit<Bytecode::Op::PutPrivateById>(object, identifier_table_ref, value);
  410. } else {
  411. return CodeGenerationError {
  412. &expression,
  413. "Unimplemented non-computed member expression"sv
  414. };
  415. }
  416. }
  417. return {};
  418. }
  419. return CodeGenerationError {
  420. &node,
  421. "Unimplemented/invalid node used a reference"sv
  422. };
  423. }
  424. CodeGenerationErrorOr<void> Generator::emit_store_to_reference(ReferenceOperands const& reference, ScopedOperand value)
  425. {
  426. if (reference.referenced_private_identifier.has_value()) {
  427. emit<Bytecode::Op::PutPrivateById>(*reference.base, *reference.referenced_private_identifier, value);
  428. return {};
  429. }
  430. if (reference.referenced_identifier.has_value()) {
  431. if (reference.base == reference.this_value)
  432. emit<Bytecode::Op::PutById>(*reference.base, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  433. else
  434. emit<Bytecode::Op::PutByIdWithThis>(*reference.base, *reference.this_value, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
  435. return {};
  436. }
  437. if (reference.base == reference.this_value)
  438. emit<Bytecode::Op::PutByValue>(*reference.base, *reference.referenced_name, value);
  439. else
  440. emit<Bytecode::Op::PutByValueWithThis>(*reference.base, *reference.referenced_name, *reference.this_value, value);
  441. return {};
  442. }
// Emits `delete` of the reference denoted by `node` and returns an operand
// holding the boolean result, or an empty Optional for super references.
CodeGenerationErrorOr<Optional<ScopedOperand>> Generator::emit_delete_reference(JS::ASTNode const& node)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        // Deleting a local binding is always false (locals are not deletable).
        if (identifier.is_local()) {
            return add_constant(Value(false));
        }
        auto dst = allocate_register();
        emit<Bytecode::Op::DeleteVariable>(dst, intern_identifier(identifier.string()));
        return dst;
    }
    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);
        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));
            auto dst = Operand(allocate_register());
            if (super_reference.referenced_name.has_value()) {
                emit<Bytecode::Op::DeleteByValueWithThis>(dst, *super_reference.base, *super_reference.this_value, *super_reference.referenced_name);
            } else {
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::DeleteByIdWithThis>(dst, *super_reference.base, *super_reference.this_value, identifier_table_ref);
            }
            // NOTE(review): `dst` receives the delete result above but an empty
            // Optional is returned here, discarding it — presumably deliberate
            // (deleting a super property always throws at runtime); confirm.
            return Optional<ScopedOperand> {};
        }
        auto object = TRY(expression.object().generate_bytecode(*this)).value();
        auto dst = allocate_register();
        if (expression.is_computed()) {
            auto property = TRY(expression.property().generate_bytecode(*this)).value();
            emit<Bytecode::Op::DeleteByValue>(dst, object, property);
        } else if (expression.property().is_identifier()) {
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit<Bytecode::Op::DeleteById>(dst, object, identifier_table_ref);
        } else {
            // NOTE: Trying to delete a private field generates a SyntaxError in the parser.
            return CodeGenerationError {
                &expression,
                "Unimplemented non-computed member expression"sv
            };
        }
        return dst;
    }
    // Though this will have no deletion effect, we still have to evaluate the node as it can have side effects.
    // For example: delete a(); delete ++c.b; etc.
    // 13.5.1.2 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-delete-operator-runtime-semantics-evaluation
    // 1. Let ref be the result of evaluating UnaryExpression.
    // 2. ReturnIfAbrupt(ref).
    (void)TRY(node.generate_bytecode(*this));
    // 3. If ref is not a Reference Record, return true.
    // NOTE: The rest of the steps are handled by Delete{Variable,ByValue,Id}.
    return add_constant(Value(true));
}
  495. void Generator::emit_set_variable(JS::Identifier const& identifier, ScopedOperand value, Bytecode::Op::SetVariable::InitializationMode initialization_mode, Bytecode::Op::EnvironmentMode mode)
  496. {
  497. if (identifier.is_local()) {
  498. if (value.operand().is_local() && value.operand().index() == identifier.local_variable_index()) {
  499. // Moving a local to itself is a no-op.
  500. return;
  501. }
  502. emit<Bytecode::Op::SetLocal>(identifier.local_variable_index(), value);
  503. } else {
  504. emit<Bytecode::Op::SetVariable>(intern_identifier(identifier.string()), value, next_environment_variable_cache(), initialization_mode, mode);
  505. }
  506. }
  507. static Optional<ByteString> expression_identifier(Expression const& expression)
  508. {
  509. if (expression.is_identifier()) {
  510. auto const& identifier = static_cast<Identifier const&>(expression);
  511. return identifier.string();
  512. }
  513. if (expression.is_numeric_literal()) {
  514. auto const& literal = static_cast<NumericLiteral const&>(expression);
  515. return literal.value().to_string_without_side_effects().to_byte_string();
  516. }
  517. if (expression.is_string_literal()) {
  518. auto const& literal = static_cast<StringLiteral const&>(expression);
  519. return ByteString::formatted("'{}'", literal.value());
  520. }
  521. if (expression.is_member_expression()) {
  522. auto const& member_expression = static_cast<MemberExpression const&>(expression);
  523. StringBuilder builder;
  524. if (auto identifer = expression_identifier(member_expression.object()); identifer.has_value())
  525. builder.append(*identifer);
  526. if (auto identifer = expression_identifier(member_expression.property()); identifer.has_value()) {
  527. if (member_expression.is_computed())
  528. builder.appendff("[{}]", *identifer);
  529. else
  530. builder.appendff(".{}", *identifer);
  531. }
  532. return builder.to_byte_string();
  533. }
  534. return {};
  535. }
// Interns a readable name for `expression` (via expression_identifier()) and
// returns its table index, or an empty Optional when the expression has no
// simple name.
Optional<IdentifierTableIndex> Generator::intern_identifier_for_expression(Expression const& expression)
{
    if (auto identifer = expression_identifier(expression); identifer.has_value())
        return intern_identifier(identifer.release_value());
    return {};
}
// Emits the bytecode for an unlabelled `break` or `continue`: walks the block
// boundary stack from innermost to outermost, emitting the cleanup each
// boundary requires (leaving unwind contexts and lexical environments,
// detouring through finalizers) until the matching Break/Continue boundary is
// found, where it jumps to the nearest breakable/continuable scope target.
void Generator::generate_scoped_jump(JumpType type)
{
    // m_current_unwind_context is mutated while walking; restore it afterwards.
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    bool last_was_finally = false;
    for (size_t i = m_boundaries.size(); i > 0; --i) {
        auto boundary = m_boundaries[i - 1];
        using enum BlockBoundaryType;
        switch (boundary) {
        case Break:
            if (type == JumpType::Break) {
                emit<Op::Jump>(nearest_breakable_scope());
                return;
            }
            break;
        case Continue:
            if (type == JumpType::Continue) {
                emit<Op::Jump>(nearest_continuable_scope());
                return;
            }
            break;
        case Unwind:
            // An Unwind boundary immediately after a ReturnToFinally has already
            // been left via the finalizer detour; only leave it explicitly otherwise.
            VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
            if (!last_was_finally) {
                VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                emit<Bytecode::Op::LeaveUnwindContext>();
                m_current_unwind_context = m_current_unwind_context->previous();
            }
            last_was_finally = false;
            break;
        case LeaveLexicalEnvironment:
            emit<Bytecode::Op::LeaveLexicalEnvironment>();
            break;
        case ReturnToFinally: {
            // Detour through the finalizer: schedule the jump and continue
            // emitting into a fresh block that runs after the finalizer.
            VERIFY(m_current_unwind_context->finalizer().has_value());
            m_current_unwind_context = m_current_unwind_context->previous();
            auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
            auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
            auto& block = make_block(block_name);
            emit<Op::ScheduleJump>(Label { block });
            switch_to_basic_block(block);
            last_was_finally = true;
            break;
        }
        case LeaveFinally:
            emit<Op::LeaveFinally>();
            break;
        }
    }
    // The parser guarantees break/continue only appear inside a matching scope.
    VERIFY_NOT_REACHED();
}
// Emits the bytecode for a labelled `break`/`continue`: walks the breakable or
// continuable scopes outward, emitting the same boundary cleanup as
// generate_scoped_jump() up to each candidate scope, until a scope whose
// label set contains `label` is found — then jumps to that scope's target.
void Generator::generate_labelled_jump(JumpType type, DeprecatedFlyString const& label)
{
    // m_current_unwind_context is mutated while walking; restore it afterwards.
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    size_t current_boundary = m_boundaries.size();
    bool last_was_finally = false;
    auto const& jumpable_scopes = type == JumpType::Continue ? m_continuable_scopes : m_breakable_scopes;
    for (auto const& jumpable_scope : jumpable_scopes.in_reverse()) {
        // Emit cleanup for the boundaries between the previous candidate scope and this one.
        for (; current_boundary > 0; --current_boundary) {
            auto boundary = m_boundaries[current_boundary - 1];
            if (boundary == BlockBoundaryType::Unwind) {
                // Already left via the finalizer detour if the previous boundary was ReturnToFinally.
                VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
                if (!last_was_finally) {
                    VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                    emit<Bytecode::Op::LeaveUnwindContext>();
                    m_current_unwind_context = m_current_unwind_context->previous();
                }
                last_was_finally = false;
            } else if (boundary == BlockBoundaryType::LeaveLexicalEnvironment) {
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
            } else if (boundary == BlockBoundaryType::ReturnToFinally) {
                // Detour through the finalizer: schedule the jump and continue in a fresh block.
                VERIFY(m_current_unwind_context->finalizer().has_value());
                m_current_unwind_context = m_current_unwind_context->previous();
                auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
                auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
                auto& block = make_block(block_name);
                emit<Op::ScheduleJump>(Label { block });
                switch_to_basic_block(block);
                last_was_finally = true;
            } else if ((type == JumpType::Continue && boundary == BlockBoundaryType::Continue) || (type == JumpType::Break && boundary == BlockBoundaryType::Break)) {
                // Make sure we don't process this boundary twice if the current jumpable scope doesn't contain the target label.
                --current_boundary;
                break;
            }
        }
        if (jumpable_scope.language_label_set.contains_slow(label)) {
            emit<Op::Jump>(jumpable_scope.bytecode_target);
            return;
        }
    }
    // We must have a jumpable scope available that contains the label, as this should be enforced by the parser.
    VERIFY_NOT_REACHED();
}
// Emits bytecode for an unlabelled `break`, unwinding to the innermost breakable scope.
void Generator::generate_break()
{
    generate_scoped_jump(JumpType::Break);
}
// Emits bytecode for `break <label>`, unwinding to the breakable scope carrying that label.
void Generator::generate_break(DeprecatedFlyString const& break_label)
{
    generate_labelled_jump(JumpType::Break, break_label);
}
// Emits bytecode for an unlabelled `continue`, unwinding to the innermost continuable scope.
void Generator::generate_continue()
{
    generate_scoped_jump(JumpType::Continue);
}
// Emits bytecode for `continue <label>`, unwinding to the continuable scope carrying that label.
void Generator::generate_continue(DeprecatedFlyString const& continue_label)
{
    generate_labelled_jump(JumpType::Continue, continue_label);
}
// Pushes `object` onto the home-object stack; the innermost entry is attached
// to functions created via emit_new_function().
void Generator::push_home_object(ScopedOperand object)
{
    m_home_objects.append(object);
}
// Pops the innermost home object previously pushed via push_home_object().
void Generator::pop_home_object()
{
    m_home_objects.take_last();
}
  658. void Generator::emit_new_function(ScopedOperand dst, FunctionExpression const& function_node, Optional<IdentifierTableIndex> lhs_name)
  659. {
  660. if (m_home_objects.is_empty()) {
  661. emit<Op::NewFunction>(dst, function_node, lhs_name);
  662. } else {
  663. emit<Op::NewFunction>(dst, function_node, lhs_name, m_home_objects.last());
  664. }
  665. }
  666. CodeGenerationErrorOr<Optional<ScopedOperand>> Generator::emit_named_evaluation_if_anonymous_function(Expression const& expression, Optional<IdentifierTableIndex> lhs_name, Optional<ScopedOperand> preferred_dst)
  667. {
  668. if (is<FunctionExpression>(expression)) {
  669. auto const& function_expression = static_cast<FunctionExpression const&>(expression);
  670. if (!function_expression.has_name()) {
  671. return TRY(function_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
  672. }
  673. }
  674. if (is<ClassExpression>(expression)) {
  675. auto const& class_expression = static_cast<ClassExpression const&>(expression);
  676. if (!class_expression.has_name()) {
  677. return TRY(class_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
  678. }
  679. }
  680. return expression.generate_bytecode(*this, preferred_dst);
  681. }
// Emits a GetById (dst = base.<property>), allocating the next property-lookup
// cache index for this access site.
void Generator::emit_get_by_id(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier)
{
    emit<Op::GetById>(dst, base, property_identifier, move(base_identifier), m_next_property_lookup_cache++);
}
// Emits a GetByIdWithThis (property read with an explicit `this` value, as used
// for `super.property`), allocating the next property-lookup cache index.
void Generator::emit_get_by_id_with_this(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex id, ScopedOperand this_value)
{
    emit<Op::GetByIdWithThis>(dst, base, id, this_value, m_next_property_lookup_cache++);
}
  690. void Generator::emit_iterator_value(ScopedOperand dst, ScopedOperand result)
  691. {
  692. emit_get_by_id(dst, result, intern_identifier("value"sv));
  693. }
  694. void Generator::emit_iterator_complete(ScopedOperand dst, ScopedOperand result)
  695. {
  696. emit_get_by_id(dst, result, intern_identifier("done"sv));
  697. }
  698. bool Generator::is_local_initialized(u32 local_index) const
  699. {
  700. return m_initialized_locals.find(local_index) != m_initialized_locals.end();
  701. }
// Records that the local at `local_index` is initialized (queried via is_local_initialized()).
void Generator::set_local_initialized(u32 local_index)
{
    m_initialized_locals.set(local_index);
}
  706. ScopedOperand Generator::get_this(Optional<ScopedOperand> preferred_dst)
  707. {
  708. if (m_current_basic_block->this_().has_value())
  709. return m_current_basic_block->this_().value();
  710. if (m_root_basic_blocks[0]->this_().has_value()) {
  711. m_current_basic_block->set_this(m_root_basic_blocks[0]->this_().value());
  712. return m_root_basic_blocks[0]->this_().value();
  713. }
  714. auto dst = preferred_dst.has_value() ? preferred_dst.value() : allocate_register();
  715. emit<Bytecode::Op::ResolveThisBinding>(dst);
  716. m_current_basic_block->set_this(dst);
  717. return dst;
  718. }
// Returns the operand for the generator's dedicated accumulator register.
ScopedOperand Generator::accumulator()
{
    return m_accumulator;
}
// If the most recently emitted instruction is a comparison whose destination is
// `condition`, rewinds the block and replaces the comparison + pending jump with
// a single fused Jump<Comparison> instruction. Returns true if it fused.
// Callers must ensure `condition` is a temporary with no other users, since the
// comparison's destination write is elided (see emit_jump_if()).
bool Generator::fuse_compare_and_jump(ScopedOperand const& condition, Label true_target, Label false_target)
{
    // Decode the last instruction in place from the block's raw byte buffer.
    auto& last_instruction = *reinterpret_cast<Instruction const*>(m_current_basic_block->data() + m_current_basic_block->last_instruction_start_offset());

// Expands to one fusion attempt per comparison opcode (LessThan, StrictlyEquals, ...).
#define HANDLE_COMPARISON_OP(op_TitleCase, op_snake_case) \
    if (last_instruction.type() == Instruction::Type::op_TitleCase) { \
        auto& comparison = static_cast<Op::op_TitleCase const&>(last_instruction); \
        VERIFY(comparison.dst() == condition); \
        auto lhs = comparison.lhs(); \
        auto rhs = comparison.rhs(); \
        m_current_basic_block->rewind(); \
        emit<Op::Jump##op_TitleCase>(lhs, rhs, true_target, false_target); \
        return true; \
    }

    JS_ENUMERATE_COMPARISON_OPS(HANDLE_COMPARISON_OP);
#undef HANDLE_COMPARISON_OP

    return false;
}
  740. void Generator::emit_jump_if(ScopedOperand const& condition, Label true_target, Label false_target)
  741. {
  742. if (condition.operand().is_constant()) {
  743. auto value = m_constants[condition.operand().index()];
  744. if (value.is_boolean()) {
  745. if (value.as_bool()) {
  746. emit<Op::Jump>(true_target);
  747. } else {
  748. emit<Op::Jump>(false_target);
  749. }
  750. return;
  751. }
  752. }
  753. // NOTE: It's only safe to fuse compare-and-jump if the condition is a temporary with no other dependents.
  754. if (condition.operand().is_register()
  755. && condition.ref_count() == 1
  756. && m_current_basic_block->size() > 0) {
  757. if (fuse_compare_and_jump(condition, true_target, false_target))
  758. return;
  759. }
  760. emit<Op::JumpIf>(condition, true_target, false_target);
  761. }
  762. }