/*
 * Copyright (c) 2021-2024, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/QuickSort.h>
#include <AK/TemporaryChange.h>
#include <LibJS/AST.h>
#include <LibJS/Bytecode/BasicBlock.h>
#include <LibJS/Bytecode/Generator.h>
#include <LibJS/Bytecode/Instruction.h>
#include <LibJS/Bytecode/Op.h>
#include <LibJS/Bytecode/Register.h>
#include <LibJS/Runtime/VM.h>

namespace JS::Bytecode {

Generator::Generator(VM& vm)
    : m_vm(vm)
    , m_string_table(make<StringTable>())
    , m_identifier_table(make<IdentifierTable>())
    , m_regex_table(make<RegexTable>())
    , m_constants(vm.heap())
{
}
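
// Compiles the given AST node into an Executable: bytecode is first emitted into a graph of
// basic blocks, which are then flattened into one linear instruction stream while jump labels
// and exception handler ranges are resolved to absolute offsets.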
CodeGenerationErrorOr<NonnullGCPtr<Executable>> Generator::generate(VM& vm, ASTNode const& node, ReadonlySpan<FunctionParameter> parameters, FunctionKind enclosing_function_kind)
{
    Generator generator(vm);

    for (auto const& parameter : parameters) {
        if (auto const* identifier = parameter.binding.get_pointer<NonnullRefPtr<Identifier const>>();
            identifier && (*identifier)->is_local()) {
            generator.set_local_initialized((*identifier)->local_variable_index());
        }
    }

    generator.switch_to_basic_block(generator.make_block());
    SourceLocationScope scope(generator, node);
    generator.m_enclosing_function_kind = enclosing_function_kind;
    if (generator.is_in_generator_or_async_function()) {
        // Immediately yield with no value.
        auto& start_block = generator.make_block();
        generator.emit<Bytecode::Op::Yield>(Label { start_block }, generator.add_constant(js_undefined()));
        generator.switch_to_basic_block(start_block);
        // NOTE: This doesn't have to handle received throw/return completions, as GeneratorObject::resume_abrupt
        //       will not enter the generator from the SuspendedStart state, and instead immediately completes the generator.
    }

    auto last_value = TRY(node.generate_bytecode(generator));

    if (!generator.current_block().is_terminated() && last_value.has_value()) {
        generator.emit<Bytecode::Op::End>(last_value.value());
    }

    if (generator.is_in_generator_or_async_function()) {
        // Terminate all unterminated blocks with yield return
        for (auto& block : generator.m_root_basic_blocks) {
            if (block->is_terminated())
                continue;
            generator.switch_to_basic_block(*block);
            generator.emit<Bytecode::Op::Yield>(nullptr, generator.add_constant(js_undefined()));
        }
    }

    bool is_strict_mode = false;
    if (is<Program>(node))
        is_strict_mode = static_cast<Program const&>(node).is_strict_mode();
    else if (is<FunctionBody>(node))
        is_strict_mode = static_cast<FunctionBody const&>(node).in_strict_mode();
    else if (is<FunctionDeclaration>(node))
        is_strict_mode = static_cast<FunctionDeclaration const&>(node).is_strict_mode();
    else if (is<FunctionExpression>(node))
        is_strict_mode = static_cast<FunctionExpression const&>(node).is_strict_mode();

    size_t size_needed = 0;
    for (auto& block : generator.m_root_basic_blocks) {
        size_needed += block->size();
    }

    Vector<u8> bytecode;
    bytecode.ensure_capacity(size_needed);

    Vector<size_t> basic_block_start_offsets;
    basic_block_start_offsets.ensure_capacity(generator.m_root_basic_blocks.size());

    HashMap<BasicBlock const*, size_t> block_offsets;
    Vector<size_t> label_offsets;

    struct UnlinkedExceptionHandlers {
        size_t start_offset;
        size_t end_offset;
        BasicBlock const* handler;
        BasicBlock const* finalizer;
    };
    Vector<UnlinkedExceptionHandlers> unlinked_exception_handlers;

    HashMap<size_t, SourceRecord> source_map;
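
    // Flatten the basic block graph into one contiguous bytecode stream. While copying each
    // block we remember where it starts, which instruction bytes contain labels that still
    // need patching, and which byte ranges are covered by exception handlers/finalizers.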
    for (auto& block : generator.m_root_basic_blocks) {
        basic_block_start_offsets.append(bytecode.size());
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.append({
                .start_offset = bytecode.size(),
                .end_offset = 0,
                .handler = block->handler(),
                .finalizer = block->finalizer(),
            });
        }

        block_offsets.set(block.ptr(), bytecode.size());

        for (auto& [offset, source_record] : block->source_map()) {
            source_map.set(bytecode.size() + offset, source_record);
        }

        Bytecode::InstructionStreamIterator it(block->instruction_stream());
        while (!it.at_end()) {
            auto& instruction = const_cast<Instruction&>(*it);

            // OPTIMIZATION: Don't emit jumps that just jump to the next block.
            if (instruction.type() == Instruction::Type::Jump) {
                auto& jump = static_cast<Bytecode::Op::Jump&>(instruction);
                if (jump.target().basic_block_index() == block->index() + 1) {
                    if (basic_block_start_offsets.last() == bytecode.size()) {
                        // This block is empty, just skip it.
                        basic_block_start_offsets.take_last();
                    }
                    ++it;
                    continue;
                }
            }

            // OPTIMIZATION: For `JumpIf` where one of the targets is the very next block,
            //               we can emit a `JumpTrue` or `JumpFalse` (to the other block) instead.
            if (instruction.type() == Instruction::Type::JumpIf) {
                auto& jump = static_cast<Bytecode::Op::JumpIf&>(instruction);
                if (jump.true_target().basic_block_index() == block->index() + 1) {
                    Op::JumpFalse jump_false(jump.condition(), Label { jump.false_target() });
                    auto& label = jump_false.target();
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_false));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_false), jump_false.length());
                    ++it;
                    continue;
                }
                if (jump.false_target().basic_block_index() == block->index() + 1) {
                    Op::JumpTrue jump_true(jump.condition(), Label { jump.true_target() });
                    auto& label = jump_true.target();
                    size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&jump_true));
                    label_offsets.append(label_offset);
                    bytecode.append(reinterpret_cast<u8 const*>(&jump_true), jump_true.length());
                    ++it;
                    continue;
                }
            }

            instruction.visit_labels([&](Label& label) {
                size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&instruction));
                label_offsets.append(label_offset);
            });
            bytecode.append(reinterpret_cast<u8 const*>(&instruction), instruction.length());
            ++it;
        }
        if (!block->is_terminated()) {
            Op::End end(generator.add_constant(js_undefined()));
            bytecode.append(reinterpret_cast<u8 const*>(&end), end.length());
        }
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.last().end_offset = bytecode.size();
        }
    }
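
    // Every block's start offset is now known, so patch each recorded label slot in the
    // linear bytecode to point at the absolute offset of its target block.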
    for (auto label_offset : label_offsets) {
        auto& label = *reinterpret_cast<Label*>(bytecode.data() + label_offset);
        auto* block = generator.m_root_basic_blocks[label.basic_block_index()].ptr();
        label.set_address(block_offsets.get(block).value());
    }

    auto executable = vm.heap().allocate_without_realm<Executable>(
        move(bytecode),
        move(generator.m_identifier_table),
        move(generator.m_string_table),
        move(generator.m_regex_table),
        move(generator.m_constants),
        node.source_code(),
        generator.m_next_property_lookup_cache,
        generator.m_next_global_variable_cache,
        generator.m_next_environment_variable_cache,
        generator.m_next_register,
        is_strict_mode);
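
    // Convert the exception handler records from basic block pointers into bytecode offset
    // ranges, sorted by their start offset.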
    Vector<Executable::ExceptionHandlers> linked_exception_handlers;

    for (auto& unlinked_handler : unlinked_exception_handlers) {
        auto start_offset = unlinked_handler.start_offset;
        auto end_offset = unlinked_handler.end_offset;
        auto handler_offset = unlinked_handler.handler ? block_offsets.get(unlinked_handler.handler).value() : Optional<size_t> {};
        auto finalizer_offset = unlinked_handler.finalizer ? block_offsets.get(unlinked_handler.finalizer).value() : Optional<size_t> {};
        linked_exception_handlers.append({ start_offset, end_offset, handler_offset, finalizer_offset });
    }

    quick_sort(linked_exception_handlers, [](auto const& a, auto const& b) {
        return a.start_offset < b.start_offset;
    });

    executable->exception_handlers = move(linked_exception_handlers);
    executable->basic_block_start_offsets = move(basic_block_start_offsets);
    executable->source_map = move(source_map);

    return executable;
}

void Generator::grow(size_t additional_size)
{
    VERIFY(m_current_basic_block);
    m_current_basic_block->grow(additional_size);
}

Register Generator::allocate_register()
{
    VERIFY(m_next_register != NumericLimits<u32>::max());
    return Register { m_next_register++ };
}

Generator::SourceLocationScope::SourceLocationScope(Generator& generator, ASTNode const& node)
    : m_generator(generator)
    , m_previous_node(m_generator.m_current_ast_node)
{
    m_generator.m_current_ast_node = &node;
}

Generator::SourceLocationScope::~SourceLocationScope()
{
    m_generator.m_current_ast_node = m_previous_node;
}

Generator::UnwindContext::UnwindContext(Generator& generator, Optional<Label> finalizer)
    : m_generator(generator)
    , m_finalizer(finalizer)
    , m_previous_context(m_generator.m_current_unwind_context)
{
    m_generator.m_current_unwind_context = this;
}

Generator::UnwindContext::~UnwindContext()
{
    VERIFY(m_generator.m_current_unwind_context == this);
    m_generator.m_current_unwind_context = m_previous_context;
}

Label Generator::nearest_continuable_scope() const
{
    return m_continuable_scopes.last().bytecode_target;
}

void Generator::block_declaration_instantiation(ScopeNode const& scope_node)
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::BlockDeclarationInstantiation>(scope_node);
}

void Generator::begin_variable_scope()
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::CreateLexicalEnvironment>();
}

void Generator::end_variable_scope()
{
    end_boundary(BlockBoundaryType::LeaveLexicalEnvironment);

    if (!m_current_basic_block->is_terminated()) {
        emit<Bytecode::Op::LeaveLexicalEnvironment>();
    }
}

void Generator::begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_continuable_scopes.append({ continue_target, language_label_set });
    start_boundary(BlockBoundaryType::Continue);
}

void Generator::end_continuable_scope()
{
    m_continuable_scopes.take_last();
    end_boundary(BlockBoundaryType::Continue);
}

Label Generator::nearest_breakable_scope() const
{
    return m_breakable_scopes.last().bytecode_target;
}

void Generator::begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_breakable_scopes.append({ breakable_target, language_label_set });
    start_boundary(BlockBoundaryType::Break);
}

void Generator::end_breakable_scope()
{
    m_breakable_scopes.take_last();
    end_boundary(BlockBoundaryType::Break);
}

CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_super_reference(MemberExpression const& expression)
{
    VERIFY(is<SuperExpression>(expression.object()));

    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    // 1. Let env be GetThisEnvironment().
    // 2. Let actualThis be ? env.GetThisBinding().
    auto actual_this = Operand(allocate_register());
    emit<Bytecode::Op::ResolveThisBinding>(actual_this);

    Optional<Bytecode::Operand> computed_property_value;

    if (expression.is_computed()) {
        // SuperProperty : super [ Expression ]
        // 3. Let propertyNameReference be ? Evaluation of Expression.
        // 4. Let propertyNameValue be ? GetValue(propertyNameReference).
        computed_property_value = TRY(expression.property().generate_bytecode(*this)).value();
    }

    // 5/7. Return ? MakeSuperPropertyReference(actualThis, propertyKey, strict).
    // https://tc39.es/ecma262/#sec-makesuperpropertyreference
    // 1. Let env be GetThisEnvironment().
    // 2. Assert: env.HasSuperBinding() is true.
    // 3. Let baseValue be ? env.GetSuperBase().
    auto base_value = Operand(allocate_register());
    emit<Bytecode::Op::ResolveSuperBase>(base_value);

    // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
    return ReferenceOperands {
        .base = base_value,
        .referenced_name = computed_property_value,
        .this_value = actual_this,
    };
}
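
// Emits code that loads the value behind a reference-producing node (an Identifier or a
// MemberExpression) and returns the operands that make up the reference, so callers can
// reuse the same base/property when storing back through it.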
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_load_from_reference(JS::ASTNode const& node, Optional<Operand> preferred_dst)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        auto loaded_value = TRY(identifier.generate_bytecode(*this, preferred_dst)).value();
        return ReferenceOperands {
            .loaded_value = loaded_value,
        };
    }
    if (!is<MemberExpression>(node)) {
        return CodeGenerationError {
            &node,
            "Unimplemented/invalid node used as a reference"sv
        };
    }
    auto& expression = static_cast<MemberExpression const&>(node);

    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    if (is<SuperExpression>(expression.object())) {
        auto super_reference = TRY(emit_super_reference(expression));
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : Operand(allocate_register());

        if (super_reference.referenced_name.has_value()) {
            // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
            // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
            emit<Bytecode::Op::GetByValueWithThis>(dst, *super_reference.base, *super_reference.referenced_name, *super_reference.this_value);
        } else {
            // 3. Let propertyKey be StringValue of IdentifierName.
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit_get_by_id_with_this(dst, *super_reference.base, identifier_table_ref, *super_reference.this_value);
        }

        super_reference.loaded_value = dst;
        return super_reference;
    }

    auto base = TRY(expression.object().generate_bytecode(*this)).value();
    auto base_identifier = intern_identifier_for_expression(expression.object());

    if (expression.is_computed()) {
        auto property = TRY(expression.property().generate_bytecode(*this)).value();
        auto saved_property = Operand(allocate_register());
        emit<Bytecode::Op::Mov>(saved_property, property);
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : Operand(allocate_register());
        emit<Bytecode::Op::GetByValue>(dst, base, property, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_name = saved_property,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_identifier()) {
        auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : Operand(allocate_register());
        emit_get_by_id(dst, base, identifier_table_ref, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_private_identifier()) {
        auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : Operand(allocate_register());
        emit<Bytecode::Op::GetPrivateById>(dst, base, identifier_table_ref);
        return ReferenceOperands {
            .base = base,
            .referenced_private_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    return CodeGenerationError {
        &expression,
        "Unimplemented non-computed member expression"sv
    };
}
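
// Emits code that stores `value` through a reference-producing node: plain identifiers go
// through emit_set_variable(), while member expressions use the appropriate PutBy* instruction.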
CodeGenerationErrorOr<void> Generator::emit_store_to_reference(JS::ASTNode const& node, Operand value)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        emit_set_variable(identifier, value);
        return {};
    }
    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);

        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));

            // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
            if (super_reference.referenced_name.has_value()) {
                // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
                // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
                emit<Bytecode::Op::PutByValueWithThis>(*super_reference.base, *super_reference.referenced_name, *super_reference.this_value, value);
            } else {
                // 3. Let propertyKey be StringValue of IdentifierName.
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::PutByIdWithThis>(*super_reference.base, *super_reference.this_value, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
            }
        } else {
            auto object = TRY(expression.object().generate_bytecode(*this)).value();

            if (expression.is_computed()) {
                auto property = TRY(expression.property().generate_bytecode(*this)).value();
                emit<Bytecode::Op::PutByValue>(object, property, value);
            } else if (expression.property().is_identifier()) {
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::PutById>(object, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
            } else if (expression.property().is_private_identifier()) {
                auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
                emit<Bytecode::Op::PutPrivateById>(object, identifier_table_ref, value);
            } else {
                return CodeGenerationError {
                    &expression,
                    "Unimplemented non-computed member expression"sv
                };
            }
        }
        return {};
    }

    return CodeGenerationError {
        &node,
        "Unimplemented/invalid node used as a reference"sv
    };
}

CodeGenerationErrorOr<void> Generator::emit_store_to_reference(ReferenceOperands const& reference, Operand value)
{
    if (reference.referenced_private_identifier.has_value()) {
        emit<Bytecode::Op::PutPrivateById>(*reference.base, *reference.referenced_private_identifier, value);
        return {};
    }
    if (reference.referenced_identifier.has_value()) {
        if (reference.base == reference.this_value)
            emit<Bytecode::Op::PutById>(*reference.base, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
        else
            emit<Bytecode::Op::PutByIdWithThis>(*reference.base, *reference.this_value, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
        return {};
    }
    if (reference.base == reference.this_value)
        emit<Bytecode::Op::PutByValue>(*reference.base, *reference.referenced_name, value);
    else
        emit<Bytecode::Op::PutByValueWithThis>(*reference.base, *reference.referenced_name, *reference.this_value, value);
    return {};
}
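
// Emits code for `delete` applied to a reference. Deleting an identifier that is a local always
// yields false, member expressions emit the matching DeleteBy* instruction, and anything else is
// still evaluated for its side effects before the constant `true` is returned.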
CodeGenerationErrorOr<Optional<Operand>> Generator::emit_delete_reference(JS::ASTNode const& node)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);

        if (identifier.is_local()) {
            return add_constant(Value(false));
        }

        auto dst = Operand(allocate_register());
        emit<Bytecode::Op::DeleteVariable>(dst, intern_identifier(identifier.string()));
        return dst;
    }

    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);

        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));
            auto dst = Operand(allocate_register());

            if (super_reference.referenced_name.has_value()) {
                emit<Bytecode::Op::DeleteByValueWithThis>(dst, *super_reference.base, *super_reference.this_value, *super_reference.referenced_name);
            } else {
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::DeleteByIdWithThis>(dst, *super_reference.base, *super_reference.this_value, identifier_table_ref);
            }

            return Optional<Operand> {};
        }

        auto object = TRY(expression.object().generate_bytecode(*this)).value();
        auto dst = Operand(allocate_register());

        if (expression.is_computed()) {
            auto property = TRY(expression.property().generate_bytecode(*this)).value();
            emit<Bytecode::Op::DeleteByValue>(dst, object, property);
        } else if (expression.property().is_identifier()) {
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit<Bytecode::Op::DeleteById>(dst, object, identifier_table_ref);
        } else {
            // NOTE: Trying to delete a private field generates a SyntaxError in the parser.
            return CodeGenerationError {
                &expression,
                "Unimplemented non-computed member expression"sv
            };
        }
        return dst;
    }

    // Though this will have no deletion effect, we still have to evaluate the node as it can have side effects.
    // For example: delete a(); delete ++c.b; etc.

    // 13.5.1.2 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-delete-operator-runtime-semantics-evaluation
    // 1. Let ref be the result of evaluating UnaryExpression.
    // 2. ReturnIfAbrupt(ref).
    (void)TRY(node.generate_bytecode(*this));

    // 3. If ref is not a Reference Record, return true.
    // NOTE: The rest of the steps are handled by Delete{Variable,ByValue,Id}.
    return add_constant(Value(true));
}
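
// Stores `value` into the binding named by `identifier`: locals become a SetLocal (skipping
// no-op self-moves), everything else goes through SetVariable with an environment variable cache.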
void Generator::emit_set_variable(JS::Identifier const& identifier, Operand value, Bytecode::Op::SetVariable::InitializationMode initialization_mode, Bytecode::Op::EnvironmentMode mode)
{
    if (identifier.is_local()) {
        if (value.is_local() && value.index() == identifier.local_variable_index()) {
            // Moving a local to itself is a no-op.
            return;
        }
        emit<Bytecode::Op::SetLocal>(identifier.local_variable_index(), value);
    } else {
        emit<Bytecode::Op::SetVariable>(intern_identifier(identifier.string()), value, next_environment_variable_cache(), initialization_mode, mode);
    }
}
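
// Produces a best-effort, human-readable name for an expression (e.g. "foo.bar" or "foo['baz']");
// intern_identifier_for_expression() below uses it to attach a base identifier hint to property
// access instructions.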
static Optional<ByteString> expression_identifier(Expression const& expression)
{
    if (expression.is_identifier()) {
        auto const& identifier = static_cast<Identifier const&>(expression);
        return identifier.string();
    }

    if (expression.is_numeric_literal()) {
        auto const& literal = static_cast<NumericLiteral const&>(expression);
        return literal.value().to_string_without_side_effects().to_byte_string();
    }

    if (expression.is_string_literal()) {
        auto const& literal = static_cast<StringLiteral const&>(expression);
        return ByteString::formatted("'{}'", literal.value());
    }

    if (expression.is_member_expression()) {
        auto const& member_expression = static_cast<MemberExpression const&>(expression);
        StringBuilder builder;

        if (auto identifier = expression_identifier(member_expression.object()); identifier.has_value())
            builder.append(*identifier);

        if (auto identifier = expression_identifier(member_expression.property()); identifier.has_value()) {
            if (member_expression.is_computed())
                builder.appendff("[{}]", *identifier);
            else
                builder.appendff(".{}", *identifier);
        }

        return builder.to_byte_string();
    }

    return {};
}

Optional<IdentifierTableIndex> Generator::intern_identifier_for_expression(Expression const& expression)
{
    if (auto identifier = expression_identifier(expression); identifier.has_value())
        return intern_identifier(identifier.release_value());
    return {};
}
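
// Emits the bookkeeping needed to leave the scopes between the current position and the nearest
// break/continue target: leaving unwind contexts and lexical environments, and routing the jump
// through any intervening `finally` block via ScheduleJump before finally jumping to the target.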
void Generator::generate_scoped_jump(JumpType type)
{
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    bool last_was_finally = false;
    for (size_t i = m_boundaries.size(); i > 0; --i) {
        auto boundary = m_boundaries[i - 1];
        using enum BlockBoundaryType;
        switch (boundary) {
        case Break:
            if (type == JumpType::Break) {
                emit<Op::Jump>(nearest_breakable_scope());
                return;
            }
            break;
        case Continue:
            if (type == JumpType::Continue) {
                emit<Op::Jump>(nearest_continuable_scope());
                return;
            }
            break;
        case Unwind:
            VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
            if (!last_was_finally) {
                VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                emit<Bytecode::Op::LeaveUnwindContext>();
                m_current_unwind_context = m_current_unwind_context->previous();
            }
            last_was_finally = false;
            break;
        case LeaveLexicalEnvironment:
            emit<Bytecode::Op::LeaveLexicalEnvironment>();
            break;
        case ReturnToFinally: {
            VERIFY(m_current_unwind_context->finalizer().has_value());
            m_current_unwind_context = m_current_unwind_context->previous();
            auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
            auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
            auto& block = make_block(block_name);
            emit<Op::ScheduleJump>(Label { block });
            switch_to_basic_block(block);
            last_was_finally = true;
            break;
        }
        case LeaveFinally:
            emit<Op::LeaveFinally>();
            break;
        }
    }
    VERIFY_NOT_REACHED();
}
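
// Same as generate_scoped_jump(), but for labelled break/continue: boundaries are unwound scope by
// scope until a breakable/continuable scope whose label set contains the requested label is found.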
void Generator::generate_labelled_jump(JumpType type, DeprecatedFlyString const& label)
{
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    size_t current_boundary = m_boundaries.size();
    bool last_was_finally = false;

    auto const& jumpable_scopes = type == JumpType::Continue ? m_continuable_scopes : m_breakable_scopes;

    for (auto const& jumpable_scope : jumpable_scopes.in_reverse()) {
        for (; current_boundary > 0; --current_boundary) {
            auto boundary = m_boundaries[current_boundary - 1];
            if (boundary == BlockBoundaryType::Unwind) {
                VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
                if (!last_was_finally) {
                    VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                    emit<Bytecode::Op::LeaveUnwindContext>();
                    m_current_unwind_context = m_current_unwind_context->previous();
                }
                last_was_finally = false;
            } else if (boundary == BlockBoundaryType::LeaveLexicalEnvironment) {
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
            } else if (boundary == BlockBoundaryType::ReturnToFinally) {
                VERIFY(m_current_unwind_context->finalizer().has_value());
                m_current_unwind_context = m_current_unwind_context->previous();
                auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
                auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
                auto& block = make_block(block_name);
                emit<Op::ScheduleJump>(Label { block });
                switch_to_basic_block(block);
                last_was_finally = true;
            } else if ((type == JumpType::Continue && boundary == BlockBoundaryType::Continue) || (type == JumpType::Break && boundary == BlockBoundaryType::Break)) {
                // Make sure we don't process this boundary twice if the current jumpable scope doesn't contain the target label.
                --current_boundary;
                break;
            }
        }

        if (jumpable_scope.language_label_set.contains_slow(label)) {
            emit<Op::Jump>(jumpable_scope.bytecode_target);
            return;
        }
    }

    // We must have a jumpable scope available that contains the label, as this should be enforced by the parser.
    VERIFY_NOT_REACHED();
}

void Generator::generate_break()
{
    generate_scoped_jump(JumpType::Break);
}

void Generator::generate_break(DeprecatedFlyString const& break_label)
{
    generate_labelled_jump(JumpType::Break, break_label);
}

void Generator::generate_continue()
{
    generate_scoped_jump(JumpType::Continue);
}

void Generator::generate_continue(DeprecatedFlyString const& continue_label)
{
    generate_labelled_jump(JumpType::Continue, continue_label);
}

void Generator::push_home_object(Operand object)
{
    m_home_objects.append(object);
}

void Generator::pop_home_object()
{
    m_home_objects.take_last();
}

void Generator::emit_new_function(Operand dst, FunctionExpression const& function_node, Optional<IdentifierTableIndex> lhs_name)
{
    if (m_home_objects.is_empty()) {
        emit<Op::NewFunction>(dst, function_node, lhs_name);
    } else {
        emit<Op::NewFunction>(dst, function_node, lhs_name, m_home_objects.last());
    }
}
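
// Implements NamedEvaluation for assignments: anonymous function and class expressions are
// generated with `lhs_name` so they pick up the name of their assignment target; any other
// expression is generated normally.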
CodeGenerationErrorOr<Optional<Operand>> Generator::emit_named_evaluation_if_anonymous_function(Expression const& expression, Optional<IdentifierTableIndex> lhs_name, Optional<Operand> preferred_dst)
{
    if (is<FunctionExpression>(expression)) {
        auto const& function_expression = static_cast<FunctionExpression const&>(expression);
        if (!function_expression.has_name()) {
            return TRY(function_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
        }
    }

    if (is<ClassExpression>(expression)) {
        auto const& class_expression = static_cast<ClassExpression const&>(expression);
        if (!class_expression.has_name()) {
            return TRY(class_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
        }
    }

    return expression.generate_bytecode(*this, preferred_dst);
}

void Generator::emit_get_by_id(Operand dst, Operand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier)
{
    emit<Op::GetById>(dst, base, property_identifier, move(base_identifier), m_next_property_lookup_cache++);
}

void Generator::emit_get_by_id_with_this(Operand dst, Operand base, IdentifierTableIndex id, Operand this_value)
{
    emit<Op::GetByIdWithThis>(dst, base, id, this_value, m_next_property_lookup_cache++);
}

void Generator::emit_iterator_value(Operand dst, Operand result)
{
    emit_get_by_id(dst, result, intern_identifier("value"sv));
}

void Generator::emit_iterator_complete(Operand dst, Operand result)
{
    emit_get_by_id(dst, result, intern_identifier("done"sv));
}

bool Generator::is_local_initialized(u32 local_index) const
{
    return m_initialized_locals.find(local_index) != m_initialized_locals.end();
}

void Generator::set_local_initialized(u32 local_index)
{
    m_initialized_locals.set(local_index);
}

}