Generator.cpp

/*
 * Copyright (c) 2021-2024, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/QuickSort.h>
#include <AK/TemporaryChange.h>
#include <LibJS/AST.h>
#include <LibJS/Bytecode/BasicBlock.h>
#include <LibJS/Bytecode/Generator.h>
#include <LibJS/Bytecode/Instruction.h>
#include <LibJS/Bytecode/Op.h>
#include <LibJS/Bytecode/Register.h>
#include <LibJS/Runtime/VM.h>

namespace JS::Bytecode {

Generator::Generator(VM& vm)
    : m_vm(vm)
    , m_string_table(make<StringTable>())
    , m_identifier_table(make<IdentifierTable>())
    , m_regex_table(make<RegexTable>())
    , m_constants(vm.heap())
{
}

CodeGenerationErrorOr<NonnullGCPtr<Executable>> Generator::generate(VM& vm, ASTNode const& node, ReadonlySpan<FunctionParameter> parameters, FunctionKind enclosing_function_kind)
{
    Generator generator(vm);
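    // NOTE: Parameters bound to simple local slots are already initialized by the time the
    //       function body starts executing, so record them as initialized up front.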
    for (auto const& parameter : parameters) {
        if (auto const* identifier = parameter.binding.get_pointer<NonnullRefPtr<Identifier const>>();
            identifier && (*identifier)->is_local()) {
            generator.set_local_initialized((*identifier)->local_variable_index());
        }
    }

    generator.switch_to_basic_block(generator.make_block());
    SourceLocationScope scope(generator, node);
    generator.m_enclosing_function_kind = enclosing_function_kind;
    if (generator.is_in_generator_or_async_function()) {
        // Immediately yield with no value.
        auto& start_block = generator.make_block();
        generator.emit<Bytecode::Op::Yield>(Label { start_block }, generator.add_constant(js_undefined()));
        generator.switch_to_basic_block(start_block);
        // NOTE: This doesn't have to handle received throw/return completions, as GeneratorObject::resume_abrupt
        //       will not enter the generator from the SuspendedStart state, and instead immediately completes the generator.
    }
    auto last_value = TRY(node.generate_bytecode(generator));

    if (!generator.current_block().is_terminated() && last_value.has_value()) {
        generator.emit<Bytecode::Op::End>(last_value.value());
    }

    if (generator.is_in_generator_or_async_function()) {
        // Terminate all unterminated blocks with yield return
        for (auto& block : generator.m_root_basic_blocks) {
            if (block->is_terminated())
                continue;
            generator.switch_to_basic_block(*block);
            generator.emit<Bytecode::Op::Yield>(nullptr, generator.add_constant(js_undefined()));
        }
    }

    bool is_strict_mode = false;
    if (is<Program>(node))
        is_strict_mode = static_cast<Program const&>(node).is_strict_mode();
    else if (is<FunctionBody>(node))
        is_strict_mode = static_cast<FunctionBody const&>(node).in_strict_mode();
    else if (is<FunctionDeclaration>(node))
        is_strict_mode = static_cast<FunctionDeclaration const&>(node).is_strict_mode();
    else if (is<FunctionExpression>(node))
        is_strict_mode = static_cast<FunctionExpression const&>(node).is_strict_mode();
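    // Flatten the basic blocks into a single contiguous bytecode buffer. Label operands and
    // exception handler ranges are recorded now and patched to absolute offsets once every
    // block's start offset is known.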
    size_t size_needed = 0;
    for (auto& block : generator.m_root_basic_blocks) {
        size_needed += block->size();
    }

    Vector<u8> bytecode;
    bytecode.ensure_capacity(size_needed);

    Vector<size_t> basic_block_start_offsets;
    basic_block_start_offsets.ensure_capacity(generator.m_root_basic_blocks.size());

    HashMap<BasicBlock const*, size_t> block_offsets;
    Vector<size_t> label_offsets;

    struct UnlinkedExceptionHandlers {
        size_t start_offset;
        size_t end_offset;
        BasicBlock const* handler;
        BasicBlock const* finalizer;
    };
    Vector<UnlinkedExceptionHandlers> unlinked_exception_handlers;

    for (auto& block : generator.m_root_basic_blocks) {
        basic_block_start_offsets.append(bytecode.size());
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.append({
                .start_offset = bytecode.size(),
                .end_offset = 0,
                .handler = block->handler(),
                .finalizer = block->finalizer(),
            });
        }

        block_offsets.set(block.ptr(), bytecode.size());

        Bytecode::InstructionStreamIterator it(block->instruction_stream());
        while (!it.at_end()) {
            auto& instruction = const_cast<Instruction&>(*it);
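            // Remember the byte offset of each Label operand embedded in this instruction,
            // so it can be patched to an absolute address after all blocks are laid out.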
            instruction.visit_labels([&](Label& label) {
                size_t label_offset = bytecode.size() + (bit_cast<FlatPtr>(&label) - bit_cast<FlatPtr>(&instruction));
                label_offsets.append(label_offset);
            });
            bytecode.append(reinterpret_cast<u8 const*>(&instruction), instruction.length());
            ++it;
        }
        if (!block->is_terminated()) {
            Op::End end(generator.add_constant(js_undefined()));
            bytecode.append(reinterpret_cast<u8 const*>(&end), end.length());
        }
        if (block->handler() || block->finalizer()) {
            unlinked_exception_handlers.last().end_offset = bytecode.size();
        }
    }
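
    // Now that every block's start offset is known, patch each recorded Label operand with
    // the absolute bytecode offset of its target block.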
    for (auto label_offset : label_offsets) {
        auto& label = *reinterpret_cast<Label*>(bytecode.data() + label_offset);
        auto* block = &label.block();
        label.set_address(block_offsets.get(block).value());
    }

    auto executable = vm.heap().allocate_without_realm<Executable>(
        move(bytecode),
        move(generator.m_identifier_table),
        move(generator.m_string_table),
        move(generator.m_regex_table),
        move(generator.m_constants),
        node.source_code(),
        generator.m_next_property_lookup_cache,
        generator.m_next_global_variable_cache,
        generator.m_next_environment_variable_cache,
        generator.m_next_register,
        is_strict_mode);
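
    // Resolve each block's handler/finalizer pointers into bytecode offsets and attach the
    // resulting exception handler table, sorted by start offset.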
    Vector<Executable::ExceptionHandlers> linked_exception_handlers;

    for (auto& unlinked_handler : unlinked_exception_handlers) {
        auto start_offset = unlinked_handler.start_offset;
        auto end_offset = unlinked_handler.end_offset;
        auto handler_offset = unlinked_handler.handler ? block_offsets.get(unlinked_handler.handler).value() : Optional<size_t> {};
        auto finalizer_offset = unlinked_handler.finalizer ? block_offsets.get(unlinked_handler.finalizer).value() : Optional<size_t> {};
        linked_exception_handlers.append({ start_offset, end_offset, handler_offset, finalizer_offset });
    }

    quick_sort(linked_exception_handlers, [](auto const& a, auto const& b) {
        return a.start_offset < b.start_offset;
    });

    executable->exception_handlers = move(linked_exception_handlers);
    executable->basic_block_start_offsets = move(basic_block_start_offsets);

    return executable;
}
void Generator::grow(size_t additional_size)
{
    VERIFY(m_current_basic_block);
    m_current_basic_block->grow(additional_size);
}

Register Generator::allocate_register()
{
    VERIFY(m_next_register != NumericLimits<u32>::max());
    return Register { m_next_register++ };
}

Generator::SourceLocationScope::SourceLocationScope(Generator& generator, ASTNode const& node)
    : m_generator(generator)
    , m_previous_node(m_generator.m_current_ast_node)
{
    m_generator.m_current_ast_node = &node;
}

Generator::SourceLocationScope::~SourceLocationScope()
{
    m_generator.m_current_ast_node = m_previous_node;
}

Generator::UnwindContext::UnwindContext(Generator& generator, Optional<Label> finalizer)
    : m_generator(generator)
    , m_finalizer(finalizer)
    , m_previous_context(m_generator.m_current_unwind_context)
{
    m_generator.m_current_unwind_context = this;
}

Generator::UnwindContext::~UnwindContext()
{
    VERIFY(m_generator.m_current_unwind_context == this);
    m_generator.m_current_unwind_context = m_previous_context;
}

Label Generator::nearest_continuable_scope() const
{
    return m_continuable_scopes.last().bytecode_target;
}

void Generator::block_declaration_instantiation(ScopeNode const& scope_node)
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::BlockDeclarationInstantiation>(scope_node);
}

void Generator::begin_variable_scope()
{
    start_boundary(BlockBoundaryType::LeaveLexicalEnvironment);
    emit<Bytecode::Op::CreateLexicalEnvironment>();
}

void Generator::end_variable_scope()
{
    end_boundary(BlockBoundaryType::LeaveLexicalEnvironment);

    if (!m_current_basic_block->is_terminated()) {
        emit<Bytecode::Op::LeaveLexicalEnvironment>();
    }
}

void Generator::begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_continuable_scopes.append({ continue_target, language_label_set });
    start_boundary(BlockBoundaryType::Continue);
}

void Generator::end_continuable_scope()
{
    m_continuable_scopes.take_last();
    end_boundary(BlockBoundaryType::Continue);
}

Label Generator::nearest_breakable_scope() const
{
    return m_breakable_scopes.last().bytecode_target;
}

void Generator::begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set)
{
    m_breakable_scopes.append({ breakable_target, language_label_set });
    start_boundary(BlockBoundaryType::Break);
}

void Generator::end_breakable_scope()
{
    m_breakable_scopes.take_last();
    end_boundary(BlockBoundaryType::Break);
}
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_super_reference(MemberExpression const& expression)
{
    VERIFY(is<SuperExpression>(expression.object()));

    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    // 1. Let env be GetThisEnvironment().
    // 2. Let actualThis be ? env.GetThisBinding().
    auto actual_this = Operand(allocate_register());
    emit<Bytecode::Op::ResolveThisBinding>(actual_this);

    Optional<Bytecode::Operand> computed_property_value;

    if (expression.is_computed()) {
        // SuperProperty : super [ Expression ]
        // 3. Let propertyNameReference be ? Evaluation of Expression.
        // 4. Let propertyNameValue be ? GetValue(propertyNameReference).
        computed_property_value = TRY(expression.property().generate_bytecode(*this)).value();
    }

    // 5/7. Return ? MakeSuperPropertyReference(actualThis, propertyKey, strict).

    // https://tc39.es/ecma262/#sec-makesuperpropertyreference
    // 1. Let env be GetThisEnvironment().
    // 2. Assert: env.HasSuperBinding() is true.
    // 3. Let baseValue be ? env.GetSuperBase().
    auto base_value = Operand(allocate_register());
    emit<Bytecode::Op::ResolveSuperBase>(base_value);

    // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
    return ReferenceOperands {
        .base = base_value,
        .referenced_name = computed_property_value,
        .this_value = actual_this,
    };
}
CodeGenerationErrorOr<Generator::ReferenceOperands> Generator::emit_load_from_reference(JS::ASTNode const& node, Optional<Operand> preferred_dst)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        auto loaded_value = TRY(identifier.generate_bytecode(*this, preferred_dst)).value();
        return ReferenceOperands {
            .loaded_value = loaded_value,
        };
    }
    if (!is<MemberExpression>(node)) {
        return CodeGenerationError {
            &node,
            "Unimplemented/invalid node used as a reference"sv
        };
    }
    auto& expression = static_cast<MemberExpression const&>(node);

    // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
    if (is<SuperExpression>(expression.object())) {
        auto super_reference = TRY(emit_super_reference(expression));
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : Operand(allocate_register());

        if (super_reference.referenced_name.has_value()) {
            // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
            // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
            emit<Bytecode::Op::GetByValueWithThis>(dst, *super_reference.base, *super_reference.referenced_name, *super_reference.this_value);
        } else {
            // 3. Let propertyKey be StringValue of IdentifierName.
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit_get_by_id_with_this(dst, *super_reference.base, identifier_table_ref, *super_reference.this_value);
        }

        super_reference.loaded_value = dst;
        return super_reference;
    }

    auto base = TRY(expression.object().generate_bytecode(*this)).value();
    auto base_identifier = intern_identifier_for_expression(expression.object());

    if (expression.is_computed()) {
        auto property = TRY(expression.property().generate_bytecode(*this)).value();
        auto saved_property = Operand(allocate_register());
        emit<Bytecode::Op::Mov>(saved_property, property);
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : Operand(allocate_register());
        emit<Bytecode::Op::GetByValue>(dst, base, property, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_name = saved_property,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_identifier()) {
        auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : Operand(allocate_register());
        emit_get_by_id(dst, base, identifier_table_ref, move(base_identifier));
        return ReferenceOperands {
            .base = base,
            .referenced_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    if (expression.property().is_private_identifier()) {
        auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
        auto dst = preferred_dst.has_value() ? preferred_dst.value() : Operand(allocate_register());
        emit<Bytecode::Op::GetPrivateById>(dst, base, identifier_table_ref);
        return ReferenceOperands {
            .base = base,
            .referenced_private_identifier = identifier_table_ref,
            .this_value = base,
            .loaded_value = dst,
        };
    }
    return CodeGenerationError {
        &expression,
        "Unimplemented non-computed member expression"sv
    };
}
CodeGenerationErrorOr<void> Generator::emit_store_to_reference(JS::ASTNode const& node, Operand value)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        emit_set_variable(identifier, value);
        return {};
    }

    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);

        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));

            // 4. Return the Reference Record { [[Base]]: baseValue, [[ReferencedName]]: propertyKey, [[Strict]]: strict, [[ThisValue]]: actualThis }.
            if (super_reference.referenced_name.has_value()) {
                // 5. Let propertyKey be ? ToPropertyKey(propertyNameValue).
                // FIXME: This does ToPropertyKey out of order, which is observable by Symbol.toPrimitive!
                emit<Bytecode::Op::PutByValueWithThis>(*super_reference.base, *super_reference.referenced_name, *super_reference.this_value, value);
            } else {
                // 3. Let propertyKey be StringValue of IdentifierName.
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::PutByIdWithThis>(*super_reference.base, *super_reference.this_value, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
            }
        } else {
            auto object = TRY(expression.object().generate_bytecode(*this)).value();

            if (expression.is_computed()) {
                auto property = TRY(expression.property().generate_bytecode(*this)).value();
                emit<Bytecode::Op::PutByValue>(object, property, value);
            } else if (expression.property().is_identifier()) {
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::PutById>(object, identifier_table_ref, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
            } else if (expression.property().is_private_identifier()) {
                auto identifier_table_ref = intern_identifier(verify_cast<PrivateIdentifier>(expression.property()).string());
                emit<Bytecode::Op::PutPrivateById>(object, identifier_table_ref, value);
            } else {
                return CodeGenerationError {
                    &expression,
                    "Unimplemented non-computed member expression"sv
                };
            }
        }

        return {};
    }
    return CodeGenerationError {
        &node,
        "Unimplemented/invalid node used as a reference"sv
    };
}
CodeGenerationErrorOr<void> Generator::emit_store_to_reference(ReferenceOperands const& reference, Operand value)
{
    if (reference.referenced_private_identifier.has_value()) {
        emit<Bytecode::Op::PutPrivateById>(*reference.base, *reference.referenced_private_identifier, value);
        return {};
    }
    if (reference.referenced_identifier.has_value()) {
        if (reference.base == reference.this_value)
            emit<Bytecode::Op::PutById>(*reference.base, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
        else
            emit<Bytecode::Op::PutByIdWithThis>(*reference.base, *reference.this_value, *reference.referenced_identifier, value, Bytecode::Op::PropertyKind::KeyValue, next_property_lookup_cache());
        return {};
    }
    if (reference.base == reference.this_value)
        emit<Bytecode::Op::PutByValue>(*reference.base, *reference.referenced_name, value);
    else
        emit<Bytecode::Op::PutByValueWithThis>(*reference.base, *reference.referenced_name, *reference.this_value, value);
    return {};
}
CodeGenerationErrorOr<Optional<Operand>> Generator::emit_delete_reference(JS::ASTNode const& node)
{
    if (is<Identifier>(node)) {
        auto& identifier = static_cast<Identifier const&>(node);
        if (identifier.is_local()) {
            return add_constant(Value(false));
        }
        auto dst = Operand(allocate_register());
        emit<Bytecode::Op::DeleteVariable>(dst, intern_identifier(identifier.string()));
        return dst;
    }

    if (is<MemberExpression>(node)) {
        auto& expression = static_cast<MemberExpression const&>(node);

        // https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
        if (is<SuperExpression>(expression.object())) {
            auto super_reference = TRY(emit_super_reference(expression));
            auto dst = Operand(allocate_register());
            if (super_reference.referenced_name.has_value()) {
                emit<Bytecode::Op::DeleteByValueWithThis>(dst, *super_reference.base, *super_reference.this_value, *super_reference.referenced_name);
            } else {
                auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
                emit<Bytecode::Op::DeleteByIdWithThis>(dst, *super_reference.base, *super_reference.this_value, identifier_table_ref);
            }
            return Optional<Operand> {};
        }

        auto object = TRY(expression.object().generate_bytecode(*this)).value();
        auto dst = Operand(allocate_register());

        if (expression.is_computed()) {
            auto property = TRY(expression.property().generate_bytecode(*this)).value();
            emit<Bytecode::Op::DeleteByValue>(dst, object, property);
        } else if (expression.property().is_identifier()) {
            auto identifier_table_ref = intern_identifier(verify_cast<Identifier>(expression.property()).string());
            emit<Bytecode::Op::DeleteById>(dst, object, identifier_table_ref);
        } else {
            // NOTE: Trying to delete a private field generates a SyntaxError in the parser.
            return CodeGenerationError {
                &expression,
                "Unimplemented non-computed member expression"sv
            };
        }
        return dst;
    }

    // Though this will have no deletion effect, we still have to evaluate the node as it can have side effects.
    // For example: delete a(); delete ++c.b; etc.

    // 13.5.1.2 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-delete-operator-runtime-semantics-evaluation
    // 1. Let ref be the result of evaluating UnaryExpression.
    // 2. ReturnIfAbrupt(ref).
    (void)TRY(node.generate_bytecode(*this));

    // 3. If ref is not a Reference Record, return true.
    // NOTE: The rest of the steps are handled by Delete{Variable,ByValue,Id}.
    return add_constant(Value(true));
}
void Generator::emit_set_variable(JS::Identifier const& identifier, Operand value, Bytecode::Op::SetVariable::InitializationMode initialization_mode, Bytecode::Op::EnvironmentMode mode)
{
    if (identifier.is_local()) {
        if (value.is_local() && value.index() == identifier.local_variable_index()) {
            // Moving a local to itself is a no-op.
            return;
        }
        emit<Bytecode::Op::SetLocal>(identifier.local_variable_index(), value);
    } else {
        emit<Bytecode::Op::SetVariable>(intern_identifier(identifier.string()), value, next_environment_variable_cache(), initialization_mode, mode);
    }
}
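
// Builds a human-readable name for simple expressions (identifiers, literals, and member chains
// such as "a.b[0]"); used below to intern a descriptive identifier for an expression.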
static Optional<ByteString> expression_identifier(Expression const& expression)
{
    if (expression.is_identifier()) {
        auto const& identifier = static_cast<Identifier const&>(expression);
        return identifier.string();
    }

    if (expression.is_numeric_literal()) {
        auto const& literal = static_cast<NumericLiteral const&>(expression);
        return literal.value().to_string_without_side_effects().to_byte_string();
    }

    if (expression.is_string_literal()) {
        auto const& literal = static_cast<StringLiteral const&>(expression);
        return ByteString::formatted("'{}'", literal.value());
    }

    if (expression.is_member_expression()) {
        auto const& member_expression = static_cast<MemberExpression const&>(expression);
        StringBuilder builder;

        if (auto identifier = expression_identifier(member_expression.object()); identifier.has_value())
            builder.append(*identifier);

        if (auto identifier = expression_identifier(member_expression.property()); identifier.has_value()) {
            if (member_expression.is_computed())
                builder.appendff("[{}]", *identifier);
            else
                builder.appendff(".{}", *identifier);
        }

        return builder.to_byte_string();
    }

    return {};
}
Optional<IdentifierTableIndex> Generator::intern_identifier_for_expression(Expression const& expression)
{
    if (auto identifier = expression_identifier(expression); identifier.has_value())
        return intern_identifier(identifier.release_value());
    return {};
}
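
// Emits a plain (unlabelled) break or continue: walks the boundary stack from the innermost
// boundary outwards, emitting the required cleanup (leaving unwind contexts and lexical
// environments, scheduling jumps through finalizers) until the nearest matching
// breakable/continuable scope is reached.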
void Generator::generate_scoped_jump(JumpType type)
{
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    bool last_was_finally = false;
    for (size_t i = m_boundaries.size(); i > 0; --i) {
        auto boundary = m_boundaries[i - 1];
        using enum BlockBoundaryType;
        switch (boundary) {
        case Break:
            if (type == JumpType::Break) {
                emit<Op::Jump>(nearest_breakable_scope());
                return;
            }
            break;
        case Continue:
            if (type == JumpType::Continue) {
                emit<Op::Jump>(nearest_continuable_scope());
                return;
            }
            break;
        case Unwind:
            VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
            if (!last_was_finally) {
                VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                emit<Bytecode::Op::LeaveUnwindContext>();
                m_current_unwind_context = m_current_unwind_context->previous();
            }
            last_was_finally = false;
            break;
        case LeaveLexicalEnvironment:
            emit<Bytecode::Op::LeaveLexicalEnvironment>();
            break;
        case ReturnToFinally: {
            VERIFY(m_current_unwind_context->finalizer().has_value());
            m_current_unwind_context = m_current_unwind_context->previous();
            auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
            auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
            auto& block = make_block(block_name);
            emit<Op::ScheduleJump>(Label { block });
            switch_to_basic_block(block);
            last_was_finally = true;
            break;
        }
        case LeaveFinally:
            emit<Op::LeaveFinally>();
            break;
        }
    }
    VERIFY_NOT_REACHED();
}
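
// Like generate_scoped_jump(), but targets the innermost breakable/continuable scope whose
// label set contains the given label, unwinding every boundary crossed along the way.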
void Generator::generate_labelled_jump(JumpType type, DeprecatedFlyString const& label)
{
    TemporaryChange temp { m_current_unwind_context, m_current_unwind_context };
    size_t current_boundary = m_boundaries.size();
    bool last_was_finally = false;

    auto const& jumpable_scopes = type == JumpType::Continue ? m_continuable_scopes : m_breakable_scopes;

    for (auto const& jumpable_scope : jumpable_scopes.in_reverse()) {
        for (; current_boundary > 0; --current_boundary) {
            auto boundary = m_boundaries[current_boundary - 1];
            if (boundary == BlockBoundaryType::Unwind) {
                VERIFY(last_was_finally || !m_current_unwind_context->finalizer().has_value());
                if (!last_was_finally) {
                    VERIFY(m_current_unwind_context && m_current_unwind_context->handler().has_value());
                    emit<Bytecode::Op::LeaveUnwindContext>();
                    m_current_unwind_context = m_current_unwind_context->previous();
                }
                last_was_finally = false;
            } else if (boundary == BlockBoundaryType::LeaveLexicalEnvironment) {
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
            } else if (boundary == BlockBoundaryType::ReturnToFinally) {
                VERIFY(m_current_unwind_context->finalizer().has_value());
                m_current_unwind_context = m_current_unwind_context->previous();
                auto jump_type_name = type == JumpType::Break ? "break"sv : "continue"sv;
                auto block_name = MUST(String::formatted("{}.{}", current_block().name(), jump_type_name));
                auto& block = make_block(block_name);
                emit<Op::ScheduleJump>(Label { block });
                switch_to_basic_block(block);
                last_was_finally = true;
            } else if ((type == JumpType::Continue && boundary == BlockBoundaryType::Continue) || (type == JumpType::Break && boundary == BlockBoundaryType::Break)) {
                // Make sure we don't process this boundary twice if the current jumpable scope doesn't contain the target label.
                --current_boundary;
                break;
            }
        }

        if (jumpable_scope.language_label_set.contains_slow(label)) {
            emit<Op::Jump>(jumpable_scope.bytecode_target);
            return;
        }
    }

    // We must have a jumpable scope available that contains the label, as this should be enforced by the parser.
    VERIFY_NOT_REACHED();
}
void Generator::generate_break()
{
    generate_scoped_jump(JumpType::Break);
}

void Generator::generate_break(DeprecatedFlyString const& break_label)
{
    generate_labelled_jump(JumpType::Break, break_label);
}

void Generator::generate_continue()
{
    generate_scoped_jump(JumpType::Continue);
}

void Generator::generate_continue(DeprecatedFlyString const& continue_label)
{
    generate_labelled_jump(JumpType::Continue, continue_label);
}

void Generator::push_home_object(Operand object)
{
    m_home_objects.append(object);
}

void Generator::pop_home_object()
{
    m_home_objects.take_last();
}

void Generator::emit_new_function(Operand dst, FunctionExpression const& function_node, Optional<IdentifierTableIndex> lhs_name)
{
    if (m_home_objects.is_empty()) {
        emit<Op::NewFunction>(dst, function_node, lhs_name);
    } else {
        emit<Op::NewFunction>(dst, function_node, lhs_name, m_home_objects.last());
    }
}

CodeGenerationErrorOr<Optional<Operand>> Generator::emit_named_evaluation_if_anonymous_function(Expression const& expression, Optional<IdentifierTableIndex> lhs_name, Optional<Operand> preferred_dst)
{
    if (is<FunctionExpression>(expression)) {
        auto const& function_expression = static_cast<FunctionExpression const&>(expression);
        if (!function_expression.has_name()) {
            return TRY(function_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
        }
    }

    if (is<ClassExpression>(expression)) {
        auto const& class_expression = static_cast<ClassExpression const&>(expression);
        if (!class_expression.has_name()) {
            return TRY(class_expression.generate_bytecode_with_lhs_name(*this, move(lhs_name), preferred_dst)).value();
        }
    }

    return expression.generate_bytecode(*this, preferred_dst);
}

void Generator::emit_get_by_id(Operand dst, Operand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier)
{
    emit<Op::GetById>(dst, base, property_identifier, move(base_identifier), m_next_property_lookup_cache++);
}

void Generator::emit_get_by_id_with_this(Operand dst, Operand base, IdentifierTableIndex id, Operand this_value)
{
    emit<Op::GetByIdWithThis>(dst, base, id, this_value, m_next_property_lookup_cache++);
}

void Generator::emit_iterator_value(Operand dst, Operand result)
{
    emit_get_by_id(dst, result, intern_identifier("value"sv));
}

void Generator::emit_iterator_complete(Operand dst, Operand result)
{
    emit_get_by_id(dst, result, intern_identifier("done"sv));
}

bool Generator::is_local_initialized(u32 local_index) const
{
    return m_initialized_locals.find(local_index) != m_initialized_locals.end();
}

void Generator::set_local_initialized(u32 local_index)
{
    m_initialized_locals.set(local_index);
}

}