/*
 * Copyright (c) 2021-2024, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/OwnPtr.h>
#include <AK/SinglyLinkedList.h>
#include <LibJS/AST.h>
#include <LibJS/Bytecode/BasicBlock.h>
#include <LibJS/Bytecode/CodeGenerationError.h>
#include <LibJS/Bytecode/Executable.h>
#include <LibJS/Bytecode/IdentifierTable.h>
#include <LibJS/Bytecode/Label.h>
#include <LibJS/Bytecode/Op.h>
#include <LibJS/Bytecode/Register.h>
#include <LibJS/Bytecode/StringTable.h>
#include <LibJS/Forward.h>
#include <LibJS/Runtime/FunctionKind.h>
#include <LibRegex/Regex.h>

namespace JS::Bytecode {

class Generator {
public:
    VM& vm() { return m_vm; }

    enum class SurroundingScopeKind {
        Global,
        Function,
        Block,
    };

    enum class MustPropagateCompletion {
        No,
        Yes,
    };

    static CodeGenerationErrorOr<GC::Ref<Executable>> generate_from_ast_node(VM&, ASTNode const&, FunctionKind = FunctionKind::Normal);
    static CodeGenerationErrorOr<GC::Ref<Executable>> generate_from_function(VM&, ECMAScriptFunctionObject const& function);

    CodeGenerationErrorOr<void> emit_function_declaration_instantiation(ECMAScriptFunctionObject const& function);

    [[nodiscard]] ScopedOperand allocate_register();
    [[nodiscard]] ScopedOperand local(u32 local_index);
    [[nodiscard]] ScopedOperand accumulator();
    [[nodiscard]] ScopedOperand this_value();

    void free_register(Register);

    void set_local_initialized(u32 local_index);
    [[nodiscard]] bool is_local_initialized(u32 local_index) const;

    class SourceLocationScope {
    public:
        SourceLocationScope(Generator&, ASTNode const& node);
        ~SourceLocationScope();

    private:
        Generator& m_generator;
        ASTNode const* m_previous_node { nullptr };
    };

    class UnwindContext {
    public:
        UnwindContext(Generator&, Optional<Label> finalizer);

        UnwindContext const* previous() const { return m_previous_context; }
        void set_handler(Label handler) { m_handler = handler; }
        Optional<Label> handler() const { return m_handler; }
        Optional<Label> finalizer() const { return m_finalizer; }

        ~UnwindContext();

    private:
        Generator& m_generator;
        Optional<Label> m_finalizer;
        Optional<Label> m_handler {};
        UnwindContext const* m_previous_context { nullptr };
    };

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit(Args&&... args)
    {
        VERIFY(!is_current_block_terminated());

        size_t slot_offset = m_current_basic_block->size();
        m_current_basic_block->set_last_instruction_start_offset(slot_offset);
        grow(sizeof(OpType));
        void* slot = m_current_basic_block->data() + slot_offset;
        new (slot) OpType(forward<Args>(args)...);
        if constexpr (OpType::IsTerminator)
            m_current_basic_block->terminate({});
        m_current_basic_block->add_source_map_entry(slot_offset, { m_current_ast_node->start_offset(), m_current_ast_node->end_offset() });
    }
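
    // Illustrative sketch: a typical call site constructs the instruction in place,
    // for example copying one operand into another (assuming `dst` and `src` are
    // ScopedOperands obtained from this Generator):
    //
    //     generator.emit<Bytecode::Op::Mov>(dst, src);
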
    template<typename OpType, typename ExtraSlotType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_slots(size_t extra_slot_count, Args&&... args)
    {
        VERIFY(!is_current_block_terminated());

        size_t size_to_allocate = round_up_to_power_of_two(sizeof(OpType) + extra_slot_count * sizeof(ExtraSlotType), alignof(void*));
        size_t slot_offset = m_current_basic_block->size();
        m_current_basic_block->set_last_instruction_start_offset(slot_offset);
        grow(size_to_allocate);
        void* slot = m_current_basic_block->data() + slot_offset;
        new (slot) OpType(forward<Args>(args)...);
        if constexpr (OpType::IsTerminator)
            m_current_basic_block->terminate({});
        m_current_basic_block->add_source_map_entry(slot_offset, { m_current_ast_node->start_offset(), m_current_ast_node->end_offset() });
    }

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_operand_slots(size_t extra_operand_slots, Args&&... args)
    {
        emit_with_extra_slots<OpType, Operand>(extra_operand_slots, forward<Args>(args)...);
    }

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_value_slots(size_t extra_value_slots, Args&&... args)
    {
        emit_with_extra_slots<OpType, Value>(extra_value_slots, forward<Args>(args)...);
    }
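
    // Illustrative sketch: instructions that carry a variable number of trailing
    // Operand or Value slots reserve that extra space at emit time. A hypothetical
    // call site (Op::SomeVariadicOp is a placeholder, not a real opcode) might be:
    //
    //     generator.emit_with_extra_operand_slots<Op::SomeVariadicOp>(extra_operands.size(), dst, extra_operands);
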
    void emit_jump_if(ScopedOperand const& condition, Label true_target, Label false_target);

    struct ReferenceOperands {
        Optional<ScopedOperand> base {};                                  // [[Base]]
        Optional<ScopedOperand> referenced_name {};                       // [[ReferencedName]] as an operand
        Optional<IdentifierTableIndex> referenced_identifier {};          // [[ReferencedName]] as an identifier
        Optional<IdentifierTableIndex> referenced_private_identifier {};  // [[ReferencedName]] as a private identifier
        Optional<ScopedOperand> this_value {};                            // [[ThisValue]]
        Optional<ScopedOperand> loaded_value {};                          // Loaded value, if we've performed a load.
    };

    CodeGenerationErrorOr<ReferenceOperands> emit_load_from_reference(JS::ASTNode const&, Optional<ScopedOperand> preferred_dst = {});
    CodeGenerationErrorOr<void> emit_store_to_reference(JS::ASTNode const&, ScopedOperand value);
    CodeGenerationErrorOr<void> emit_store_to_reference(ReferenceOperands const&, ScopedOperand value);
    CodeGenerationErrorOr<Optional<ScopedOperand>> emit_delete_reference(JS::ASTNode const&);

    CodeGenerationErrorOr<ReferenceOperands> emit_super_reference(MemberExpression const&);
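
    // Illustrative sketch: lowering a member access such as `obj.prop` typically
    // goes through emit_load_from_reference(), which hands back the pieces of the
    // resolved reference (`member_expression` is assumed to be the AST node at the
    // call site):
    //
    //     auto reference = TRY(generator.emit_load_from_reference(member_expression));
    //     auto value = reference.loaded_value.value();
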
    void emit_set_variable(JS::Identifier const& identifier, ScopedOperand value, Bytecode::Op::BindingInitializationMode initialization_mode = Bytecode::Op::BindingInitializationMode::Set, Bytecode::Op::EnvironmentMode mode = Bytecode::Op::EnvironmentMode::Lexical);

    void push_home_object(ScopedOperand);
    void pop_home_object();
    void emit_new_function(ScopedOperand dst, JS::FunctionExpression const&, Optional<IdentifierTableIndex> lhs_name);

    CodeGenerationErrorOr<Optional<ScopedOperand>> emit_named_evaluation_if_anonymous_function(Expression const&, Optional<IdentifierTableIndex> lhs_name, Optional<ScopedOperand> preferred_dst = {});

    void begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set);
    void end_continuable_scope();
    void begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set);
    void end_breakable_scope();

    [[nodiscard]] Label nearest_continuable_scope() const;
    [[nodiscard]] Label nearest_breakable_scope() const;
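
    // Illustrative sketch: loop codegen is expected to bracket the loop body with
    // these scopes so that `break` and `continue` know where to jump (the block
    // names here are placeholders):
    //
    //     generator.begin_breakable_scope(Label { end_block }, label_set);
    //     generator.begin_continuable_scope(Label { update_block }, label_set);
    //     // ... generate the loop body ...
    //     generator.end_continuable_scope();
    //     generator.end_breakable_scope();
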
    void switch_to_basic_block(BasicBlock& block)
    {
        m_current_basic_block = &block;
    }

    [[nodiscard]] BasicBlock& current_block() { return *m_current_basic_block; }

    BasicBlock& make_block(String name = {})
    {
        if (name.is_empty())
            name = String::number(m_next_block++);
        auto block = BasicBlock::create(m_root_basic_blocks.size(), name);
        if (auto const* context = m_current_unwind_context) {
            if (context->handler().has_value())
                block->set_handler(*m_root_basic_blocks[context->handler().value().basic_block_index()]);
            if (m_current_unwind_context->finalizer().has_value())
                block->set_finalizer(*m_root_basic_blocks[context->finalizer().value().basic_block_index()]);
        }
        m_root_basic_blocks.append(move(block));
        return *m_root_basic_blocks.last();
    }

    bool is_current_block_terminated() const
    {
        return m_current_basic_block->is_terminated();
    }
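
    // Illustrative sketch: codegen typically chains blocks by creating one, jumping
    // to it, and then making it the current block (names are placeholders):
    //
    //     auto& end_block = generator.make_block();
    //     generator.emit<Bytecode::Op::Jump>(Label { end_block });
    //     generator.switch_to_basic_block(end_block);
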
    StringTableIndex intern_string(ByteString string)
    {
        return m_string_table->insert(move(string));
    }

    RegexTableIndex intern_regex(ParsedRegex regex)
    {
        return m_regex_table->insert(move(regex));
    }

    IdentifierTableIndex intern_identifier(DeprecatedFlyString string)
    {
        return m_identifier_table->insert(move(string));
    }

    Optional<IdentifierTableIndex> intern_identifier_for_expression(Expression const& expression);
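
    // Illustrative sketch: identifiers are deduplicated through the identifier
    // table, and instructions store the resulting index rather than the string
    // itself (assuming `identifier` is a JS::Identifier at the call site):
    //
    //     auto name = generator.intern_identifier(identifier.string());
    //     generator.emit_get_by_id(dst, base, name);
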
    bool is_in_generator_or_async_function() const { return m_enclosing_function_kind == FunctionKind::Async || m_enclosing_function_kind == FunctionKind::Generator || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_generator_function() const { return m_enclosing_function_kind == FunctionKind::Generator || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_async_function() const { return m_enclosing_function_kind == FunctionKind::Async || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_async_generator_function() const { return m_enclosing_function_kind == FunctionKind::AsyncGenerator; }

    enum class BindingMode {
        Lexical,
        Var,
        Global,
    };

    struct LexicalScope {
        SurroundingScopeKind kind;
    };

    // Returns true if a lexical environment was created.
    bool emit_block_declaration_instantiation(ScopeNode const&);

    void begin_variable_scope();
    void end_variable_scope();

    enum class BlockBoundaryType {
        Break,
        Continue,
        Unwind,
        ReturnToFinally,
        LeaveFinally,
        LeaveLexicalEnvironment,
    };
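
    // Emits whatever cleanup the enclosing constructs require before a terminator
    // (leaving unwind contexts, lexical environments, and finalizers), walking the
    // recorded block boundaries from innermost to outermost.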
    template<typename OpType>
    void perform_needed_unwinds()
    requires(OpType::IsTerminator && !IsSame<OpType, Op::Jump>)
    {
        for (size_t i = m_boundaries.size(); i > 0; --i) {
            auto boundary = m_boundaries[i - 1];
            using enum BlockBoundaryType;
            switch (boundary) {
            case Unwind:
                if constexpr (IsSame<OpType, Bytecode::Op::Throw>)
                    return;
                emit<Bytecode::Op::LeaveUnwindContext>();
                break;
            case LeaveLexicalEnvironment:
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
                break;
            case Break:
            case Continue:
                break;
            case ReturnToFinally:
                return;
            case LeaveFinally:
                emit<Bytecode::Op::LeaveFinally>();
                break;
            };
        }
    }

    bool is_in_finalizer() const { return m_boundaries.contains_slow(BlockBoundaryType::LeaveFinally); }
    bool must_enter_finalizer() const { return m_boundaries.contains_slow(BlockBoundaryType::ReturnToFinally); }

    void generate_break();
    void generate_break(DeprecatedFlyString const& break_label);

    void generate_continue();
    void generate_continue(DeprecatedFlyString const& continue_label);

    template<typename OpType>
    void emit_return(ScopedOperand value)
    requires(IsOneOf<OpType, Op::Return, Op::Yield>)
    {
        // FIXME: Tell the call sites about the `saved_return_value` destination
        //        and take that into account in the movs below.
        perform_needed_unwinds<OpType>();
        if (must_enter_finalizer()) {
            VERIFY(m_current_basic_block->finalizer() != nullptr);

            // Compare to:
            // * Interpreter::do_return
            // * Interpreter::run_bytecode::handle_ContinuePendingUnwind
            // * Return::execute_impl
            // * Yield::execute_impl
            if constexpr (IsSame<OpType, Op::Yield>)
                emit<Bytecode::Op::PrepareYield>(Operand(Register::saved_return_value()), value);
            else
                emit<Bytecode::Op::Mov>(Operand(Register::saved_return_value()), value);
            emit<Bytecode::Op::Mov>(Operand(Register::exception()), add_constant(Value {}));

            // FIXME: Do we really need to clear the return value register here?
            emit<Bytecode::Op::Mov>(Operand(Register::return_value()), add_constant(Value {}));
            emit<Bytecode::Op::Jump>(Label { *m_current_basic_block->finalizer() });
            return;
        }

        if constexpr (IsSame<OpType, Op::Return>)
            emit<Op::Return>(value);
        else
            emit<Op::Yield>(nullptr, value);
    }
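
    // Illustrative sketch: a `return expr;` statement would typically be lowered by
    // evaluating the expression into `value` and then emitting:
    //
    //     generator.emit_return<Bytecode::Op::Return>(value);
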
    void start_boundary(BlockBoundaryType type) { m_boundaries.append(type); }
    void end_boundary(BlockBoundaryType type)
    {
        VERIFY(m_boundaries.last() == type);
        m_boundaries.take_last();
    }

    [[nodiscard]] ScopedOperand copy_if_needed_to_preserve_evaluation_order(ScopedOperand const&);

    [[nodiscard]] ScopedOperand get_this(Optional<ScopedOperand> preferred_dst = {});

    void emit_get_by_id(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier = {});
    void emit_get_by_id_with_this(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex, ScopedOperand this_value);

    void emit_iterator_value(ScopedOperand dst, ScopedOperand result);
    void emit_iterator_complete(ScopedOperand dst, ScopedOperand result);

    [[nodiscard]] size_t next_global_variable_cache() { return m_next_global_variable_cache++; }
    [[nodiscard]] size_t next_property_lookup_cache() { return m_next_property_lookup_cache++; }

    enum class DeduplicateConstant {
        Yes,
        No,
    };

    [[nodiscard]] ScopedOperand add_constant(Value);

    [[nodiscard]] Value get_constant(ScopedOperand const& operand) const
    {
        VERIFY(operand.operand().is_constant());
        return m_constants[operand.operand().index()];
    }
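
    // Illustrative sketch: constants become operands, so loading a literal into a
    // register is just a Mov from a constant operand (assuming `dst` was obtained
    // from allocate_register()):
    //
    //     auto two = generator.add_constant(Value(2));
    //     generator.emit<Bytecode::Op::Mov>(dst, two);
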
    UnwindContext const* current_unwind_context() const { return m_current_unwind_context; }

    [[nodiscard]] bool is_finished() const { return m_finished; }

    [[nodiscard]] bool must_propagate_completion() const { return m_must_propagate_completion; }

private:
    VM& m_vm;

    static CodeGenerationErrorOr<GC::Ref<Executable>> compile(VM&, ASTNode const&, FunctionKind, GC::Ptr<ECMAScriptFunctionObject const>, MustPropagateCompletion, Vector<DeprecatedFlyString> local_variable_names);

    enum class JumpType {
        Continue,
        Break,
    };
    void generate_scoped_jump(JumpType);
    void generate_labelled_jump(JumpType, DeprecatedFlyString const& label);

    Generator(VM&, GC::Ptr<ECMAScriptFunctionObject const>, MustPropagateCompletion);
    ~Generator() = default;

    void grow(size_t);

    // Returns true if a fused instruction was emitted.
    [[nodiscard]] bool fuse_compare_and_jump(ScopedOperand const& condition, Label true_target, Label false_target);

    struct LabelableScope {
        Label bytecode_target;
        Vector<DeprecatedFlyString> language_label_set;
    };

    BasicBlock* m_current_basic_block { nullptr };
    ASTNode const* m_current_ast_node { nullptr };
    UnwindContext const* m_current_unwind_context { nullptr };

    Vector<NonnullOwnPtr<BasicBlock>> m_root_basic_blocks;
    NonnullOwnPtr<StringTable> m_string_table;
    NonnullOwnPtr<IdentifierTable> m_identifier_table;
    NonnullOwnPtr<RegexTable> m_regex_table;
    GC::MarkedVector<Value> m_constants;

    mutable Optional<ScopedOperand> m_true_constant;
    mutable Optional<ScopedOperand> m_false_constant;
    mutable Optional<ScopedOperand> m_null_constant;
    mutable Optional<ScopedOperand> m_undefined_constant;
    mutable Optional<ScopedOperand> m_empty_constant;
    mutable HashMap<i32, ScopedOperand> m_int32_constants;

    ScopedOperand m_accumulator;
    ScopedOperand m_this_value;
    Vector<Register> m_free_registers;

    u32 m_next_register { Register::reserved_register_count };
    u32 m_next_block { 1 };
    u32 m_next_property_lookup_cache { 0 };
    u32 m_next_global_variable_cache { 0 };
    FunctionKind m_enclosing_function_kind { FunctionKind::Normal };

    Vector<LabelableScope> m_continuable_scopes;
    Vector<LabelableScope> m_breakable_scopes;
    Vector<BlockBoundaryType> m_boundaries;
    Vector<ScopedOperand> m_home_objects;

    HashTable<u32> m_initialized_locals;

    bool m_finished { false };
    bool m_must_propagate_completion { true };

    GC::Ptr<ECMAScriptFunctionObject const> m_function;

    Optional<IdentifierTableIndex> m_length_identifier;
};

}