/*
 * Copyright (c) 2021-2024, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/OwnPtr.h>
#include <AK/SinglyLinkedList.h>
#include <LibJS/AST.h>
#include <LibJS/Bytecode/BasicBlock.h>
#include <LibJS/Bytecode/CodeGenerationError.h>
#include <LibJS/Bytecode/Executable.h>
#include <LibJS/Bytecode/IdentifierTable.h>
#include <LibJS/Bytecode/Label.h>
#include <LibJS/Bytecode/Op.h>
#include <LibJS/Bytecode/Register.h>
#include <LibJS/Bytecode/StringTable.h>
#include <LibJS/Forward.h>
#include <LibJS/Runtime/FunctionKind.h>
#include <LibRegex/Regex.h>

namespace JS::Bytecode {

class Generator {
public:
    VM& vm() { return m_vm; }

    enum class SurroundingScopeKind {
        Global,
        Function,
        Block,
    };

    enum class MustPropagateCompletion {
        No,
        Yes,
    };

    static CodeGenerationErrorOr<NonnullGCPtr<Executable>> generate_from_ast_node(VM&, ASTNode const&, FunctionKind = FunctionKind::Normal);
    static CodeGenerationErrorOr<NonnullGCPtr<Executable>> generate_from_function(VM&, ECMAScriptFunctionObject const& function);

    CodeGenerationErrorOr<void> emit_function_declaration_instantiation(ECMAScriptFunctionObject const& function);
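    // Register allocation helpers (a sketch of intent, inferred from the members below):
    // allocate_register() hands out a ScopedOperand backed by a VM register, free_register()
    // returns one (presumably for reuse via m_free_registers), local() refers to a function-level
    // local slot, and accumulator() exposes the shared accumulator register.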
    [[nodiscard]] ScopedOperand allocate_register();
    [[nodiscard]] ScopedOperand local(u32 local_index);
    [[nodiscard]] ScopedOperand accumulator();
    void free_register(Register);

    void set_local_initialized(u32 local_index);
    [[nodiscard]] bool is_local_initialized(u32 local_index) const;

    class SourceLocationScope {
    public:
        SourceLocationScope(Generator&, ASTNode const& node);
        ~SourceLocationScope();

    private:
        Generator& m_generator;
        ASTNode const* m_previous_node { nullptr };
    };
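    // An UnwindContext represents one level of try/catch/finally nesting during code generation.
    // Basic blocks created while a context is active inherit its handler and finalizer
    // (see make_block() below).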
    class UnwindContext {
    public:
        UnwindContext(Generator&, Optional<Label> finalizer);

        UnwindContext const* previous() const { return m_previous_context; }
        void set_handler(Label handler) { m_handler = handler; }
        Optional<Label> handler() const { return m_handler; }
        Optional<Label> finalizer() const { return m_finalizer; }

        ~UnwindContext();

    private:
        Generator& m_generator;
        Optional<Label> m_finalizer;
        Optional<Label> m_handler {};
        UnwindContext const* m_previous_context { nullptr };
    };
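    // Appends an instruction of type OpType to the current basic block: the operation is
    // constructed in place (placement new) at the end of the block's buffer, a source map entry
    // for the current AST node is recorded, and the block is marked terminated if the opcode
    // is a terminator.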
    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit(Args&&... args)
    {
        VERIFY(!is_current_block_terminated());

        size_t slot_offset = m_current_basic_block->size();
        m_current_basic_block->set_last_instruction_start_offset(slot_offset);
        grow(sizeof(OpType));
        void* slot = m_current_basic_block->data() + slot_offset;
        new (slot) OpType(forward<Args>(args)...);
        if constexpr (OpType::IsTerminator)
            m_current_basic_block->terminate({});
        m_current_basic_block->add_source_map_entry(slot_offset, { m_current_ast_node->start_offset(), m_current_ast_node->end_offset() });
    }
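    // Same as emit(), but reserves room for `extra_slot_count` trailing slots of ExtraSlotType
    // directly after the instruction (used for variable-length operand or value lists). The
    // allocation is rounded up to pointer alignment.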
    template<typename OpType, typename ExtraSlotType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_slots(size_t extra_slot_count, Args&&... args)
    {
        VERIFY(!is_current_block_terminated());

        size_t size_to_allocate = round_up_to_power_of_two(sizeof(OpType) + extra_slot_count * sizeof(ExtraSlotType), alignof(void*));
        size_t slot_offset = m_current_basic_block->size();
        m_current_basic_block->set_last_instruction_start_offset(slot_offset);
        grow(size_to_allocate);
        void* slot = m_current_basic_block->data() + slot_offset;
        new (slot) OpType(forward<Args>(args)...);
        if constexpr (OpType::IsTerminator)
            m_current_basic_block->terminate({});
        m_current_basic_block->add_source_map_entry(slot_offset, { m_current_ast_node->start_offset(), m_current_ast_node->end_offset() });
    }

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_operand_slots(size_t extra_operand_slots, Args&&... args)
    {
        emit_with_extra_slots<OpType, Operand>(extra_operand_slots, forward<Args>(args)...);
    }

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_value_slots(size_t extra_operand_slots, Args&&... args)
    {
        emit_with_extra_slots<OpType, Value>(extra_operand_slots, forward<Args>(args)...);
    }
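    // Emits a conditional branch on `condition` to either `true_target` or `false_target`,
    // possibly fusing the test with the preceding comparison instruction
    // (see fuse_compare_and_jump() below).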
    void emit_jump_if(ScopedOperand const& condition, Label true_target, Label false_target);
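    // The decomposed pieces of an ECMAScript Reference Record, as produced and consumed by the
    // emit_*_reference() helpers below.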
    struct ReferenceOperands {
        Optional<ScopedOperand> base {};                                 // [[Base]]
        Optional<ScopedOperand> referenced_name {};                      // [[ReferencedName]] as an operand
        Optional<IdentifierTableIndex> referenced_identifier {};         // [[ReferencedName]] as an identifier
        Optional<IdentifierTableIndex> referenced_private_identifier {}; // [[ReferencedName]] as a private identifier
        Optional<ScopedOperand> this_value {};                           // [[ThisValue]]
        Optional<ScopedOperand> loaded_value {};                         // Loaded value, if we've performed a load.
    };

    CodeGenerationErrorOr<ReferenceOperands> emit_load_from_reference(JS::ASTNode const&, Optional<ScopedOperand> preferred_dst = {});
    CodeGenerationErrorOr<void> emit_store_to_reference(JS::ASTNode const&, ScopedOperand value);
    CodeGenerationErrorOr<void> emit_store_to_reference(ReferenceOperands const&, ScopedOperand value);
    CodeGenerationErrorOr<Optional<ScopedOperand>> emit_delete_reference(JS::ASTNode const&);

    CodeGenerationErrorOr<ReferenceOperands> emit_super_reference(MemberExpression const&);

    void emit_set_variable(JS::Identifier const& identifier, ScopedOperand value, Bytecode::Op::BindingInitializationMode initialization_mode = Bytecode::Op::BindingInitializationMode::Set, Bytecode::Op::EnvironmentMode mode = Bytecode::Op::EnvironmentMode::Lexical);

    void push_home_object(ScopedOperand);
    void pop_home_object();
    void emit_new_function(ScopedOperand dst, JS::FunctionExpression const&, Optional<IdentifierTableIndex> lhs_name);

    CodeGenerationErrorOr<Optional<ScopedOperand>> emit_named_evaluation_if_anonymous_function(Expression const&, Optional<IdentifierTableIndex> lhs_name, Optional<ScopedOperand> preferred_dst = {});
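    // Continuable/breakable scopes track the jump targets for `continue` and `break`, together
    // with any language-level labels attached to the surrounding statement.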
    void begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set);
    void end_continuable_scope();
    void begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set);
    void end_breakable_scope();

    [[nodiscard]] Label nearest_continuable_scope() const;
    [[nodiscard]] Label nearest_breakable_scope() const;
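    // Basic block management: instructions are always emitted into m_current_basic_block, and
    // make_block() creates a fresh block (auto-numbered when unnamed) that inherits the
    // handler/finalizer of the active unwind context.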
    void switch_to_basic_block(BasicBlock& block)
    {
        m_current_basic_block = &block;
    }

    [[nodiscard]] BasicBlock& current_block() { return *m_current_basic_block; }

    BasicBlock& make_block(String name = {})
    {
        if (name.is_empty())
            name = MUST(String::number(m_next_block++));
        auto block = BasicBlock::create(m_root_basic_blocks.size(), name);
        if (auto const* context = m_current_unwind_context) {
            if (context->handler().has_value())
                block->set_handler(*m_root_basic_blocks[context->handler().value().basic_block_index()]);
            if (m_current_unwind_context->finalizer().has_value())
                block->set_finalizer(*m_root_basic_blocks[context->finalizer().value().basic_block_index()]);
        }
        m_root_basic_blocks.append(move(block));
        return *m_root_basic_blocks.last();
    }

    bool is_current_block_terminated() const
    {
        return m_current_basic_block->is_terminated();
    }
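    // Interning helpers: store strings, regexes, and identifiers in the executable's side tables
    // and return the index that bytecode instructions use to refer to them.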
    StringTableIndex intern_string(ByteString string)
    {
        return m_string_table->insert(move(string));
    }

    RegexTableIndex intern_regex(ParsedRegex regex)
    {
        return m_regex_table->insert(move(regex));
    }

    IdentifierTableIndex intern_identifier(DeprecatedFlyString string)
    {
        return m_identifier_table->insert(move(string));
    }

    Optional<IdentifierTableIndex> intern_identifier_for_expression(Expression const& expression);

    bool is_in_generator_or_async_function() const { return m_enclosing_function_kind == FunctionKind::Async || m_enclosing_function_kind == FunctionKind::Generator || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_generator_function() const { return m_enclosing_function_kind == FunctionKind::Generator || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_async_function() const { return m_enclosing_function_kind == FunctionKind::Async || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_async_generator_function() const { return m_enclosing_function_kind == FunctionKind::AsyncGenerator; }

    enum class BindingMode {
        Lexical,
        Var,
        Global,
    };

    struct LexicalScope {
        SurroundingScopeKind kind;
    };

    // Returns true if a lexical environment was created.
    bool emit_block_declaration_instantiation(ScopeNode const&);

    void begin_variable_scope();
    void end_variable_scope();
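    // Boundary markers pushed onto m_boundaries as code generation enters constructs that need
    // cleanup (or redirection) when control leaves them early via break, continue, return, or throw.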
    enum class BlockBoundaryType {
        Break,
        Continue,
        Unwind,
        ReturnToFinally,
        LeaveFinally,
        LeaveLexicalEnvironment,
    };
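    // Before emitting a non-Jump terminator, walk the boundary stack from innermost to outermost
    // and emit any needed cleanup instructions (LeaveUnwindContext, LeaveLexicalEnvironment,
    // LeaveFinally). The walk stops early at the first Unwind boundary when emitting a Throw,
    // and at a ReturnToFinally boundary for any terminator.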
    template<typename OpType>
    void perform_needed_unwinds()
    requires(OpType::IsTerminator && !IsSame<OpType, Op::Jump>)
    {
        for (size_t i = m_boundaries.size(); i > 0; --i) {
            auto boundary = m_boundaries[i - 1];
            using enum BlockBoundaryType;
            switch (boundary) {
            case Unwind:
                if constexpr (IsSame<OpType, Bytecode::Op::Throw>)
                    return;
                emit<Bytecode::Op::LeaveUnwindContext>();
                break;
            case LeaveLexicalEnvironment:
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
                break;
            case Break:
            case Continue:
                break;
            case ReturnToFinally:
                return;
            case LeaveFinally:
                emit<Bytecode::Op::LeaveFinally>();
                break;
            };
        }
    }

    bool is_in_finalizer() const { return m_boundaries.contains_slow(BlockBoundaryType::LeaveFinally); }
    bool must_enter_finalizer() const { return m_boundaries.contains_slow(BlockBoundaryType::ReturnToFinally); }

    void generate_break();
    void generate_break(DeprecatedFlyString const& break_label);

    void generate_continue();
    void generate_continue(DeprecatedFlyString const& continue_label);
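    // Emits a Return or Yield, routing through the enclosing finalizer first when one has to run:
    // the value is parked in the saved_return_value register, the exception and return_value
    // registers are cleared, and control jumps to the finalizer block instead of returning directly.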
    template<typename OpType>
    void emit_return(ScopedOperand value)
    requires(IsOneOf<OpType, Op::Return, Op::Yield>)
    {
        // FIXME: Tell the call sites about the `saved_return_value` destination
        //        And take that into account in the movs below.
        perform_needed_unwinds<OpType>();
        if (must_enter_finalizer()) {
            VERIFY(m_current_basic_block->finalizer() != nullptr);

            // Compare to:
            // * Interpreter::do_return
            // * Interpreter::run_bytecode::handle_ContinuePendingUnwind
            // * Return::execute_impl
            // * Yield::execute_impl
            emit<Bytecode::Op::Mov>(Operand(Register::saved_return_value()), value);
            emit<Bytecode::Op::Mov>(Operand(Register::exception()), add_constant(Value {}));

            // FIXME: Do we really need to clear the return value register here?
            emit<Bytecode::Op::Mov>(Operand(Register::return_value()), add_constant(Value {}));
            emit<Bytecode::Op::Jump>(Label { *m_current_basic_block->finalizer() });
            return;
        }

        if constexpr (IsSame<OpType, Op::Return>)
            emit<Op::Return>(value);
        else
            emit<Op::Yield>(nullptr, value);
    }

    void start_boundary(BlockBoundaryType type) { m_boundaries.append(type); }
    void end_boundary(BlockBoundaryType type)
    {
        VERIFY(m_boundaries.last() == type);
        m_boundaries.take_last();
    }

    [[nodiscard]] ScopedOperand get_this(Optional<ScopedOperand> preferred_dst = {});

    void emit_get_by_id(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier = {});
    void emit_get_by_id_with_this(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex, ScopedOperand this_value);

    void emit_iterator_value(ScopedOperand dst, ScopedOperand result);
    void emit_iterator_complete(ScopedOperand dst, ScopedOperand result);

    [[nodiscard]] size_t next_global_variable_cache() { return m_next_global_variable_cache++; }
    [[nodiscard]] size_t next_property_lookup_cache() { return m_next_property_lookup_cache++; }

    enum class DeduplicateConstant {
        Yes,
        No,
    };
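    // Adds `value` to the constant pool and returns an operand referring to it. With
    // DeduplicateConstant::Yes (the default), an existing identical constant is reused instead
    // of appending a new entry.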
    [[nodiscard]] ScopedOperand add_constant(Value value, DeduplicateConstant deduplicate_constant = DeduplicateConstant::Yes)
    {
        if (deduplicate_constant == DeduplicateConstant::Yes) {
            for (size_t i = 0; i < m_constants.size(); ++i) {
                if (m_constants[i] == value)
                    return ScopedOperand(*this, Operand(Operand::Type::Constant, i));
            }
        }
        m_constants.append(value);
        return ScopedOperand(*this, Operand(Operand::Type::Constant, m_constants.size() - 1));
    }

    UnwindContext const* current_unwind_context() const { return m_current_unwind_context; }

    [[nodiscard]] bool is_finished() const { return m_finished; }

    [[nodiscard]] bool must_propagate_completion() const { return m_must_propagate_completion; }

private:
    VM& m_vm;

    static CodeGenerationErrorOr<NonnullGCPtr<Executable>> emit_function_body_bytecode(VM&, ASTNode const&, FunctionKind, GCPtr<ECMAScriptFunctionObject const>, MustPropagateCompletion = MustPropagateCompletion::Yes);

    enum class JumpType {
        Continue,
        Break,
    };
    void generate_scoped_jump(JumpType);
    void generate_labelled_jump(JumpType, DeprecatedFlyString const& label);

    Generator(VM&, MustPropagateCompletion);
    ~Generator() = default;

    void grow(size_t);

    // Returns true if a fused instruction was emitted.
    [[nodiscard]] bool fuse_compare_and_jump(ScopedOperand const& condition, Label true_target, Label false_target);

    struct LabelableScope {
        Label bytecode_target;
        Vector<DeprecatedFlyString> language_label_set;
    };

    BasicBlock* m_current_basic_block { nullptr };
    ASTNode const* m_current_ast_node { nullptr };
    UnwindContext const* m_current_unwind_context { nullptr };

    Vector<NonnullOwnPtr<BasicBlock>> m_root_basic_blocks;
    NonnullOwnPtr<StringTable> m_string_table;
    NonnullOwnPtr<IdentifierTable> m_identifier_table;
    NonnullOwnPtr<RegexTable> m_regex_table;
    MarkedVector<Value> m_constants;

    ScopedOperand m_accumulator;
    Vector<Register> m_free_registers;

    u32 m_next_register { Register::reserved_register_count };
    u32 m_next_block { 1 };
    u32 m_next_property_lookup_cache { 0 };
    u32 m_next_global_variable_cache { 0 };
    FunctionKind m_enclosing_function_kind { FunctionKind::Normal };
    Vector<LabelableScope> m_continuable_scopes;
    Vector<LabelableScope> m_breakable_scopes;
    Vector<BlockBoundaryType> m_boundaries;
    Vector<ScopedOperand> m_home_objects;

    HashTable<u32> m_initialized_locals;

    bool m_finished { false };
    bool m_must_propagate_completion { true };
};

}