/*
 * Copyright (c) 2021-2024, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/OwnPtr.h>
#include <AK/SinglyLinkedList.h>
#include <LibJS/AST.h>
#include <LibJS/Bytecode/BasicBlock.h>
#include <LibJS/Bytecode/CodeGenerationError.h>
#include <LibJS/Bytecode/Executable.h>
#include <LibJS/Bytecode/IdentifierTable.h>
#include <LibJS/Bytecode/Label.h>
#include <LibJS/Bytecode/Op.h>
#include <LibJS/Bytecode/Register.h>
#include <LibJS/Bytecode/StringTable.h>
#include <LibJS/Forward.h>
#include <LibJS/Runtime/FunctionKind.h>
#include <LibRegex/Regex.h>

namespace JS::Bytecode {

class Generator {
public:
    VM& vm() { return m_vm; }

    enum class SurroundingScopeKind {
        Global,
        Function,
        Block,
    };

    static CodeGenerationErrorOr<NonnullGCPtr<Executable>> generate(VM&, ASTNode const&, ReadonlySpan<FunctionParameter> parameters, FunctionKind = FunctionKind::Normal);

    [[nodiscard]] ScopedOperand allocate_register();
    [[nodiscard]] ScopedOperand local(u32 local_index);
    [[nodiscard]] ScopedOperand accumulator();
    void free_register(Register);

    void set_local_initialized(u32 local_index);
    [[nodiscard]] bool is_local_initialized(u32 local_index) const;

    class SourceLocationScope {
    public:
        SourceLocationScope(Generator&, ASTNode const& node);
        ~SourceLocationScope();

    private:
        Generator& m_generator;
        ASTNode const* m_previous_node { nullptr };
    };

    class UnwindContext {
    public:
        UnwindContext(Generator&, Optional<Label> finalizer);

        UnwindContext const* previous() const { return m_previous_context; }
        void set_handler(Label handler) { m_handler = handler; }
        Optional<Label> handler() const { return m_handler; }
        Optional<Label> finalizer() const { return m_finalizer; }

        ~UnwindContext();

    private:
        Generator& m_generator;
        Optional<Label> m_finalizer;
        Optional<Label> m_handler {};
        UnwindContext const* m_previous_context { nullptr };
    };
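
    // Usage sketch (illustrative, not part of this header): judging by its members,
    // UnwindContext is an RAII type whose constructor pushes a new unwind scope and
    // whose destructor pops it. `handler_block` and `finalizer_label` are
    // hypothetical caller-side names.
    //
    //     Generator::UnwindContext unwind_context { generator, finalizer_label };
    //     unwind_context.set_handler(Label { handler_block });
    //     // ... generate the protected (try) body ...
    //     // popped automatically when unwind_context goes out of scope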

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit(Args&&... args)
    {
        VERIFY(!is_current_block_terminated());

        // Reserve space at the end of the current basic block and construct the
        // instruction in place with placement new.
        size_t slot_offset = m_current_basic_block->size();
        m_current_basic_block->set_last_instruction_start_offset(slot_offset);
        grow(sizeof(OpType));
        void* slot = m_current_basic_block->data() + slot_offset;
        new (slot) OpType(forward<Args>(args)...);
        if constexpr (OpType::IsTerminator)
            m_current_basic_block->terminate({});
        // Map the instruction back to the AST node currently being generated.
        m_current_basic_block->add_source_map_entry(slot_offset, { m_current_ast_node->start_offset(), m_current_ast_node->end_offset() });
    }
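
    // Usage sketch (illustrative, not part of this header): emit() appends one
    // instruction to the current basic block. This assumes Op::Jump takes a single
    // Label target and that a Label can be constructed from a BasicBlock.
    //
    //     generator.emit<Bytecode::Op::Jump>(Label { target_block });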

    template<typename OpType, typename ExtraSlotType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_slots(size_t extra_slot_count, Args&&... args)
    {
        VERIFY(!is_current_block_terminated());

        // Variable-length instructions carry trailing slots (operands or values)
        // directly after the fixed-size part. Round the allocation up so the next
        // instruction stays pointer-aligned.
        size_t size_to_allocate = round_up_to_power_of_two(sizeof(OpType) + extra_slot_count * sizeof(ExtraSlotType), alignof(void*));
        size_t slot_offset = m_current_basic_block->size();
        m_current_basic_block->set_last_instruction_start_offset(slot_offset);
        grow(size_to_allocate);
        void* slot = m_current_basic_block->data() + slot_offset;
        new (slot) OpType(forward<Args>(args)...);
        if constexpr (OpType::IsTerminator)
            m_current_basic_block->terminate({});
        m_current_basic_block->add_source_map_entry(slot_offset, { m_current_ast_node->start_offset(), m_current_ast_node->end_offset() });
    }

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_operand_slots(size_t extra_operand_slots, Args&&... args)
    {
        emit_with_extra_slots<OpType, Operand>(extra_operand_slots, forward<Args>(args)...);
    }

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_value_slots(size_t extra_value_slots, Args&&... args)
    {
        emit_with_extra_slots<OpType, Value>(extra_value_slots, forward<Args>(args)...);
    }
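
    // Usage sketch (illustrative, not part of this header): instructions with a
    // variable number of operands reserve their trailing slots up front so the
    // whole instruction lives in one contiguous allocation. Op::NewArray and its
    // argument list here are assumptions, not taken from this header.
    //
    //     generator.emit_with_extra_operand_slots<Bytecode::Op::NewArray>(element_count, dst /*, ... */);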

    void emit_jump_if(ScopedOperand const& condition, Label true_target, Label false_target);

    struct ReferenceOperands {
        Optional<ScopedOperand> base {};                                 // [[Base]]
        Optional<ScopedOperand> referenced_name {};                      // [[ReferencedName]] as an operand
        Optional<IdentifierTableIndex> referenced_identifier {};         // [[ReferencedName]] as an identifier
        Optional<IdentifierTableIndex> referenced_private_identifier {}; // [[ReferencedName]] as a private identifier
        Optional<ScopedOperand> this_value {};                           // [[ThisValue]]
        Optional<ScopedOperand> loaded_value {};                         // Loaded value, if we've performed a load.
    };

    CodeGenerationErrorOr<ReferenceOperands> emit_load_from_reference(JS::ASTNode const&, Optional<ScopedOperand> preferred_dst = {});
    CodeGenerationErrorOr<void> emit_store_to_reference(JS::ASTNode const&, ScopedOperand value);
    CodeGenerationErrorOr<void> emit_store_to_reference(ReferenceOperands const&, ScopedOperand value);
    CodeGenerationErrorOr<Optional<ScopedOperand>> emit_delete_reference(JS::ASTNode const&);
    CodeGenerationErrorOr<ReferenceOperands> emit_super_reference(MemberExpression const&);
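
    // Usage sketch (illustrative, not part of this header): a compound assignment
    // typically loads through a reference, computes, then stores back through the
    // same ReferenceOperands so the base is only evaluated once. `lhs_node` and
    // `result` are hypothetical.
    //
    //     auto reference = TRY(generator.emit_load_from_reference(lhs_node));
    //     // ... emit the operation on reference.loaded_value into `result` ...
    //     TRY(generator.emit_store_to_reference(reference, result));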

    void emit_set_variable(JS::Identifier const& identifier, ScopedOperand value, Bytecode::Op::SetVariable::InitializationMode initialization_mode = Bytecode::Op::SetVariable::InitializationMode::Set, Bytecode::Op::EnvironmentMode mode = Bytecode::Op::EnvironmentMode::Lexical);

    void push_home_object(ScopedOperand);
    void pop_home_object();
    void emit_new_function(ScopedOperand dst, JS::FunctionExpression const&, Optional<IdentifierTableIndex> lhs_name);

    CodeGenerationErrorOr<Optional<ScopedOperand>> emit_named_evaluation_if_anonymous_function(Expression const&, Optional<IdentifierTableIndex> lhs_name, Optional<ScopedOperand> preferred_dst = {});

    void begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set);
    void end_continuable_scope();
    void begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set);
    void end_breakable_scope();

    [[nodiscard]] Label nearest_continuable_scope() const;
    [[nodiscard]] Label nearest_breakable_scope() const;

    void switch_to_basic_block(BasicBlock& block)
    {
        m_current_basic_block = &block;
    }

    [[nodiscard]] BasicBlock& current_block() { return *m_current_basic_block; }

    BasicBlock& make_block(String name = {})
    {
        if (name.is_empty())
            name = MUST(String::number(m_next_block++));
        auto block = BasicBlock::create(m_root_basic_blocks.size(), name);
        // New blocks inherit the innermost unwind context's handler and finalizer,
        // if any, so exceptions thrown inside them unwind to the right place.
        if (auto const* context = m_current_unwind_context) {
            if (context->handler().has_value())
                block->set_handler(*m_root_basic_blocks[context->handler().value().basic_block_index()]);
            if (context->finalizer().has_value())
                block->set_finalizer(*m_root_basic_blocks[context->finalizer().value().basic_block_index()]);
        }
        m_root_basic_blocks.append(move(block));
        return *m_root_basic_blocks.last();
    }
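
    // Usage sketch (illustrative, not part of this header): control flow is built
    // by creating blocks, branching into them, and switching generation to the
    // block being filled next.
    //
    //     auto& true_block = generator.make_block();
    //     auto& false_block = generator.make_block();
    //     generator.emit_jump_if(condition, Label { true_block }, Label { false_block });
    //     generator.switch_to_basic_block(true_block);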

    bool is_current_block_terminated() const
    {
        return m_current_basic_block->is_terminated();
    }

    StringTableIndex intern_string(ByteString string)
    {
        return m_string_table->insert(move(string));
    }

    RegexTableIndex intern_regex(ParsedRegex regex)
    {
        return m_regex_table->insert(move(regex));
    }

    IdentifierTableIndex intern_identifier(DeprecatedFlyString string)
    {
        return m_identifier_table->insert(move(string));
    }
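
    // Usage sketch (illustrative, not part of this header): strings, regexes, and
    // identifiers are interned into per-executable side tables; instructions refer
    // to them by table index instead of carrying them inline.
    //
    //     auto name_index = generator.intern_identifier("foo");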

    Optional<IdentifierTableIndex> intern_identifier_for_expression(Expression const& expression);

    bool is_in_generator_or_async_function() const { return m_enclosing_function_kind == FunctionKind::Async || m_enclosing_function_kind == FunctionKind::Generator || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_generator_function() const { return m_enclosing_function_kind == FunctionKind::Generator || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_async_function() const { return m_enclosing_function_kind == FunctionKind::Async || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_async_generator_function() const { return m_enclosing_function_kind == FunctionKind::AsyncGenerator; }

    enum class BindingMode {
        Lexical,
        Var,
        Global,
    };

    struct LexicalScope {
        SurroundingScopeKind kind;
    };

    void block_declaration_instantiation(ScopeNode const&);

    void begin_variable_scope();
    void end_variable_scope();

    enum class BlockBoundaryType {
        Break,
        Continue,
        Unwind,
        ReturnToFinally,
        LeaveFinally,
        LeaveLexicalEnvironment,
    };

    template<typename OpType>
    void perform_needed_unwinds()
    requires(OpType::IsTerminator && !IsSame<OpType, Op::Jump>)
    {
        // Walk the boundary stack from innermost to outermost, emitting whatever
        // cleanup each boundary requires before the terminator itself is emitted.
        for (size_t i = m_boundaries.size(); i > 0; --i) {
            auto boundary = m_boundaries[i - 1];
            using enum BlockBoundaryType;
            switch (boundary) {
            case Unwind:
                // When throwing, stop here: the enclosing unwind context must stay
                // active so its handler can catch the exception.
                if constexpr (IsSame<OpType, Bytecode::Op::Throw>)
                    return;
                emit<Bytecode::Op::LeaveUnwindContext>();
                break;
            case LeaveLexicalEnvironment:
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
                break;
            case Break:
            case Continue:
                break;
            case ReturnToFinally:
                return;
            case LeaveFinally:
                emit<Bytecode::Op::LeaveFinally>();
                break;
            }
        }
    }
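
    // Usage sketch (illustrative, not part of this header): callers perform the
    // needed unwinds immediately before emitting a non-Jump terminator, so that
    // e.g. a `return` inside try/finally leaves intermediate contexts correctly.
    // The Op::Return operand shown is an assumption.
    //
    //     generator.perform_needed_unwinds<Bytecode::Op::Return>();
    //     generator.emit<Bytecode::Op::Return>(value);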

    bool is_in_finalizer() const { return m_boundaries.contains_slow(BlockBoundaryType::LeaveFinally); }

    void generate_break();
    void generate_break(DeprecatedFlyString const& break_label);
    void generate_continue();
    void generate_continue(DeprecatedFlyString const& continue_label);

    void start_boundary(BlockBoundaryType type) { m_boundaries.append(type); }
    void end_boundary(BlockBoundaryType type)
    {
        VERIFY(m_boundaries.last() == type);
        m_boundaries.take_last();
    }
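
    // Usage sketch (illustrative, not part of this header): boundaries are pushed
    // and popped in strict LIFO order around the construct they describe;
    // end_boundary() VERIFYs the pairing.
    //
    //     generator.start_boundary(BlockBoundaryType::Break);
    //     // ... generate the breakable body ...
    //     generator.end_boundary(BlockBoundaryType::Break);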

    [[nodiscard]] ScopedOperand get_this(Optional<ScopedOperand> preferred_dst = {});

    void emit_get_by_id(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier = {});
    void emit_get_by_id_with_this(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex, ScopedOperand this_value);

    void emit_iterator_value(ScopedOperand dst, ScopedOperand result);
    void emit_iterator_complete(ScopedOperand dst, ScopedOperand result);

    [[nodiscard]] size_t next_global_variable_cache() { return m_next_global_variable_cache++; }
    [[nodiscard]] size_t next_environment_variable_cache() { return m_next_environment_variable_cache++; }
    [[nodiscard]] size_t next_property_lookup_cache() { return m_next_property_lookup_cache++; }

    enum class DeduplicateConstant {
        Yes,
        No,
    };

    [[nodiscard]] ScopedOperand add_constant(Value value, DeduplicateConstant deduplicate_constant = DeduplicateConstant::Yes)
    {
        // Deduplication is a linear scan over the existing constants; it keeps
        // the constant pool small at the cost of O(n) insertion.
        if (deduplicate_constant == DeduplicateConstant::Yes) {
            for (size_t i = 0; i < m_constants.size(); ++i) {
                if (m_constants[i] == value)
                    return ScopedOperand(*this, Operand(Operand::Type::Constant, i));
            }
        }
        m_constants.append(value);
        return ScopedOperand(*this, Operand(Operand::Type::Constant, m_constants.size() - 1));
    }
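
    // Usage sketch (illustrative, not part of this header): a constant becomes an
    // Operand of type Constant that later instructions can reference directly.
    //
    //     auto forty_two = generator.add_constant(Value(42));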

    UnwindContext const* current_unwind_context() const { return m_current_unwind_context; }

    [[nodiscard]] bool is_finished() const { return m_finished; }

private:
    VM& m_vm;

    enum class JumpType {
        Continue,
        Break,
    };

    void generate_scoped_jump(JumpType);
    void generate_labelled_jump(JumpType, DeprecatedFlyString const& label);

    explicit Generator(VM&);
    ~Generator() = default;

    void grow(size_t);

    // Returns true if a fused instruction was emitted.
    [[nodiscard]] bool fuse_compare_and_jump(ScopedOperand const& condition, Label true_target, Label false_target);

    struct LabelableScope {
        Label bytecode_target;
        Vector<DeprecatedFlyString> language_label_set;
    };

    BasicBlock* m_current_basic_block { nullptr };
    ASTNode const* m_current_ast_node { nullptr };
    UnwindContext const* m_current_unwind_context { nullptr };

    Vector<NonnullOwnPtr<BasicBlock>> m_root_basic_blocks;
    NonnullOwnPtr<StringTable> m_string_table;
    NonnullOwnPtr<IdentifierTable> m_identifier_table;
    NonnullOwnPtr<RegexTable> m_regex_table;
    MarkedVector<Value> m_constants;

    ScopedOperand m_accumulator;
    Vector<Register> m_free_registers;

    u32 m_next_register { Register::reserved_register_count };
    u32 m_next_block { 1 };
    u32 m_next_property_lookup_cache { 0 };
    u32 m_next_global_variable_cache { 0 };
    u32 m_next_environment_variable_cache { 0 };

    FunctionKind m_enclosing_function_kind { FunctionKind::Normal };

    Vector<LabelableScope> m_continuable_scopes;
    Vector<LabelableScope> m_breakable_scopes;
    Vector<BlockBoundaryType> m_boundaries;
    Vector<ScopedOperand> m_home_objects;

    HashTable<u32> m_initialized_locals;

    bool m_finished { false };
};

}