/*
 * Copyright (c) 2021-2024, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/OwnPtr.h>
#include <AK/SinglyLinkedList.h>
#include <LibJS/AST.h>
#include <LibJS/Bytecode/BasicBlock.h>
#include <LibJS/Bytecode/CodeGenerationError.h>
#include <LibJS/Bytecode/Executable.h>
#include <LibJS/Bytecode/IdentifierTable.h>
#include <LibJS/Bytecode/Label.h>
#include <LibJS/Bytecode/Op.h>
#include <LibJS/Bytecode/Register.h>
#include <LibJS/Bytecode/StringTable.h>
#include <LibJS/Forward.h>
#include <LibJS/Runtime/FunctionKind.h>
#include <LibRegex/Regex.h>

namespace JS::Bytecode {

class Generator {
public:
    VM& vm() { return m_vm; }

    enum class SurroundingScopeKind {
        Global,
        Function,
        Block,
    };

    static CodeGenerationErrorOr<NonnullGCPtr<Executable>> generate_from_ast_node(VM&, ASTNode const&, FunctionKind = FunctionKind::Normal);
    static CodeGenerationErrorOr<NonnullGCPtr<Executable>> generate_from_function(VM&, ECMAScriptFunctionObject const& function);

    CodeGenerationErrorOr<void> emit_function_declaration_instantiation(ECMAScriptFunctionObject const& function);

    [[nodiscard]] ScopedOperand allocate_register();
    [[nodiscard]] ScopedOperand local(u32 local_index);
    [[nodiscard]] ScopedOperand accumulator();
    void free_register(Register);

    void set_local_initialized(u32 local_index);
    [[nodiscard]] bool is_local_initialized(u32 local_index) const;

    class SourceLocationScope {
    public:
        SourceLocationScope(Generator&, ASTNode const& node);
        ~SourceLocationScope();

    private:
        Generator& m_generator;
        ASTNode const* m_previous_node { nullptr };
    };
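
    // Illustrative use (a sketch, not taken from this file): SourceLocationScope is an RAII helper
    // that presumably makes `node` the generator's current AST node for the duration of a scope, so
    // that emit() records source map entries against that node's source range; the destructor
    // restores the previously active node. Assuming a Generator& `generator` and an ASTNode const&
    // `node`:
    //
    //     {
    //         Generator::SourceLocationScope scope(generator, node);
    //         // ... instructions emitted here map back to node's start/end offsets ...
    //     }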

    class UnwindContext {
    public:
        UnwindContext(Generator&, Optional<Label> finalizer);

        UnwindContext const* previous() const { return m_previous_context; }
        void set_handler(Label handler) { m_handler = handler; }
        Optional<Label> handler() const { return m_handler; }
        Optional<Label> finalizer() const { return m_finalizer; }

        ~UnwindContext();

    private:
        Generator& m_generator;
        Optional<Label> m_finalizer;
        Optional<Label> m_handler {};
        UnwindContext const* m_previous_context { nullptr };
    };
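
    // Illustrative use (a sketch based only on the interface above): an UnwindContext is kept alive
    // for the duration of a try statement so that blocks created via make_block() inherit its
    // handler/finalizer labels; the constructor presumably links it to the previous context and the
    // destructor pops it. Assuming a Generator& `generator` and Labels for the catch/finally blocks:
    //
    //     Generator::UnwindContext unwind_context(generator, finalizer_label); // finalizer is optional
    //     unwind_context.set_handler(handler_label);
    //     // ... generate the try body; newly made blocks pick up the handler/finalizer ...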

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit(Args&&... args)
    {
        VERIFY(!is_current_block_terminated());

        size_t slot_offset = m_current_basic_block->size();
        m_current_basic_block->set_last_instruction_start_offset(slot_offset);
        grow(sizeof(OpType));
        void* slot = m_current_basic_block->data() + slot_offset;
        new (slot) OpType(forward<Args>(args)...);
        if constexpr (OpType::IsTerminator)
            m_current_basic_block->terminate({});
        m_current_basic_block->add_source_map_entry(slot_offset, { m_current_ast_node->start_offset(), m_current_ast_node->end_offset() });
    }

    template<typename OpType, typename ExtraSlotType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_slots(size_t extra_slot_count, Args&&... args)
    {
        VERIFY(!is_current_block_terminated());

        size_t size_to_allocate = round_up_to_power_of_two(sizeof(OpType) + extra_slot_count * sizeof(ExtraSlotType), alignof(void*));
        size_t slot_offset = m_current_basic_block->size();
        m_current_basic_block->set_last_instruction_start_offset(slot_offset);
        grow(size_to_allocate);
        void* slot = m_current_basic_block->data() + slot_offset;
        new (slot) OpType(forward<Args>(args)...);
        if constexpr (OpType::IsTerminator)
            m_current_basic_block->terminate({});
        m_current_basic_block->add_source_map_entry(slot_offset, { m_current_ast_node->start_offset(), m_current_ast_node->end_offset() });
    }

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_operand_slots(size_t extra_operand_slots, Args&&... args)
    {
        emit_with_extra_slots<OpType, Operand>(extra_operand_slots, forward<Args>(args)...);
    }

    template<typename OpType, typename... Args>
    requires(requires { OpType(declval<Args>()...); })
    void emit_with_extra_value_slots(size_t extra_value_slots, Args&&... args)
    {
        emit_with_extra_slots<OpType, Value>(extra_value_slots, forward<Args>(args)...);
    }
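
    // Illustrative use (a sketch; the exact Op constructor signatures live in LibJS/Bytecode/Op.h
    // and are an assumption here): emit() placement-news one instruction into the current basic
    // block and, if the opcode is a terminator, marks the block terminated:
    //
    //     generator.emit<Bytecode::Op::LeaveUnwindContext>();         // instruction without operands
    //     generator.emit<Bytecode::Op::Jump>(Label { target_block }); // terminator: block is now terminated
    //
    // emit_with_extra_operand_slots() / emit_with_extra_value_slots() are for variable-length
    // instructions that carry a trailing array of Operands or Values after the fixed-size part.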

    void emit_jump_if(ScopedOperand const& condition, Label true_target, Label false_target);

    struct ReferenceOperands {
        Optional<ScopedOperand> base {};                                  // [[Base]]
        Optional<ScopedOperand> referenced_name {};                       // [[ReferencedName]] as an operand
        Optional<IdentifierTableIndex> referenced_identifier {};          // [[ReferencedName]] as an identifier
        Optional<IdentifierTableIndex> referenced_private_identifier {};  // [[ReferencedName]] as a private identifier
        Optional<ScopedOperand> this_value {};                            // [[ThisValue]]
        Optional<ScopedOperand> loaded_value {};                          // Loaded value, if we've performed a load.
    };
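
    // For orientation (an assumption based on the field names, which mirror the ECMAScript
    // Reference Record): for a member expression such as `obj.prop`, `base` would hold the
    // evaluated `obj`, `referenced_identifier` the interned "prop", and, after a load,
    // `loaded_value` the fetched property value; `referenced_name` is used instead when the
    // property name is computed (e.g. `obj[key]`).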

    CodeGenerationErrorOr<ReferenceOperands> emit_load_from_reference(JS::ASTNode const&, Optional<ScopedOperand> preferred_dst = {});
    CodeGenerationErrorOr<void> emit_store_to_reference(JS::ASTNode const&, ScopedOperand value);
    CodeGenerationErrorOr<void> emit_store_to_reference(ReferenceOperands const&, ScopedOperand value);
    CodeGenerationErrorOr<Optional<ScopedOperand>> emit_delete_reference(JS::ASTNode const&);

    CodeGenerationErrorOr<ReferenceOperands> emit_super_reference(MemberExpression const&);

    void emit_set_variable(JS::Identifier const& identifier, ScopedOperand value, Bytecode::Op::SetVariable::InitializationMode initialization_mode = Bytecode::Op::SetVariable::InitializationMode::Set, Bytecode::Op::EnvironmentMode mode = Bytecode::Op::EnvironmentMode::Lexical);

    void push_home_object(ScopedOperand);
    void pop_home_object();
    void emit_new_function(ScopedOperand dst, JS::FunctionExpression const&, Optional<IdentifierTableIndex> lhs_name);
    CodeGenerationErrorOr<Optional<ScopedOperand>> emit_named_evaluation_if_anonymous_function(Expression const&, Optional<IdentifierTableIndex> lhs_name, Optional<ScopedOperand> preferred_dst = {});

    void begin_continuable_scope(Label continue_target, Vector<DeprecatedFlyString> const& language_label_set);
    void end_continuable_scope();
    void begin_breakable_scope(Label breakable_target, Vector<DeprecatedFlyString> const& language_label_set);
    void end_breakable_scope();

    [[nodiscard]] Label nearest_continuable_scope() const;
    [[nodiscard]] Label nearest_breakable_scope() const;

    void switch_to_basic_block(BasicBlock& block)
    {
        m_current_basic_block = &block;
    }

    [[nodiscard]] BasicBlock& current_block() { return *m_current_basic_block; }

    BasicBlock& make_block(String name = {})
    {
        if (name.is_empty())
            name = MUST(String::number(m_next_block++));
        auto block = BasicBlock::create(m_root_basic_blocks.size(), name);
        if (auto const* context = m_current_unwind_context) {
            if (context->handler().has_value())
                block->set_handler(*m_root_basic_blocks[context->handler().value().basic_block_index()]);
            if (context->finalizer().has_value())
                block->set_finalizer(*m_root_basic_blocks[context->finalizer().value().basic_block_index()]);
        }
        m_root_basic_blocks.append(move(block));
        return *m_root_basic_blocks.last();
    }
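
    // Illustrative use (a sketch, assuming a Generator& `generator` and that Label can be
    // constructed from a BasicBlock): lowering a branch typically makes the successor blocks
    // first, emits the branch, then continues generating in one of them:
    //
    //     auto& true_block = generator.make_block();
    //     auto& false_block = generator.make_block();
    //     generator.emit_jump_if(condition, Label { true_block }, Label { false_block });
    //     generator.switch_to_basic_block(true_block);
    //     // ... emit the "true" path ...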

    bool is_current_block_terminated() const
    {
        return m_current_basic_block->is_terminated();
    }

    StringTableIndex intern_string(ByteString string)
    {
        return m_string_table->insert(move(string));
    }

    RegexTableIndex intern_regex(ParsedRegex regex)
    {
        return m_regex_table->insert(move(regex));
    }

    IdentifierTableIndex intern_identifier(DeprecatedFlyString string)
    {
        return m_identifier_table->insert(move(string));
    }
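
    // Interned strings, identifiers and regexes live in per-executable side tables and are
    // referred to by index in the emitted instructions. For example (a sketch), a property access
    // could be lowered by interning the property name and handing the resulting
    // IdentifierTableIndex to emit_get_by_id() below:
    //
    //     auto property = generator.intern_identifier("length");
    //     generator.emit_get_by_id(dst, base, property);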

    Optional<IdentifierTableIndex> intern_identifier_for_expression(Expression const& expression);

    bool is_in_generator_or_async_function() const { return m_enclosing_function_kind == FunctionKind::Async || m_enclosing_function_kind == FunctionKind::Generator || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_generator_function() const { return m_enclosing_function_kind == FunctionKind::Generator || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_async_function() const { return m_enclosing_function_kind == FunctionKind::Async || m_enclosing_function_kind == FunctionKind::AsyncGenerator; }
    bool is_in_async_generator_function() const { return m_enclosing_function_kind == FunctionKind::AsyncGenerator; }

    enum class BindingMode {
        Lexical,
        Var,
        Global,
    };

    struct LexicalScope {
        SurroundingScopeKind kind;
    };

    // Returns true if a lexical environment was created.
    bool emit_block_declaration_instantiation(ScopeNode const&);

    void begin_variable_scope();
    void end_variable_scope();

    enum class BlockBoundaryType {
        Break,
        Continue,
        Unwind,
        ReturnToFinally,
        LeaveFinally,
        LeaveLexicalEnvironment,
    };
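
    // Walks the active block boundaries from innermost to outermost and emits the cleanup needed
    // before a non-local exit: LeaveUnwindContext at Unwind boundaries (unless the terminator is
    // Throw, in which case the walk stops there, presumably because the throw itself unwinds),
    // LeaveLexicalEnvironment and LeaveFinally at their respective boundaries. Break/Continue
    // boundaries are skipped, and the walk stops at a ReturnToFinally boundary.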
    template<typename OpType>
    void perform_needed_unwinds()
    requires(OpType::IsTerminator && !IsSame<OpType, Op::Jump>)
    {
        for (size_t i = m_boundaries.size(); i > 0; --i) {
            auto boundary = m_boundaries[i - 1];
            using enum BlockBoundaryType;
            switch (boundary) {
            case Unwind:
                if constexpr (IsSame<OpType, Bytecode::Op::Throw>)
                    return;
                emit<Bytecode::Op::LeaveUnwindContext>();
                break;
            case LeaveLexicalEnvironment:
                emit<Bytecode::Op::LeaveLexicalEnvironment>();
                break;
            case Break:
            case Continue:
                break;
            case ReturnToFinally:
                return;
            case LeaveFinally:
                emit<Bytecode::Op::LeaveFinally>();
                break;
            }
        }
    }

    bool is_in_finalizer() const { return m_boundaries.contains_slow(BlockBoundaryType::LeaveFinally); }

    void generate_break();
    void generate_break(DeprecatedFlyString const& break_label);

    void generate_continue();
    void generate_continue(DeprecatedFlyString const& continue_label);

    void start_boundary(BlockBoundaryType type) { m_boundaries.append(type); }
    void end_boundary(BlockBoundaryType type)
    {
        VERIFY(m_boundaries.last() == type);
        m_boundaries.take_last();
    }
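
    // Illustrative pairing (a sketch): boundaries are pushed and popped around the construct they
    // describe and must nest properly, since end_boundary() verifies it pops the matching type:
    //
    //     generator.start_boundary(Generator::BlockBoundaryType::Break);
    //     // ... generate the breakable body; generate_break() presumably unwinds up to here ...
    //     generator.end_boundary(Generator::BlockBoundaryType::Break);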

    [[nodiscard]] ScopedOperand get_this(Optional<ScopedOperand> preferred_dst = {});

    void emit_get_by_id(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex property_identifier, Optional<IdentifierTableIndex> base_identifier = {});
    void emit_get_by_id_with_this(ScopedOperand dst, ScopedOperand base, IdentifierTableIndex, ScopedOperand this_value);

    void emit_iterator_value(ScopedOperand dst, ScopedOperand result);
    void emit_iterator_complete(ScopedOperand dst, ScopedOperand result);

    [[nodiscard]] size_t next_global_variable_cache() { return m_next_global_variable_cache++; }
    [[nodiscard]] size_t next_environment_variable_cache() { return m_next_environment_variable_cache++; }
    [[nodiscard]] size_t next_property_lookup_cache() { return m_next_property_lookup_cache++; }

    enum class DeduplicateConstant {
        Yes,
        No,
    };

    [[nodiscard]] ScopedOperand add_constant(Value value, DeduplicateConstant deduplicate_constant = DeduplicateConstant::Yes)
    {
        if (deduplicate_constant == DeduplicateConstant::Yes) {
            for (size_t i = 0; i < m_constants.size(); ++i) {
                if (m_constants[i] == value)
                    return ScopedOperand(*this, Operand(Operand::Type::Constant, i));
            }
        }
        m_constants.append(value);
        return ScopedOperand(*this, Operand(Operand::Type::Constant, m_constants.size() - 1));
    }
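
    // Note: with DeduplicateConstant::Yes (the default), this is a linear scan over the constants
    // added so far, so an equal Value reuses its existing slot instead of appending a new one.
    // A sketch of the effect (assuming Value compares equal for identical numbers):
    //
    //     auto a = generator.add_constant(Value(42));
    //     auto b = generator.add_constant(Value(42)); // same constant slot as `a`
    //     auto c = generator.add_constant(Value(42), Generator::DeduplicateConstant::No); // new slot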

    UnwindContext const* current_unwind_context() const { return m_current_unwind_context; }

    [[nodiscard]] bool is_finished() const { return m_finished; }

private:
    VM& m_vm;

    static CodeGenerationErrorOr<NonnullGCPtr<Executable>> emit_function_body_bytecode(VM&, ASTNode const&, FunctionKind, GCPtr<ECMAScriptFunctionObject const>);

    enum class JumpType {
        Continue,
        Break,
    };
    void generate_scoped_jump(JumpType);
    void generate_labelled_jump(JumpType, DeprecatedFlyString const& label);

    explicit Generator(VM&);
    ~Generator() = default;

    void grow(size_t);

    // Returns true if a fused instruction was emitted.
    [[nodiscard]] bool fuse_compare_and_jump(ScopedOperand const& condition, Label true_target, Label false_target);

    struct LabelableScope {
        Label bytecode_target;
        Vector<DeprecatedFlyString> language_label_set;
    };

    BasicBlock* m_current_basic_block { nullptr };
    ASTNode const* m_current_ast_node { nullptr };
    UnwindContext const* m_current_unwind_context { nullptr };

    Vector<NonnullOwnPtr<BasicBlock>> m_root_basic_blocks;
    NonnullOwnPtr<StringTable> m_string_table;
    NonnullOwnPtr<IdentifierTable> m_identifier_table;
    NonnullOwnPtr<RegexTable> m_regex_table;
    MarkedVector<Value> m_constants;

    ScopedOperand m_accumulator;
    Vector<Register> m_free_registers;

    u32 m_next_register { Register::reserved_register_count };
    u32 m_next_block { 1 };
    u32 m_next_property_lookup_cache { 0 };
    u32 m_next_global_variable_cache { 0 };
    u32 m_next_environment_variable_cache { 0 };
    FunctionKind m_enclosing_function_kind { FunctionKind::Normal };
    Vector<LabelableScope> m_continuable_scopes;
    Vector<LabelableScope> m_breakable_scopes;
    Vector<BlockBoundaryType> m_boundaries;
    Vector<ScopedOperand> m_home_objects;

    HashTable<u32> m_initialized_locals;

    bool m_finished { false };
};

}