GenerateCFG.cpp

/*
 * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/TemporaryChange.h>
#include <LibJS/Bytecode/PassManager.h>

namespace JS::Bytecode::Passes {
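
// One UnwindFrame per active unwind context (roughly, one per `try` scope):
// `handler` and `finalizer` are null when the context has no catch/finally
// block, and `finalizer_targets` collects the blocks execution should continue
// in once the finalizer has run; a null entry stands for a plain `return`.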
struct UnwindFrame {
    BasicBlock const* handler;
    BasicBlock const* finalizer;
    Vector<BasicBlock const*> finalizer_targets;
};
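
// Pass-local scratch state, reset at the start of every GenerateCFG::perform() run.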
static HashTable<BasicBlock const*> seen_blocks;
static Vector<UnwindFrame*> unwind_frames;
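
// Note: `?:` is the GNU "elvis operator" extension: it yields the left-hand
// operand if that is non-null, and the right-hand operand otherwise. The
// innermost handler therefore takes precedence over the innermost finalizer.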
static BasicBlock const* next_handler_or_finalizer()
{
    return unwind_frames.last()->handler ?: unwind_frames.last()->finalizer;
}

static void generate_cfg_for_block(BasicBlock const& current_block, PassPipelineExecutable& executable)
{
    seen_blocks.set(&current_block);

    auto enter_label = [&](Label const& label, BasicBlock const& entering_block) {
        executable.cfg->ensure(&entering_block).set(&label.block());
        executable.inverted_cfg->ensure(&label.block()).set(&entering_block);

        // The finalizers and handlers of an unwind context are handled separately
        if (!seen_blocks.contains(&label.block())
            && &label.block() != unwind_frames.last()->handler
            && &label.block() != unwind_frames.last()->finalizer)
            generate_cfg_for_block(label.block(), executable);
    };
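
    // Pessimistically assume that any instruction in this block may throw, and
    // record an edge to the innermost handler (or finalizer) up front.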
    if (auto const* block = next_handler_or_finalizer())
        enter_label(Label { *block }, current_block);

    for (InstructionStreamIterator it { current_block.instruction_stream() }; !it.at_end(); ++it) {
        auto const& instruction = *it;

        if (instruction.type() == Instruction::Type::LeaveUnwindContext) {
            if (unwind_frames.last()->finalizer && unwind_frames.last()->finalizer != &current_block)
                dbgln("FIXME: Popping finalizer from the unwind context from outside the finalizer");
            unwind_frames.take_last();

            if (auto const* block = next_handler_or_finalizer())
                enter_label(Label { *block }, current_block);
        }

        if (!instruction.is_terminator())
            continue;

        using enum Instruction::Type;
        switch (instruction.type()) {
        case Jump: {
            auto true_target = *static_cast<Op::Jump const&>(instruction).true_target();
            enter_label(true_target, current_block);
            return;
        }
        case JumpIf:
        case JumpNullish:
        case JumpUndefined: {
            // FIXME: It would be nice if we could avoid this copy, e.g. if we knew that the unwind
            //        context stays the same on both paths, or with a COW-capable Vector alternative.
            // Note: We might partially unwind here, so we need to make a copy of
            //       the current context to ensure that the falsy code path sees the same one.
            {
                TemporaryChange saved_context { unwind_frames, unwind_frames };
                auto true_target = *static_cast<Op::Jump const&>(instruction).true_target();
                enter_label(true_target, current_block);
            }
            auto false_target = *static_cast<Op::Jump const&>(instruction).false_target();
            enter_label(false_target, current_block);
            return;
        }
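        // A Yield suspends the executable, and its continuation block is re-entered
        // from the outside, so that block has to be exported. A yield without a
        // continuation acts like a return and must still route through any finalizer.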
        case Yield: {
            auto continuation = static_cast<Op::Yield const&>(instruction).continuation();
            if (continuation.has_value()) {
                executable.exported_blocks->set(&continuation->block());
                enter_label(*continuation, current_block);
            } else if (auto const* finalizer = unwind_frames.last()->finalizer) {
                enter_label(Label { *finalizer }, current_block);
                unwind_frames.last()->finalizer_targets.append(nullptr);
            }
            return;
        }
        case Await: {
            auto const& continuation = static_cast<Op::Await const&>(instruction).continuation();
            executable.exported_blocks->set(&continuation.block());
            enter_label(continuation, current_block);
            return;
        }
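        // Entering an unwind context pushes a frame for the duration of the body walk;
        // the handler and finalizer blocks are deliberately skipped by enter_label()
        // above, so their CFGs are generated manually below.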
        case EnterUnwindContext: {
            auto entry_point = static_cast<Op::EnterUnwindContext const&>(instruction).entry_point();
            auto handler_target = static_cast<Op::EnterUnwindContext const&>(instruction).handler();
            auto finalizer_target = static_cast<Op::EnterUnwindContext const&>(instruction).finalizer();

            // We keep the frame alive on the stack here to save on allocations
            UnwindFrame frame {
                .handler = handler_target.has_value() ? &handler_target->block() : nullptr,
                .finalizer = finalizer_target.has_value() ? &finalizer_target->block() : nullptr,
                .finalizer_targets = {}
            };
            unwind_frames.append(&frame);

            {
                // This will enter the handler and finalizer when needed.
                TemporaryChange saved_context { unwind_frames, unwind_frames };
                enter_label(entry_point, current_block);
            }

            frame.handler = nullptr;
            if (handler_target.has_value()) {
                // We manually generate the CFG, because we previously skipped it
                TemporaryChange saved_context { unwind_frames, unwind_frames };
                generate_cfg_for_block(handler_target->block(), executable);
            }
            if (finalizer_target.has_value()) {
                // We manually generate the CFG, because we previously halted before entering it
                generate_cfg_for_block(finalizer_target->block(), executable);

                // We previously halted execution when we would have entered the finalizer,
                // so we now have to visit all possible targets.
                // This mainly affects the ScheduleJump instruction.
                for (auto const* block : frame.finalizer_targets) {
                    if (block == nullptr) {
                        // This signals a `return`, which we do not handle specially, so we skip it
                        continue;
                    }
                    if (!seen_blocks.contains(block))
                        generate_cfg_for_block(*block, executable);
                }
            } else {
                unwind_frames.take_last();
                VERIFY(frame.finalizer_targets.is_empty());
            }
            return;
        }
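        // ContinuePendingUnwind either resumes at its resume target or keeps
        // unwinding into the innermost handler/finalizer, so both edges are recorded.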
        case ContinuePendingUnwind: {
            auto resume_target = static_cast<Op::ContinuePendingUnwind const&>(instruction).resume_target();
            enter_label(resume_target, current_block);

            // Note: We already mark these possible control flow changes further up, but once we
            //       get better error awareness, being explicit here will be required.
            if (auto const* handler = unwind_frames.last()->handler)
                enter_label(Label { *handler }, current_block);
            else if (auto const* finalizer = unwind_frames.last()->finalizer)
                enter_label(Label { *finalizer }, current_block);
            return;
        }
        case Throw:
            // Note: We technically already register entering the handler in the prelude,
            //       but let's be correct and mark it again; this will be useful once we
            //       have more info on which instructions can actually fail.
            if (auto const* handler = unwind_frames.last()->handler) {
                enter_label(Label { *handler }, current_block);
            } else if (auto const* finalizer = unwind_frames.last()->finalizer) {
                enter_label(Label { *finalizer }, current_block);
                // Note: This error might bubble through the finalizer to the next
                //       handler/finalizer; this is currently marked in the general path.
            }
            return;
        case Return:
            if (auto const* finalizer = unwind_frames.last()->finalizer) {
                enter_label(Label { *finalizer }, current_block);
                unwind_frames.last()->finalizer_targets.append(nullptr);
            }
            return;
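        // A ScheduleJump only appears where a finalizer is active: control first
        // runs the finalizer and then continues at the scheduled target, which is
        // remembered in finalizer_targets and visited once the finalizer is done.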
        case ScheduleJump: {
            enter_label(Label { *unwind_frames.last()->finalizer }, current_block);
            unwind_frames.last()->finalizer_targets.append(
                &static_cast<Op::ScheduleJump const&>(instruction).target().block());
            return;
        }
        case End:
            return;
        default:
            dbgln("Unhandled terminator instruction: `{}`", instruction.to_byte_string(executable.executable));
            VERIFY_NOT_REACHED();
        }
    }

    // We have left the block, but not through a designated terminator, so before
    // we return we need to check whether we still have to go through a finalizer.
    if (auto const* finalizer = unwind_frames.last()->finalizer)
        enter_label(Label { *finalizer }, current_block);
}
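
// The pass entry point: reset the scratch state, seed a synthetic top-level frame
// (so that unwind_frames.last() is always valid), and walk the graph starting from
// the first basic block.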
void GenerateCFG::perform(PassPipelineExecutable& executable)
{
    started();

    executable.cfg = HashMap<BasicBlock const*, HashTable<BasicBlock const*>> {};
    executable.inverted_cfg = HashMap<BasicBlock const*, HashTable<BasicBlock const*>> {};
    executable.exported_blocks = HashTable<BasicBlock const*> {};

    seen_blocks.clear();
    unwind_frames.clear();
    UnwindFrame top_level_frame = {};
    unwind_frames.append(&top_level_frame);

    generate_cfg_for_block(*executable.executable.basic_blocks.first(), executable);

    finished();
}

}