GenerateCFG.cpp 9.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220
  1. /*
  2. * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/TemporaryChange.h>
  7. #include <LibJS/Bytecode/PassManager.h>
  8. namespace JS::Bytecode::Passes {
// One entry of the unwind-context stack: records the catch handler and the
// finalizer (`finally` block) of a try statement currently being traversed.
// Either pointer may be null when the corresponding clause is absent.
struct UnwindFrame {
    BasicBlock const* handler;
    BasicBlock const* finalizer;
    // Blocks that control flow intends to continue at after the finalizer has
    // run (targets of ScheduleJump); a nullptr entry marks a plain
    // `return`/yield-style exit that needs no special continuation.
    Vector<BasicBlock const*> finalizer_targets;
};
// Blocks whose CFG has already been generated; guards against infinite
// recursion when the bytecode contains loops.
static HashTable<BasicBlock const*> seen_blocks;
// Stack of active unwind contexts, innermost last. The pointed-to frames live
// on the C++ stack of generate_cfg_for_block() / GenerateCFG::perform().
static Vector<UnwindFrame*> unwind_frames;
  16. static BasicBlock const* next_handler_or_finalizer()
  17. {
  18. return unwind_frames.last()->handler ?: unwind_frames.last()->finalizer;
  19. }
  20. static void generate_cfg_for_block(BasicBlock const& current_block, PassPipelineExecutable executable)
  21. {
  22. seen_blocks.set(&current_block);
  23. auto enter_label = [&](Label const& label, BasicBlock const& entering_block) {
  24. executable.cfg->ensure(&entering_block).set(&label.block());
  25. executable.inverted_cfg->ensure(&label.block()).set(&entering_block);
  26. // The finalizers and handlers of an unwind context are handled separately
  27. if (!seen_blocks.contains(&label.block())
  28. && &label.block() != unwind_frames.last()->handler
  29. && &label.block() != unwind_frames.last()->finalizer)
  30. generate_cfg_for_block(label.block(), executable);
  31. };
  32. if (auto const* block = next_handler_or_finalizer())
  33. enter_label(Label { *block }, current_block);
  34. for (InstructionStreamIterator it { current_block.instruction_stream() }; !it.at_end(); ++it) {
  35. auto const& instruction = *it;
  36. if (instruction.type() == Instruction::Type::LeaveUnwindContext) {
  37. if (unwind_frames.last()->finalizer && unwind_frames.last()->finalizer != &current_block)
  38. dbgln("FIXME: Popping finalizer from the unwind context from outside the finalizer");
  39. unwind_frames.take_last();
  40. if (auto const* block = next_handler_or_finalizer())
  41. enter_label(Label { *block }, current_block);
  42. }
  43. if (!instruction.is_terminator())
  44. continue;
  45. using enum Instruction::Type;
  46. switch (instruction.type()) {
  47. case Jump: {
  48. auto true_target = *static_cast<Op::Jump const&>(instruction).true_target();
  49. enter_label(true_target, current_block);
  50. return;
  51. }
  52. case JumpConditional:
  53. case JumpNullish:
  54. case JumpUndefined: {
  55. // FIXME: It would be nice if we could avoid this copy, if we know that the unwind context stays the same in both paths
  56. // Or with a COW capable Vector alternative
  57. // Note: We might partially unwind here, so we need to make a copy of
  58. // the current context to assure that the falsy code path has the same one
  59. {
  60. TemporaryChange saved_context { unwind_frames, unwind_frames };
  61. auto true_target = *static_cast<Op::Jump const&>(instruction).true_target();
  62. enter_label(true_target, current_block);
  63. }
  64. auto false_target = *static_cast<Op::Jump const&>(instruction).false_target();
  65. enter_label(false_target, current_block);
  66. return;
  67. }
  68. case Yield: {
  69. auto continuation = static_cast<Op::Yield const&>(instruction).continuation();
  70. if (continuation.has_value()) {
  71. executable.exported_blocks->set(&continuation->block());
  72. enter_label(*continuation, current_block);
  73. } else if (auto const* finalizer = unwind_frames.last()->finalizer) {
  74. enter_label(Label { *finalizer }, current_block);
  75. unwind_frames.last()->finalizer_targets.append(nullptr);
  76. }
  77. return;
  78. }
  79. case Await: {
  80. auto const& continuation = static_cast<Op::Await const&>(instruction).continuation();
  81. executable.exported_blocks->set(&continuation.block());
  82. enter_label(continuation, current_block);
  83. return;
  84. }
  85. case EnterUnwindContext: {
  86. auto entry_point = static_cast<Op::EnterUnwindContext const&>(instruction).entry_point();
  87. auto handler_target = static_cast<Op::EnterUnwindContext const&>(instruction).handler_target();
  88. auto finalizer_target = static_cast<Op::EnterUnwindContext const&>(instruction).finalizer_target();
  89. // We keep the frame alive here on the stack, to save some allocation size
  90. UnwindFrame frame {
  91. .handler = handler_target.has_value() ? &handler_target->block() : nullptr,
  92. .finalizer = finalizer_target.has_value() ? &finalizer_target->block() : nullptr,
  93. .finalizer_targets = {}
  94. };
  95. unwind_frames.append(&frame);
  96. {
  97. // This will enter the handler and finalizer when needed.
  98. TemporaryChange saved_context { unwind_frames, unwind_frames };
  99. enter_label(entry_point, current_block);
  100. }
  101. frame.handler = nullptr;
  102. if (handler_target.has_value()) {
  103. // We manually generate the CFG, because we previously skiped it
  104. TemporaryChange saved_context { unwind_frames, unwind_frames };
  105. generate_cfg_for_block(handler_target->block(), executable);
  106. }
  107. if (finalizer_target.has_value()) {
  108. // We manually generate the CFG, because we previously halted before entering it
  109. generate_cfg_for_block(finalizer_target->block(), executable);
  110. VERIFY(unwind_frames.last() != &frame);
  111. // We previously halted execution when we would enter the finalizer,
  112. // So we now have to visit all possible targets
  113. // This mainly affects the ScheduleJump instruction
  114. for (auto const* block : frame.finalizer_targets) {
  115. if (block == nullptr) {
  116. // This signals a `return`, which we do not handle specially, so we skip
  117. continue;
  118. }
  119. if (!seen_blocks.contains(block))
  120. generate_cfg_for_block(*block, executable);
  121. }
  122. } else {
  123. VERIFY(unwind_frames.last() == &frame);
  124. unwind_frames.take_last();
  125. VERIFY(frame.finalizer_targets.is_empty());
  126. }
  127. return;
  128. }
  129. case ContinuePendingUnwind: {
  130. auto resume_target = static_cast<Op::ContinuePendingUnwind const&>(instruction).resume_target();
  131. enter_label(resume_target, current_block);
  132. // Note: We already mark these possible control flow changes further up, but when we get
  133. // get better error awareness, being explicit here will be required
  134. if (auto const* handler = unwind_frames.last()->handler)
  135. enter_label(Label { *handler }, current_block);
  136. else if (auto const* finalizer = unwind_frames.last()->finalizer)
  137. enter_label(Label { *finalizer }, current_block);
  138. return;
  139. }
  140. case Throw:
  141. // Note: We technically register that we enter the handler in the prelude,
  142. // but lets be correct and mark it again,
  143. // this will be useful once we have more info on which instruction can
  144. // actually fail
  145. if (auto const* handler = unwind_frames.last()->handler) {
  146. enter_label(Label { *handler }, current_block);
  147. } else if (auto const* finalizer = unwind_frames.last()->finalizer) {
  148. enter_label(Label { *finalizer }, current_block);
  149. // Note: This error might bubble through the finalizer to the next handler/finalizer,
  150. // This is currently marked in the general path
  151. }
  152. return;
  153. case Return:
  154. if (auto const* finalizer = unwind_frames.last()->finalizer) {
  155. enter_label(Label { *finalizer }, current_block);
  156. unwind_frames.last()->finalizer_targets.append(nullptr);
  157. }
  158. return;
  159. case ScheduleJump: {
  160. enter_label(Label { *unwind_frames.last()->finalizer }, current_block);
  161. unwind_frames.last()->finalizer_targets.append(
  162. &static_cast<Op::ScheduleJump const&>(instruction).target().block());
  163. return;
  164. }
  165. default:
  166. dbgln("Unhandled terminator instruction: `{}`", instruction.to_deprecated_string(executable.executable));
  167. VERIFY_NOT_REACHED();
  168. };
  169. }
  170. // We have left the block, but not through a designated terminator,
  171. // so before we return, we need to check if we still need to go through a finalizer
  172. if (auto const* finalizer = unwind_frames.last()->finalizer)
  173. enter_label(Label { *finalizer }, current_block);
  174. }
  175. void GenerateCFG::perform(PassPipelineExecutable& executable)
  176. {
  177. started();
  178. executable.cfg = HashMap<BasicBlock const*, HashTable<BasicBlock const*>> {};
  179. executable.inverted_cfg = HashMap<BasicBlock const*, HashTable<BasicBlock const*>> {};
  180. executable.exported_blocks = HashTable<BasicBlock const*> {};
  181. seen_blocks.clear();
  182. unwind_frames.clear();
  183. UnwindFrame top_level_frame = {};
  184. unwind_frames.append(&top_level_frame);
  185. generate_cfg_for_block(*executable.executable.basic_blocks.first(), executable);
  186. finished();
  187. }
  188. }