GenerateCFG.cpp

/*
 * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/TemporaryChange.h>
#include <LibJS/Bytecode/PassManager.h>

namespace JS::Bytecode::Passes {
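
// An UnwindFrame describes one active unwind context: the handler and
// finalizer blocks (if any) of an EnterUnwindContext, plus the blocks that
// control flow still wants to reach once the finalizer has run.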
struct UnwindFrame {
    BasicBlock const* handler;
    BasicBlock const* finalizer;
    Vector<BasicBlock const*> finalizer_targets;
};
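
// Pass-local state: the set of blocks already visited, and the stack of
// unwind contexts that are live at the current point of the traversal.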
static HashTable<BasicBlock const*> seen_blocks;
static Vector<UnwindFrame*> unwind_frames;
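
// The innermost handler wins; if the current frame has no handler, an
// exception goes straight to its finalizer (the `?:` is the GNU extension
// that elides the repeated operand).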
static BasicBlock const* next_handler_or_finalizer()
{
    return unwind_frames.last()->handler ?: unwind_frames.last()->finalizer;
}
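
// Walks one basic block's instruction stream, recording CFG and inverted-CFG
// edges for every way control can leave the block, and recursing into each
// successor that has not been seen yet.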
static void generate_cfg_for_block(BasicBlock const& current_block, PassPipelineExecutable& executable)
{
    seen_blocks.set(&current_block);

    auto enter_label = [&](Label const& label, BasicBlock const& entering_block) {
        executable.cfg->ensure(&entering_block).set(&label.block());
        executable.inverted_cfg->ensure(&label.block()).set(&entering_block);

        // The finalizers and handlers of an unwind context are handled separately
        if (!seen_blocks.contains(&label.block())
            && &label.block() != unwind_frames.last()->handler
            && &label.block() != unwind_frames.last()->finalizer)
            generate_cfg_for_block(label.block(), executable);
    };
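
    // Pessimistically assume that any instruction in this block may throw:
    // record an edge into the innermost handler (or finalizer) up front.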
    if (auto const* block = next_handler_or_finalizer())
        enter_label(Label { *block }, current_block);

    for (InstructionStreamIterator it { current_block.instruction_stream() }; !it.at_end(); ++it) {
        auto const& instruction = *it;

        if (instruction.type() == Instruction::Type::LeaveUnwindContext) {
            if (unwind_frames.last()->finalizer && unwind_frames.last()->finalizer != &current_block)
                dbgln("FIXME: Popping finalizer from the unwind context from outside the finalizer");
            unwind_frames.take_last();

            if (auto const* block = next_handler_or_finalizer())
                enter_label(Label { *block }, current_block);
        }

        if (!instruction.is_terminator())
            continue;
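
        // From here on we only deal with terminators; each case records the
        // successor edges it introduces and then returns out of this block.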
        using enum Instruction::Type;
        switch (instruction.type()) {
        case Jump: {
            auto true_target = *static_cast<Op::Jump const&>(instruction).true_target();
            enter_label(true_target, current_block);
            return;
        }
        case JumpConditional:
        case JumpNullish:
        case JumpUndefined: {
            // FIXME: It would be nice if we could avoid this copy, if we know that the unwind context stays the same in both paths
            //        Or with a COW-capable Vector alternative
            // Note: We might partially unwind here, so we need to make a copy of
            //       the current context to ensure that the falsy code path has the same one
            {
                TemporaryChange saved_context { unwind_frames, unwind_frames };
                auto true_target = *static_cast<Op::Jump const&>(instruction).true_target();
                enter_label(true_target, current_block);
            }
            auto false_target = *static_cast<Op::Jump const&>(instruction).false_target();
            enter_label(false_target, current_block);
            return;
        }
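
        // A yield suspends execution; its continuation block is re-entered
        // from outside the normal control flow, so it is also recorded in
        // exported_blocks. A yield with no continuation behaves like a return.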
        case Yield: {
            auto continuation = static_cast<Op::Yield const&>(instruction).continuation();
            if (continuation.has_value()) {
                executable.exported_blocks->set(&continuation->block());
                enter_label(*continuation, current_block);
            } else if (auto const* finalizer = unwind_frames.last()->finalizer) {
                enter_label(Label { *finalizer }, current_block);
                unwind_frames.last()->finalizer_targets.append(nullptr);
            }
            return;
        }
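
        // EnterUnwindContext pushes a new frame, walks the protected body with
        // the handler/finalizer deliberately skipped (see enter_label above),
        // and only afterwards walks the handler and finalizer themselves.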
        case EnterUnwindContext: {
            auto entry_point = static_cast<Op::EnterUnwindContext const&>(instruction).entry_point();
            auto handler_target = static_cast<Op::EnterUnwindContext const&>(instruction).handler_target();
            auto finalizer_target = static_cast<Op::EnterUnwindContext const&>(instruction).finalizer_target();

            // We keep the frame alive here on the stack, to save some allocation size
            UnwindFrame frame {
                .handler = handler_target.has_value() ? &handler_target->block() : nullptr,
                .finalizer = finalizer_target.has_value() ? &finalizer_target->block() : nullptr,
                .finalizer_targets = {}
            };
            unwind_frames.append(&frame);

            {
                // This will enter the handler and finalizer when needed.
                TemporaryChange saved_context { unwind_frames, unwind_frames };
                enter_label(entry_point, current_block);
            }

            frame.handler = nullptr;
            if (handler_target.has_value()) {
                // We manually generate the CFG, because we previously skipped it
                TemporaryChange saved_context { unwind_frames, unwind_frames };
                generate_cfg_for_block(handler_target->block(), executable);
            }

            if (finalizer_target.has_value()) {
                // We manually generate the CFG, because we previously halted before entering it
                generate_cfg_for_block(finalizer_target->block(), executable);
                VERIFY(unwind_frames.last() != &frame);

                // We previously halted execution when we would enter the finalizer,
                // so we now have to visit all possible targets.
                // This mainly affects the ScheduleJump instruction.
                for (auto const* block : frame.finalizer_targets) {
                    if (block == nullptr) {
                        // This signals a `return`, which we do not handle specially, so we skip it
                        continue;
                    }
                    if (!seen_blocks.contains(block))
                        generate_cfg_for_block(*block, executable);
                }
            } else {
                VERIFY(unwind_frames.last() == &frame);
                unwind_frames.take_last();
                VERIFY(frame.finalizer_targets.is_empty());
            }

            return;
        }
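
        // ContinuePendingUnwind resumes a previously suspended unwind, so both
        // its explicit resume target and the surrounding handler/finalizer are
        // possible successors.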
        case ContinuePendingUnwind: {
            auto resume_target = static_cast<Op::ContinuePendingUnwind const&>(instruction).resume_target();
            enter_label(resume_target, current_block);
            // Note: We already mark these possible control flow changes further up, but when we
            //       get better error awareness, being explicit here will be required
            if (auto const* handler = unwind_frames.last()->handler)
                enter_label(Label { *handler }, current_block);
            else if (auto const* finalizer = unwind_frames.last()->finalizer)
                enter_label(Label { *finalizer }, current_block);
            return;
        }
        case Throw:
            // Note: We technically register that we enter the handler in the prelude,
            //       but let's be correct and mark it again;
            //       this will be useful once we have more info on which instructions can
            //       actually fail
            if (auto const* handler = unwind_frames.last()->handler) {
                enter_label(Label { *handler }, current_block);
            } else if (auto const* finalizer = unwind_frames.last()->finalizer) {
                enter_label(Label { *finalizer }, current_block);
                // Note: This error might bubble through the finalizer to the next handler/finalizer;
                //       this is currently marked in the general path
            }
            return;
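
        // A `return` inside a `try` with a finalizer has to run the finalizer
        // first; the nullptr target marks a plain return with no jump to follow.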
        case Return:
            if (auto const* finalizer = unwind_frames.last()->finalizer) {
                enter_label(Label { *finalizer }, current_block);
                unwind_frames.last()->finalizer_targets.append(nullptr);
            }
            return;
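
        // ScheduleJump always runs the finalizer first and records where to
        // jump once it completes; that target is visited later, from the
        // EnterUnwindContext case above.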
        case ScheduleJump: {
            enter_label(Label { *unwind_frames.last()->finalizer }, current_block);
            unwind_frames.last()->finalizer_targets.append(
                &static_cast<Op::ScheduleJump const&>(instruction).target().block());
            return;
        }
        default:
            dbgln("Unhandled terminator instruction: `{}`", instruction.to_deprecated_string(executable.executable));
            VERIFY_NOT_REACHED();
        }
    }

    // We have left the block, but not through a designated terminator,
    // so before we return, we need to check if we still need to go through a finalizer
    if (auto const* finalizer = unwind_frames.last()->finalizer)
        enter_label(Label { *finalizer }, current_block);
}
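
// Entry point of the pass: resets the per-run state, seeds a top-level frame
// with no handler or finalizer, and starts the traversal at the first block.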
void GenerateCFG::perform(PassPipelineExecutable& executable)
{
    started();

    executable.cfg = HashMap<BasicBlock const*, HashTable<BasicBlock const*>> {};
    executable.inverted_cfg = HashMap<BasicBlock const*, HashTable<BasicBlock const*>> {};
    executable.exported_blocks = HashTable<BasicBlock const*> {};
    seen_blocks.clear();
    unwind_frames.clear();
    UnwindFrame top_level_frame = {};
    unwind_frames.append(&top_level_frame);

    generate_cfg_for_block(*executable.executable.basic_blocks.first(), executable);

    finished();
}

}