RegexOptimizer.cpp

/*
 * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Debug.h>
#include <AK/Function.h>
#include <AK/Queue.h>
#include <AK/QuickSort.h>
#include <AK/RedBlackTree.h>
#include <AK/Stack.h>
#include <AK/Trie.h>
#include <LibRegex/Regex.h>
#include <LibRegex/RegexBytecodeStreamOptimizer.h>
#include <LibUnicode/CharacterTypes.h>

#if REGEX_DEBUG
#    include <AK/ScopeGuard.h>
#    include <AK/ScopeLogger.h>
#endif

namespace regex {

using Detail::Block;

template<typename Parser>
void Regex<Parser>::run_optimization_passes()
{
    parser_result.bytecode.flatten();

    auto blocks = split_basic_blocks(parser_result.bytecode);
    if (attempt_rewrite_entire_match_as_substring_search(blocks))
        return;

    // Rewrite fork loops as atomic groups
    // e.g. a*b -> (ATOMIC a*)b
    attempt_rewrite_loops_as_atomic_groups(blocks);

    // FIXME: There are a few more conditions this can be true in (e.g. within an arbitrarily nested capture group)
    MatchState state;
    auto& opcode = parser_result.bytecode.get_opcode(state);
    if (opcode.opcode_id() == OpCodeId::CheckBegin)
        parser_result.optimization_data.only_start_of_line = true;

    parser_result.bytecode.flatten();
}
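
// Illustrative summary (not part of the pass itself, just a worked example):
// /abc/ flattens into a single block of Char compares, so the substring-search
// rewrite fires and we return early; /a*bc/ keeps its fork loop, but since 'b'
// can never match anything a* consumed, the loop is rewritten into an atomic
// group, effectively (?>a*)bc.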

template<typename Parser>
typename Regex<Parser>::BasicBlockList Regex<Parser>::split_basic_blocks(ByteCode const& bytecode)
{
    BasicBlockList block_boundaries;
    size_t end_of_last_block = 0;

    auto bytecode_size = bytecode.size();

    MatchState state;
    state.instruction_position = 0;
    auto check_jump = [&]<typename T>(OpCode const& opcode) {
        auto& op = static_cast<T const&>(opcode);
        ssize_t jump_offset = op.size() + op.offset();
        if (jump_offset >= 0) {
            block_boundaries.append({ end_of_last_block, state.instruction_position });
            end_of_last_block = state.instruction_position + opcode.size();
        } else {
            // This op jumps back, see if that's within this "block".
            if (jump_offset + state.instruction_position > end_of_last_block) {
                // Split the block!
                block_boundaries.append({ end_of_last_block, jump_offset + state.instruction_position });
                block_boundaries.append({ jump_offset + state.instruction_position, state.instruction_position });
                end_of_last_block = state.instruction_position + opcode.size();
            } else {
                // Nope, it's just a jump to another block
                block_boundaries.append({ end_of_last_block, state.instruction_position });
                end_of_last_block = state.instruction_position + opcode.size();
            }
        }
    };
    for (;;) {
        auto& opcode = bytecode.get_opcode(state);

        switch (opcode.opcode_id()) {
        case OpCodeId::Jump:
            check_jump.template operator()<OpCode_Jump>(opcode);
            break;
        case OpCodeId::JumpNonEmpty:
            check_jump.template operator()<OpCode_JumpNonEmpty>(opcode);
            break;
        case OpCodeId::ForkJump:
            check_jump.template operator()<OpCode_ForkJump>(opcode);
            break;
        case OpCodeId::ForkStay:
            check_jump.template operator()<OpCode_ForkStay>(opcode);
            break;
        case OpCodeId::FailForks:
            block_boundaries.append({ end_of_last_block, state.instruction_position });
            end_of_last_block = state.instruction_position + opcode.size();
            break;
        case OpCodeId::Repeat: {
            // Repeat produces two blocks, one containing its repeated expr, and one after that.
            auto repeat_start = state.instruction_position - static_cast<OpCode_Repeat const&>(opcode).offset();
            if (repeat_start > end_of_last_block)
                block_boundaries.append({ end_of_last_block, repeat_start });
            block_boundaries.append({ repeat_start, state.instruction_position });
            end_of_last_block = state.instruction_position + opcode.size();
            break;
        }
        default:
            break;
        }

        auto next_ip = state.instruction_position + opcode.size();
        if (next_ip < bytecode_size)
            state.instruction_position = next_ip;
        else
            break;
    }

    if (end_of_last_block < bytecode_size)
        block_boundaries.append({ end_of_last_block, bytecode_size });

    quick_sort(block_boundaries, [](auto& a, auto& b) { return a.start < b.start; });

    return block_boundaries;
}
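
// Worked example (illustrative; exact opcode layout and sizes are assumptions):
// /a+b/ flattens to roughly
//     ip0: Compare 'a'
//     ip1: ForkJump -> ip0    (backwards jump landing on the block start)
//     ip2: Compare 'b'
// check_jump sees that the backwards jump at ip1 does not land past
// end_of_last_block, so it closes the loop block as [ip0, ip1); the trailing
// [ip2, bytecode_size) block is appended after the main loop, giving two
// blocks in total.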

static bool has_overlap(Vector<CompareTypeAndValuePair> const& lhs, Vector<CompareTypeAndValuePair> const& rhs)
{
    // We have to fully interpret the two sequences to determine if they overlap (that is, keep track of inversion state and what ranges they cover).
    bool inverse { false };
    bool temporary_inverse { false };
    bool reset_temporary_inverse { false };

    auto current_lhs_inversion_state = [&]() -> bool { return temporary_inverse ^ inverse; };

    RedBlackTree<u32, u32> lhs_ranges;
    RedBlackTree<u32, u32> lhs_negated_ranges;
    HashTable<CharClass> lhs_char_classes;
    HashTable<CharClass> lhs_negated_char_classes;

    auto has_any_unicode_property = false;
    HashTable<Unicode::GeneralCategory> lhs_unicode_general_categories;
    HashTable<Unicode::Property> lhs_unicode_properties;
    HashTable<Unicode::Script> lhs_unicode_scripts;
    HashTable<Unicode::Script> lhs_unicode_script_extensions;
    HashTable<Unicode::GeneralCategory> lhs_negated_unicode_general_categories;
    HashTable<Unicode::Property> lhs_negated_unicode_properties;
    HashTable<Unicode::Script> lhs_negated_unicode_scripts;
    HashTable<Unicode::Script> lhs_negated_unicode_script_extensions;

    auto any_unicode_property_matches = [&](u32 code_point) {
        if (any_of(lhs_negated_unicode_general_categories, [code_point](auto category) { return Unicode::code_point_has_general_category(code_point, category); }))
            return false;
        if (any_of(lhs_negated_unicode_properties, [code_point](auto property) { return Unicode::code_point_has_property(code_point, property); }))
            return false;
        if (any_of(lhs_negated_unicode_scripts, [code_point](auto script) { return Unicode::code_point_has_script(code_point, script); }))
            return false;
        if (any_of(lhs_negated_unicode_script_extensions, [code_point](auto script) { return Unicode::code_point_has_script_extension(code_point, script); }))
            return false;

        if (any_of(lhs_unicode_general_categories, [code_point](auto category) { return Unicode::code_point_has_general_category(code_point, category); }))
            return true;
        if (any_of(lhs_unicode_properties, [code_point](auto property) { return Unicode::code_point_has_property(code_point, property); }))
            return true;
        if (any_of(lhs_unicode_scripts, [code_point](auto script) { return Unicode::code_point_has_script(code_point, script); }))
            return true;
        if (any_of(lhs_unicode_script_extensions, [code_point](auto script) { return Unicode::code_point_has_script_extension(code_point, script); }))
            return true;
        return false;
    };

    auto range_contains = [&]<typename T>(T& value) -> bool {
        u32 start;
        u32 end;

        if constexpr (IsSame<T, CharRange>) {
            start = value.from;
            end = value.to;
        } else {
            start = value;
            end = value;
        }

        if (has_any_unicode_property) {
            // We have some properties, and a range is present
            // Instead of checking every single code point in the range, assume it's a match.
            return start != end || any_unicode_property_matches(start);
        }

        auto* max = lhs_ranges.find_smallest_not_below(start);
        return max && *max <= end;
    };

    auto char_class_contains = [&](CharClass const& value) -> bool {
        if (lhs_char_classes.contains(value))
            return true;

        if (lhs_negated_char_classes.contains(value))
            return false;

        // This char class might match something in the ranges we have, and checking that is far too expensive, so just bail out.
        return true;
    };

    for (auto const& pair : lhs) {
        if (reset_temporary_inverse) {
            reset_temporary_inverse = false;
            temporary_inverse = false;
        } else {
            reset_temporary_inverse = true;
        }

        switch (pair.type) {
        case CharacterCompareType::Inverse:
            inverse = !inverse;
            break;
        case CharacterCompareType::TemporaryInverse:
            temporary_inverse = true;
            reset_temporary_inverse = true;
            break;
        case CharacterCompareType::AnyChar:
            // Special case: if not inverted, AnyChar is always in the range.
            if (!current_lhs_inversion_state())
                return true;
            break;
        case CharacterCompareType::Char:
            if (!current_lhs_inversion_state())
                lhs_ranges.insert(pair.value, pair.value);
            else
                lhs_negated_ranges.insert(pair.value, pair.value);
            break;
        case CharacterCompareType::String:
            // FIXME: We just need to look at the last character of this string, but we only have the first character here.
            //        Just bail out to avoid false positives.
            return true;
        case CharacterCompareType::CharClass:
            if (!current_lhs_inversion_state())
                lhs_char_classes.set(static_cast<CharClass>(pair.value));
            else
                lhs_negated_char_classes.set(static_cast<CharClass>(pair.value));
            break;
        case CharacterCompareType::CharRange: {
            auto range = CharRange(pair.value);
            if (!current_lhs_inversion_state())
                lhs_ranges.insert(range.from, range.to);
            else
                lhs_negated_ranges.insert(range.from, range.to);
            break;
        }
        case CharacterCompareType::LookupTable:
            // We've transformed this into a series of ranges in flat_compares(), so bail out if we see it.
            return true;
        case CharacterCompareType::Reference:
            // We've handled this before coming here.
            break;
        case CharacterCompareType::Property:
            has_any_unicode_property = true;
            if (!current_lhs_inversion_state())
                lhs_unicode_properties.set(static_cast<Unicode::Property>(pair.value));
            else
                lhs_negated_unicode_properties.set(static_cast<Unicode::Property>(pair.value));
            break;
        case CharacterCompareType::GeneralCategory:
            has_any_unicode_property = true;
            if (!current_lhs_inversion_state())
                lhs_unicode_general_categories.set(static_cast<Unicode::GeneralCategory>(pair.value));
            else
                lhs_negated_unicode_general_categories.set(static_cast<Unicode::GeneralCategory>(pair.value));
            break;
        case CharacterCompareType::Script:
            has_any_unicode_property = true;
            if (!current_lhs_inversion_state())
                lhs_unicode_scripts.set(static_cast<Unicode::Script>(pair.value));
            else
                lhs_negated_unicode_scripts.set(static_cast<Unicode::Script>(pair.value));
            break;
        case CharacterCompareType::ScriptExtension:
            has_any_unicode_property = true;
            if (!current_lhs_inversion_state())
                lhs_unicode_script_extensions.set(static_cast<Unicode::Script>(pair.value));
            else
                lhs_negated_unicode_script_extensions.set(static_cast<Unicode::Script>(pair.value));
            break;
        case CharacterCompareType::And:
        case CharacterCompareType::Or:
        case CharacterCompareType::EndAndOr:
            // FIXME: These are too difficult to handle, so bail out.
            return true;
        case CharacterCompareType::Undefined:
        case CharacterCompareType::RangeExpressionDummy:
            // These do not occur in valid bytecode.
            VERIFY_NOT_REACHED();
        }
    }

    if constexpr (REGEX_DEBUG) {
        dbgln("lhs ranges:");
        for (auto it = lhs_ranges.begin(); it != lhs_ranges.end(); ++it)
            dbgln(" {}..{}", it.key(), *it);
        dbgln("lhs negated ranges:");
        for (auto it = lhs_negated_ranges.begin(); it != lhs_negated_ranges.end(); ++it)
            dbgln(" {}..{}", it.key(), *it);
    }

    for (auto const& pair : rhs) {
        if (reset_temporary_inverse) {
            reset_temporary_inverse = false;
            temporary_inverse = false;
        } else {
            reset_temporary_inverse = true;
        }

        dbgln_if(REGEX_DEBUG, "check {} ({})...", character_compare_type_name(pair.type), pair.value);

        switch (pair.type) {
        case CharacterCompareType::Inverse:
            inverse = !inverse;
            break;
        case CharacterCompareType::TemporaryInverse:
            temporary_inverse = true;
            reset_temporary_inverse = true;
            break;
        case CharacterCompareType::AnyChar:
            // Special case: if not inverted, AnyChar is always in the range.
            if (!current_lhs_inversion_state())
                return true;
            break;
        case CharacterCompareType::Char:
            if (current_lhs_inversion_state() ^ range_contains(pair.value))
                return true;
            break;
        case CharacterCompareType::String:
            // FIXME: We just need to look at the last character of this string, but we only have the first character here.
            //        Just bail out to avoid false positives.
            return true;
        case CharacterCompareType::CharClass:
            if (current_lhs_inversion_state() ^ char_class_contains(static_cast<CharClass>(pair.value)))
                return true;
            break;
        case CharacterCompareType::CharRange: {
            auto range = CharRange(pair.value);
            if (current_lhs_inversion_state() ^ range_contains(range))
                return true;
            break;
        }
        case CharacterCompareType::LookupTable:
            // We've transformed this into a series of ranges in flat_compares(), so bail out if we see it.
            return true;
        case CharacterCompareType::Reference:
            // We've handled this before coming here.
            break;
        case CharacterCompareType::Property:
            // The only reasonable scenario where we can check these properties without spending too much time is if:
            // - the ranges are empty
            // - the char classes are empty
            // - the unicode properties are empty or contain only this property
            if (!lhs_ranges.is_empty() || !lhs_negated_ranges.is_empty() || !lhs_char_classes.is_empty() || !lhs_negated_char_classes.is_empty())
                return true;

            if (has_any_unicode_property && !lhs_unicode_properties.is_empty() && !lhs_negated_unicode_properties.is_empty()) {
                if (current_lhs_inversion_state() ^ lhs_unicode_properties.contains(static_cast<Unicode::Property>(pair.value)))
                    return true;
                if (false == (current_lhs_inversion_state() ^ lhs_negated_unicode_properties.contains(static_cast<Unicode::Property>(pair.value))))
                    return true;
            }
            break;
        case CharacterCompareType::GeneralCategory:
            if (!lhs_ranges.is_empty() || !lhs_negated_ranges.is_empty() || !lhs_char_classes.is_empty() || !lhs_negated_char_classes.is_empty())
                return true;

            if (has_any_unicode_property && !lhs_unicode_general_categories.is_empty() && !lhs_negated_unicode_general_categories.is_empty()) {
                if (current_lhs_inversion_state() ^ lhs_unicode_general_categories.contains(static_cast<Unicode::GeneralCategory>(pair.value)))
                    return true;
                if (false == (current_lhs_inversion_state() ^ lhs_negated_unicode_general_categories.contains(static_cast<Unicode::GeneralCategory>(pair.value))))
                    return true;
            }
            break;
        case CharacterCompareType::Script:
            if (!lhs_ranges.is_empty() || !lhs_negated_ranges.is_empty() || !lhs_char_classes.is_empty() || !lhs_negated_char_classes.is_empty())
                return true;

            if (has_any_unicode_property && !lhs_unicode_scripts.is_empty() && !lhs_negated_unicode_scripts.is_empty()) {
                if (current_lhs_inversion_state() ^ lhs_unicode_scripts.contains(static_cast<Unicode::Script>(pair.value)))
                    return true;
                if (false == (current_lhs_inversion_state() ^ lhs_negated_unicode_scripts.contains(static_cast<Unicode::Script>(pair.value))))
                    return true;
            }
            break;
        case CharacterCompareType::ScriptExtension:
            if (!lhs_ranges.is_empty() || !lhs_negated_ranges.is_empty() || !lhs_char_classes.is_empty() || !lhs_negated_char_classes.is_empty())
                return true;

            if (has_any_unicode_property && !lhs_unicode_script_extensions.is_empty() && !lhs_negated_unicode_script_extensions.is_empty()) {
                if (current_lhs_inversion_state() ^ lhs_unicode_script_extensions.contains(static_cast<Unicode::Script>(pair.value)))
                    return true;
                if (false == (current_lhs_inversion_state() ^ lhs_negated_unicode_script_extensions.contains(static_cast<Unicode::Script>(pair.value))))
                    return true;
            }
            break;
        case CharacterCompareType::And:
        case CharacterCompareType::Or:
        case CharacterCompareType::EndAndOr:
            // FIXME: These are too difficult to handle, so bail out.
            return true;
        case CharacterCompareType::Undefined:
        case CharacterCompareType::RangeExpressionDummy:
            // These do not occur in valid bytecode.
            VERIFY_NOT_REACHED();
        }
    }

    return false;
}
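
// Illustrative behaviour (derived from the logic above, not exhaustive):
//     has_overlap({ Char 'a' },       { Char 'a' })        -> true
//     has_overlap({ Char 'a' },       { Char 'b' })        -> false
//     has_overlap({ CharClass Word }, { CharClass Digit }) -> true (conservative bail-out)
// Wherever a precise answer would be too expensive (strings, lookup tables,
// and/or chains), the function answers `true`: over-reporting overlap only
// costs us a missed rewrite, never an incorrect match.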

enum class AtomicRewritePreconditionResult {
    SatisfiedWithProperHeader,
    SatisfiedWithEmptyHeader,
    NotSatisfied,
};
static AtomicRewritePreconditionResult block_satisfies_atomic_rewrite_precondition(ByteCode const& bytecode, Block const& repeated_block, Block const& following_block)
{
    Vector<Vector<CompareTypeAndValuePair>> repeated_values;
    HashTable<size_t> active_capture_groups;
    MatchState state;
    auto has_seen_actionable_opcode = false;
    for (state.instruction_position = repeated_block.start; state.instruction_position < repeated_block.end;) {
        auto& opcode = bytecode.get_opcode(state);
        switch (opcode.opcode_id()) {
        case OpCodeId::Compare: {
            has_seen_actionable_opcode = true;
            auto compares = static_cast<OpCode_Compare const&>(opcode).flat_compares();
            if (repeated_values.is_empty() && any_of(compares, [](auto& compare) { return compare.type == CharacterCompareType::AnyChar; }))
                return AtomicRewritePreconditionResult::NotSatisfied;
            repeated_values.append(move(compares));
            break;
        }
        case OpCodeId::CheckBegin:
        case OpCodeId::CheckEnd:
            has_seen_actionable_opcode = true;
            if (repeated_values.is_empty())
                return AtomicRewritePreconditionResult::SatisfiedWithProperHeader;
            break;
        case OpCodeId::CheckBoundary:
            // FIXME: What should we do with these? For now, let's fail.
            return AtomicRewritePreconditionResult::NotSatisfied;
        case OpCodeId::Restore:
        case OpCodeId::GoBack:
            return AtomicRewritePreconditionResult::NotSatisfied;
        case OpCodeId::SaveRightCaptureGroup:
            active_capture_groups.set(static_cast<OpCode_SaveRightCaptureGroup const&>(opcode).id());
            break;
        case OpCodeId::SaveLeftCaptureGroup:
            active_capture_groups.set(static_cast<OpCode_SaveLeftCaptureGroup const&>(opcode).id());
            break;
        case OpCodeId::ForkJump:
        case OpCodeId::ForkReplaceJump:
        case OpCodeId::JumpNonEmpty:
            // We could attempt to recursively resolve the follow set, but pretending that this just goes nowhere is faster.
            if (!has_seen_actionable_opcode)
                return AtomicRewritePreconditionResult::NotSatisfied;
            break;
        default:
            break;
        }

        state.instruction_position += opcode.size();
    }
    dbgln_if(REGEX_DEBUG, "Found {} entries in reference", repeated_values.size());
    dbgln_if(REGEX_DEBUG, "Found {} active capture groups", active_capture_groups.size());

    bool following_block_has_at_least_one_compare = false;
    // Find the first compare in the following block; it must NOT match any of the values in `repeated_values'.
    auto final_instruction = following_block.start;
    for (state.instruction_position = following_block.start; state.instruction_position < following_block.end;) {
        final_instruction = state.instruction_position;
        auto& opcode = bytecode.get_opcode(state);
        switch (opcode.opcode_id()) {
        // Note: These have to exist since we're effectively repeating the following block as well
        case OpCodeId::SaveRightCaptureGroup:
            active_capture_groups.set(static_cast<OpCode_SaveRightCaptureGroup const&>(opcode).id());
            break;
        case OpCodeId::SaveLeftCaptureGroup:
            active_capture_groups.set(static_cast<OpCode_SaveLeftCaptureGroup const&>(opcode).id());
            break;
        case OpCodeId::Compare: {
            following_block_has_at_least_one_compare = true;
            // We found a compare, let's see what it has.
            auto compares = static_cast<OpCode_Compare const&>(opcode).flat_compares();
            if (compares.is_empty())
                break;

            if (any_of(compares, [&](auto& compare) {
                    return compare.type == CharacterCompareType::AnyChar
                        || (compare.type == CharacterCompareType::Reference && active_capture_groups.contains(compare.value));
                }))
                return AtomicRewritePreconditionResult::NotSatisfied;

            if (any_of(repeated_values, [&](auto& repeated_value) { return has_overlap(compares, repeated_value); }))
                return AtomicRewritePreconditionResult::NotSatisfied;

            return AtomicRewritePreconditionResult::SatisfiedWithProperHeader;
        }
        case OpCodeId::CheckBegin:
        case OpCodeId::CheckEnd:
            return AtomicRewritePreconditionResult::SatisfiedWithProperHeader; // Nothing can match the end!
        case OpCodeId::CheckBoundary:
            // FIXME: What should we do with these? For now, consider them a failure.
            return AtomicRewritePreconditionResult::NotSatisfied;
        case OpCodeId::ForkJump:
        case OpCodeId::ForkReplaceJump:
        case OpCodeId::JumpNonEmpty:
            // See note in the previous switch, same cases.
            if (!following_block_has_at_least_one_compare)
                return AtomicRewritePreconditionResult::NotSatisfied;
            break;
        default:
            break;
        }

        state.instruction_position += opcode.size();
    }

    // If the following block falls through, we can't rewrite it.
    state.instruction_position = final_instruction;
    switch (bytecode.get_opcode(state).opcode_id()) {
    case OpCodeId::Jump:
    case OpCodeId::JumpNonEmpty:
    case OpCodeId::ForkJump:
    case OpCodeId::ForkReplaceJump:
        break;
    default:
        return AtomicRewritePreconditionResult::NotSatisfied;
    }

    if (following_block_has_at_least_one_compare)
        return AtomicRewritePreconditionResult::SatisfiedWithProperHeader;
    return AtomicRewritePreconditionResult::SatisfiedWithEmptyHeader;
}
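
// Illustrative outcomes (assumed bytecode shapes): for /a+b/ the repeated
// block compares 'a' and the following block starts with a compare for 'b';
// 'a' and 'b' don't overlap, so the result is SatisfiedWithProperHeader and
// the loop may become atomic. For /a+a/ the first compare of the follow
// overlaps the repeated values, so the result is NotSatisfied and the loop
// keeps its ordinary backtracking forks.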

template<typename Parser>
bool Regex<Parser>::attempt_rewrite_entire_match_as_substring_search(BasicBlockList const& basic_blocks)
{
    // If there are no jumps, we can probably rewrite this as a substring search (Compare { string = str }).
    if (basic_blocks.size() > 1)
        return false;

    if (basic_blocks.is_empty()) {
        parser_result.optimization_data.pure_substring_search = ""sv;
        return true; // Empty regex, sure.
    }

    auto& bytecode = parser_result.bytecode;
    auto is_unicode = parser_result.options.has_flag_set(AllFlags::Unicode);

    // We have a single basic block, let's see if it's a series of character or string compares.
    StringBuilder final_string;
    MatchState state;
    while (state.instruction_position < bytecode.size()) {
        auto& opcode = bytecode.get_opcode(state);
        switch (opcode.opcode_id()) {
        case OpCodeId::Compare: {
            auto& compare = static_cast<OpCode_Compare const&>(opcode);
            for (auto& flat_compare : compare.flat_compares()) {
                if (flat_compare.type != CharacterCompareType::Char)
                    return false;

                if (is_unicode || flat_compare.value <= 0x7f)
                    final_string.append_code_point(flat_compare.value);
                else
                    final_string.append(bit_cast<char>(static_cast<u8>(flat_compare.value)));
            }
            break;
        }
        default:
            return false;
        }

        state.instruction_position += opcode.size();
    }

    parser_result.optimization_data.pure_substring_search = final_string.to_byte_string();
    return true;
}
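
// Illustrative (assumed): /hello/ compiles to a single block of Char
// compares, so the whole match collapses into a plain substring search for
// "hello" recorded in optimization_data; /hel+o/ introduces a fork loop (and
// thus more than one basic block), so this rewrite is skipped for it.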

template<typename Parser>
void Regex<Parser>::attempt_rewrite_loops_as_atomic_groups(BasicBlockList const& basic_blocks)
{
    auto& bytecode = parser_result.bytecode;
    if constexpr (REGEX_DEBUG) {
        RegexDebug dbg;
        dbg.print_bytecode(*this);
        for (auto const& block : basic_blocks)
            dbgln("block from {} to {}", block.start, block.end);
    }

    // A pattern such as:
    //     bb0       |  RE0
    //               |  ForkX bb0
    //     -------------------------
    //     bb1       |  RE1
    // can be rewritten as:
    //     -------------------------
    //     bb0       | RE0
    //               | ForkReplaceX bb0
    //     -------------------------
    //     bb1       | RE1
    // provided that first(RE1) not-in end(RE0), which is to say
    // that RE1 cannot start with whatever RE0 has matched (ever).
    //
    // Alternatively, a second form of this pattern can also occur:
    //     bb0 | *
    //         | ForkX bb2
    //     ------------------------
    //     bb1 | RE0
    //         | Jump bb0
    //     ------------------------
    //     bb2 | RE1
    // which can be transformed (with the same preconditions) to:
    //     bb0 | *
    //         | ForkReplaceX bb2
    //     ------------------------
    //     bb1 | RE0
    //         | Jump bb0
    //     ------------------------
    //     bb2 | RE1

    enum class AlternateForm {
        DirectLoopWithoutHeader,               // loop without proper header, a block forking to itself. i.e. the first form.
        DirectLoopWithoutHeaderAndEmptyFollow, // loop without proper header, a block forking to itself. i.e. the first form but with RE1 being empty.
        DirectLoopWithHeader,                  // loop with proper header, i.e. the second form.
    };
    struct CandidateBlock {
        Block forking_block;
        Optional<Block> new_target_block;
        AlternateForm form;
    };
    Vector<CandidateBlock> candidate_blocks;

    auto is_an_eligible_jump = [](OpCode const& opcode, size_t ip, size_t block_start, AlternateForm alternate_form) {
        switch (opcode.opcode_id()) {
        case OpCodeId::JumpNonEmpty: {
            auto const& op = static_cast<OpCode_JumpNonEmpty const&>(opcode);
            auto form = op.form();
            if (form != OpCodeId::Jump && alternate_form == AlternateForm::DirectLoopWithHeader)
                return false;
            if (form != OpCodeId::ForkJump && form != OpCodeId::ForkStay && alternate_form == AlternateForm::DirectLoopWithoutHeader)
                return false;
            return op.offset() + ip + opcode.size() == block_start;
        }
        case OpCodeId::ForkJump:
            if (alternate_form == AlternateForm::DirectLoopWithHeader)
                return false;
            return static_cast<OpCode_ForkJump const&>(opcode).offset() + ip + opcode.size() == block_start;
        case OpCodeId::ForkStay:
            if (alternate_form == AlternateForm::DirectLoopWithHeader)
                return false;
            return static_cast<OpCode_ForkStay const&>(opcode).offset() + ip + opcode.size() == block_start;
        case OpCodeId::Jump:
            // Infinite loop does *not* produce forks.
            if (alternate_form == AlternateForm::DirectLoopWithoutHeader)
                return false;
            if (alternate_form == AlternateForm::DirectLoopWithHeader)
                return static_cast<OpCode_Jump const&>(opcode).offset() + ip + opcode.size() == block_start;
            VERIFY_NOT_REACHED();
        default:
            return false;
        }
    };
    for (size_t i = 0; i < basic_blocks.size(); ++i) {
        auto forking_block = basic_blocks[i];
        Optional<Block> fork_fallback_block;
        if (i + 1 < basic_blocks.size())
            fork_fallback_block = basic_blocks[i + 1];
        MatchState state;
        // Check if the last instruction in this block is a jump to the block itself:
        {
            state.instruction_position = forking_block.end;
            auto& opcode = bytecode.get_opcode(state);
            if (is_an_eligible_jump(opcode, state.instruction_position, forking_block.start, AlternateForm::DirectLoopWithoutHeader)) {
                // We've found RE0 (and RE1 is just the following block, if any), let's see if the precondition applies.
                // If RE1 is empty, there's no first(RE1), so this is an automatic pass.
                if (!fork_fallback_block.has_value()
                    || (fork_fallback_block->end == fork_fallback_block->start && block_satisfies_atomic_rewrite_precondition(bytecode, forking_block, *fork_fallback_block) != AtomicRewritePreconditionResult::NotSatisfied)) {
                    candidate_blocks.append({ forking_block, fork_fallback_block, AlternateForm::DirectLoopWithoutHeader });
                    break;
                }

                auto precondition = block_satisfies_atomic_rewrite_precondition(bytecode, forking_block, *fork_fallback_block);
                if (precondition == AtomicRewritePreconditionResult::SatisfiedWithProperHeader) {
                    candidate_blocks.append({ forking_block, fork_fallback_block, AlternateForm::DirectLoopWithoutHeader });
                    break;
                }
                if (precondition == AtomicRewritePreconditionResult::SatisfiedWithEmptyHeader) {
                    candidate_blocks.append({ forking_block, fork_fallback_block, AlternateForm::DirectLoopWithoutHeaderAndEmptyFollow });
                    break;
                }
            }
        }
        // Check if the last instruction in the last block is a direct jump to this block
        if (fork_fallback_block.has_value()) {
            state.instruction_position = fork_fallback_block->end;
            auto& opcode = bytecode.get_opcode(state);
            if (is_an_eligible_jump(opcode, state.instruction_position, forking_block.start, AlternateForm::DirectLoopWithHeader)) {
                // We've found bb1 and bb0, let's just make sure that bb0 forks to bb2.
                state.instruction_position = forking_block.end;
                auto& opcode = bytecode.get_opcode(state);
                if (opcode.opcode_id() == OpCodeId::ForkJump || opcode.opcode_id() == OpCodeId::ForkStay) {
                    Optional<Block> block_following_fork_fallback;
                    if (i + 2 < basic_blocks.size())
                        block_following_fork_fallback = basic_blocks[i + 2];
                    if (!block_following_fork_fallback.has_value()
                        || block_satisfies_atomic_rewrite_precondition(bytecode, *fork_fallback_block, *block_following_fork_fallback) != AtomicRewritePreconditionResult::NotSatisfied) {
                        candidate_blocks.append({ forking_block, {}, AlternateForm::DirectLoopWithHeader });
                        break;
                    }
                }
            }
        }
    }

    dbgln_if(REGEX_DEBUG, "Found {} candidate blocks", candidate_blocks.size());
    if (candidate_blocks.is_empty()) {
        dbgln_if(REGEX_DEBUG, "Failed to find anything for {}", pattern_value);
        return;
    }

    RedBlackTree<size_t, size_t> needed_patches;

    // Reverse the blocks, so we can patch the bytecode without messing with later patches.
    quick_sort(candidate_blocks, [](auto& a, auto& b) { return b.forking_block.start > a.forking_block.start; });
    for (auto& candidate : candidate_blocks) {
        // Note that both forms share a ForkReplace patch in forking_block.
        // Patch the ForkX in forking_block to be a ForkReplaceX instead.
        auto& opcode_id = bytecode[candidate.forking_block.end];
        if (opcode_id == (ByteCodeValueType)OpCodeId::ForkStay) {
            opcode_id = (ByteCodeValueType)OpCodeId::ForkReplaceStay;
        } else if (opcode_id == (ByteCodeValueType)OpCodeId::ForkJump) {
            opcode_id = (ByteCodeValueType)OpCodeId::ForkReplaceJump;
        } else if (opcode_id == (ByteCodeValueType)OpCodeId::JumpNonEmpty) {
            auto& jump_opcode_id = bytecode[candidate.forking_block.end + 3];
            if (jump_opcode_id == (ByteCodeValueType)OpCodeId::ForkStay)
                jump_opcode_id = (ByteCodeValueType)OpCodeId::ForkReplaceStay;
            else if (jump_opcode_id == (ByteCodeValueType)OpCodeId::ForkJump)
                jump_opcode_id = (ByteCodeValueType)OpCodeId::ForkReplaceJump;
            else
                VERIFY_NOT_REACHED();
        } else {
            VERIFY_NOT_REACHED();
        }
    }

    if (!needed_patches.is_empty()) {
        MatchState state;
        auto bytecode_size = bytecode.size();
        state.instruction_position = 0;
        struct Patch {
            ssize_t value;
            size_t offset;
            bool should_negate { false };
        };
        for (;;) {
            if (state.instruction_position >= bytecode_size)
                break;

            auto& opcode = bytecode.get_opcode(state);
            Stack<Patch, 2> patch_points;

            switch (opcode.opcode_id()) {
            case OpCodeId::Jump:
                patch_points.push({ static_cast<OpCode_Jump const&>(opcode).offset(), state.instruction_position + 1 });
                break;
            case OpCodeId::JumpNonEmpty:
                patch_points.push({ static_cast<OpCode_JumpNonEmpty const&>(opcode).offset(), state.instruction_position + 1 });
                patch_points.push({ static_cast<OpCode_JumpNonEmpty const&>(opcode).checkpoint(), state.instruction_position + 2 });
                break;
            case OpCodeId::ForkJump:
                patch_points.push({ static_cast<OpCode_ForkJump const&>(opcode).offset(), state.instruction_position + 1 });
                break;
            case OpCodeId::ForkStay:
                patch_points.push({ static_cast<OpCode_ForkStay const&>(opcode).offset(), state.instruction_position + 1 });
                break;
            case OpCodeId::Repeat:
                patch_points.push({ -(ssize_t) static_cast<OpCode_Repeat const&>(opcode).offset(), state.instruction_position + 1, true });
                break;
            default:
                break;
            }

            while (!patch_points.is_empty()) {
                auto& patch_point = patch_points.top();
                auto target_offset = patch_point.value + state.instruction_position + opcode.size();

                constexpr auto do_patch = [](auto& patch_it, auto& patch_point, auto& target_offset, auto& bytecode, auto ip) {
                    if (patch_it.key() == ip)
                        return;

                    if (patch_point.value < 0 && target_offset <= patch_it.key() && ip > patch_it.key())
                        bytecode[patch_point.offset] += (patch_point.should_negate ? 1 : -1) * (*patch_it);
                    else if (patch_point.value > 0 && target_offset >= patch_it.key() && ip < patch_it.key())
                        bytecode[patch_point.offset] += (patch_point.should_negate ? -1 : 1) * (*patch_it);
                };

                if (auto patch_it = needed_patches.find_largest_not_above_iterator(target_offset); !patch_it.is_end())
                    do_patch(patch_it, patch_point, target_offset, bytecode, state.instruction_position);
                else if (auto patch_it = needed_patches.find_largest_not_above_iterator(state.instruction_position); !patch_it.is_end())
                    do_patch(patch_it, patch_point, target_offset, bytecode, state.instruction_position);

                patch_points.pop();
            }

            state.instruction_position += opcode.size();
        }
    }

    if constexpr (REGEX_DEBUG) {
        warnln("Transformed to:");
        RegexDebug dbg;
        dbg.print_bytecode(*this);
    }
}
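
// Illustrative effect (assumed ForkReplaceX semantics): in /a+b/ the ForkJump
// closing the a+ loop becomes a ForkReplaceJump, so each extra 'a' replaces
// the previously saved fork state instead of pushing a new one. When 'b'
// later fails to match, there is no stack of per-iteration forks to unwind
// into, which is exactly the behaviour of an atomic group, i.e. (?>a+)b.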

void Optimizer::append_alternation(ByteCode& target, ByteCode&& left, ByteCode&& right)
{
    Array<ByteCode, 2> alternatives;
    alternatives[0] = move(left);
    alternatives[1] = move(right);

    append_alternation(target, alternatives);
}
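
// Usage sketch (assumed call site, for illustration only): when the parser
// lowers `left|right`, it can hand both compiled sides to this overload,
//     Optimizer::append_alternation(target, move(left_bytecode), move(right_bytecode));
// deferring all layout decisions to the span-based overload below.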

template<typename K, typename V, typename KTraits>
using OrderedHashMapForTrie = OrderedHashMap<K, V, KTraits>;

void Optimizer::append_alternation(ByteCode& target, Span<ByteCode> alternatives)
{
    if (alternatives.size() == 0)
        return;

    if (alternatives.size() == 1)
        return target.extend(move(alternatives[0]));

    if (all_of(alternatives, [](auto& x) { return x.is_empty(); }))
        return;

    for (auto& entry : alternatives)
        entry.flatten();

#if REGEX_DEBUG
    ScopeLogger<true> log;
    warnln("Alternations:");
    RegexDebug dbg;
    for (auto& entry : alternatives) {
        warnln("----------");
        dbg.print_bytecode(entry);
    }
    ScopeGuard print_at_end {
        [&] {
            warnln("======================");
            RegexDebug dbg;
            dbg.print_bytecode(target);
        }
    };
#endif

    // First, find incoming jump edges.
    // We need them for two reasons:
    // - We need to distinguish between insn-A-jumped-to-by-insn-B and insn-A-jumped-to-by-insn-C (as otherwise we'd break trie invariants)
    // - We need to know which jumps to patch when we're done

    struct JumpEdge {
        Span<ByteCodeValueType const> jump_insn;
    };
    Vector<HashMap<size_t, Vector<JumpEdge>>> incoming_jump_edges_for_each_alternative;
    incoming_jump_edges_for_each_alternative.resize(alternatives.size());

    auto has_any_backwards_jump = false;

    MatchState state;
    for (size_t i = 0; i < alternatives.size(); ++i) {
        auto& alternative = alternatives[i];
        // Add a jump to the "end" of the block; this is implicit in the bytecode, but we need it to be explicit in the trie.
        // Jump{offset=0}
        alternative.append(static_cast<ByteCodeValueType>(OpCodeId::Jump));
        alternative.append(0);

        auto& incoming_jump_edges = incoming_jump_edges_for_each_alternative[i];

        auto alternative_bytes = alternative.spans<1>().singular_span();
        for (state.instruction_position = 0; state.instruction_position < alternative.size();) {
            auto& opcode = alternative.get_opcode(state);
            auto opcode_bytes = alternative_bytes.slice(state.instruction_position, opcode.size());

            switch (opcode.opcode_id()) {
            case OpCodeId::Jump:
                incoming_jump_edges.ensure(static_cast<OpCode_Jump const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_Jump const&>(opcode).offset() < 0;
                break;
            case OpCodeId::JumpNonEmpty:
                incoming_jump_edges.ensure(static_cast<OpCode_JumpNonEmpty const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_JumpNonEmpty const&>(opcode).offset() < 0;
                break;
            case OpCodeId::ForkJump:
                incoming_jump_edges.ensure(static_cast<OpCode_ForkJump const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_ForkJump const&>(opcode).offset() < 0;
                break;
            case OpCodeId::ForkStay:
                incoming_jump_edges.ensure(static_cast<OpCode_ForkStay const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_ForkStay const&>(opcode).offset() < 0;
                break;
            case OpCodeId::ForkReplaceJump:
                incoming_jump_edges.ensure(static_cast<OpCode_ForkReplaceJump const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_ForkReplaceJump const&>(opcode).offset() < 0;
                break;
            case OpCodeId::ForkReplaceStay:
                incoming_jump_edges.ensure(static_cast<OpCode_ForkReplaceStay const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_ForkReplaceStay const&>(opcode).offset() < 0;
                break;
            case OpCodeId::Repeat:
                incoming_jump_edges.ensure(state.instruction_position - static_cast<OpCode_Repeat const&>(opcode).offset()).append({ opcode_bytes });
                has_any_backwards_jump = true;
                break;
            default:
                break;
            }
            state.instruction_position += opcode.size();
        }
    }

    struct QualifiedIP {
        size_t alternative_index;
        size_t instruction_position;
    };
    using Tree = Trie<DisjointSpans<ByteCodeValueType const>, Vector<QualifiedIP>, Traits<DisjointSpans<ByteCodeValueType const>>, void, OrderedHashMapForTrie>;
    Tree trie { {} }; // Root node is empty, key{ instruction_bytes, dependent_instruction_bytes... } -> IP

    size_t common_hits = 0;
    size_t total_nodes = 0;
    size_t total_bytecode_entries_in_tree = 0;
    for (size_t i = 0; i < alternatives.size(); ++i) {
        auto& alternative = alternatives[i];
        auto& incoming_jump_edges = incoming_jump_edges_for_each_alternative[i];

        auto* active_node = &trie;
        auto alternative_span = alternative.spans<1>().singular_span();
        for (state.instruction_position = 0; state.instruction_position < alternative_span.size();) {
            total_nodes += 1;
            auto& opcode = alternative.get_opcode(state);
            auto opcode_bytes = alternative_span.slice(state.instruction_position, opcode.size());

            Vector<Span<ByteCodeValueType const>> node_key_bytes;
            node_key_bytes.append(opcode_bytes);

            if (auto edges = incoming_jump_edges.get(state.instruction_position); edges.has_value()) {
                for (auto& edge : *edges)
                    node_key_bytes.append(edge.jump_insn);
            }

            active_node = static_cast<decltype(active_node)>(MUST(active_node->ensure_child(DisjointSpans<ByteCodeValueType const> { move(node_key_bytes) })));

            if (active_node->has_metadata()) {
                active_node->metadata_value().append({ i, state.instruction_position });
                common_hits += 1;
            } else {
                active_node->set_metadata(Vector<QualifiedIP> { QualifiedIP { i, state.instruction_position } });
                total_bytecode_entries_in_tree += opcode.size();
            }
            state.instruction_position += opcode.size();
        }
    }

    if constexpr (REGEX_DEBUG) {
        Function<void(decltype(trie)&, size_t)> print_tree = [&](decltype(trie)& node, size_t indent = 0) mutable {
            ByteString name = "(no ip)";
            ByteString insn;
            if (node.has_metadata()) {
                name = ByteString::formatted(
                    "{}@{} ({} node{})",
                    node.metadata_value().first().instruction_position,
                    node.metadata_value().first().alternative_index,
                    node.metadata_value().size(),
                    node.metadata_value().size() == 1 ? "" : "s");

                MatchState state;
                state.instruction_position = node.metadata_value().first().instruction_position;
                auto& opcode = alternatives[node.metadata_value().first().alternative_index].get_opcode(state);
                insn = ByteString::formatted("{} {}", opcode.to_byte_string(), opcode.arguments_string());
            }
            dbgln("{:->{}}| {} -- {}", "", indent * 2, name, insn);
            for (auto& child : node.children())
                print_tree(static_cast<decltype(trie)&>(*child.value), indent + 1);
        };

        print_tree(trie, 0);
    }

    // This is really only worth it if we don't blow up the size by the 2-extra-instruction-per-node scheme; similarly, if no nodes are shared, we're better off not using a tree.
    auto tree_cost = (total_nodes - common_hits) * 2;
    auto chain_cost = total_nodes + alternatives.size() * 2;
    dbgln_if(REGEX_DEBUG, "Total nodes: {}, common hits: {} (tree cost = {}, chain cost = {})", total_nodes, common_hits, tree_cost, chain_cost);

    if (common_hits == 0 || tree_cost > chain_cost) {
        // It's better to lay these out as a normal sequence of instructions.
        auto patch_start = target.size();
        for (size_t i = 1; i < alternatives.size(); ++i) {
            target.empend(static_cast<ByteCodeValueType>(OpCodeId::ForkJump));
            target.empend(0u); // To be filled later.
        }

        size_t size_to_jump = 0;
        bool seen_one_empty = false;
        for (size_t i = alternatives.size(); i > 0; --i) {
            auto& entry = alternatives[i - 1];
            if (entry.is_empty()) {
                if (seen_one_empty)
                    continue;
                seen_one_empty = true;
            }

            auto is_first = i == 1;
            auto instruction_size = entry.size() + (is_first ? 0 : 2); // Jump; -> +2
            size_to_jump += instruction_size;

            if (!is_first)
                target[patch_start + (i - 2) * 2 + 1] = size_to_jump + (alternatives.size() - i) * 2;

            dbgln_if(REGEX_DEBUG, "{} size = {}, cum={}", i - 1, instruction_size, size_to_jump);
        }

        seen_one_empty = false;
        for (size_t i = alternatives.size(); i > 0; --i) {
            auto& chunk = alternatives[i - 1];
            if (chunk.is_empty()) {
                if (seen_one_empty)
                    continue;
                seen_one_empty = true;
            }

            ByteCode* previous_chunk = nullptr;
            size_t j = i - 1;
            auto seen_one_empty_before = chunk.is_empty();
            while (j >= 1) {
                --j;
                auto& candidate_chunk = alternatives[j];
                if (candidate_chunk.is_empty()) {
                    if (seen_one_empty_before)
                        continue;
                }
                previous_chunk = &candidate_chunk;
                break;
            }

            size_to_jump -= chunk.size() + (previous_chunk ? 2 : 0);

            target.extend(move(chunk));
            target.empend(static_cast<ByteCodeValueType>(OpCodeId::Jump));
            target.empend(size_to_jump); // Jump to the _END label
        }
    } else {
        target.ensure_capacity(total_bytecode_entries_in_tree + common_hits * 6);

        auto node_is = [](Tree const* node, QualifiedIP ip) {
            if (!node->has_metadata())
                return false;

            for (auto& node_ip : node->metadata_value()) {
                if (node_ip.alternative_index == ip.alternative_index && node_ip.instruction_position == ip.instruction_position)
                    return true;
            }

            return false;
        };

        struct Patch {
            QualifiedIP source_ip;
            size_t target_ip;
            bool done { false };
        };
        Vector<Patch> patch_locations;
        patch_locations.ensure_capacity(total_nodes);

        auto add_patch_point = [&](Tree const* node, size_t target_ip) {
            if (!node->has_metadata())
                return;
            auto& node_ip = node->metadata_value().first();
            patch_locations.append({ node_ip, target_ip });
        };

        Queue<Tree*> nodes_to_visit;
        nodes_to_visit.enqueue(&trie);

        HashMap<size_t, NonnullOwnPtr<RedBlackTree<u64, u64>>> instruction_positions;
        if (has_any_backwards_jump)
            MUST(instruction_positions.try_ensure_capacity(alternatives.size()));

        auto ip_mapping_for_alternative = [&](size_t i) -> RedBlackTree<u64, u64>& {
            return *instruction_positions.ensure(i, [] {
                return make<RedBlackTree<u64, u64>>();
            });
        };

        // each node:
        //   node.re
        //   forkjump child1
        //   forkjump child2
        //   ...
        while (!nodes_to_visit.is_empty()) {
            auto const* node = nodes_to_visit.dequeue();
            for (auto& patch : patch_locations) {
                if (!patch.done && node_is(node, patch.source_ip)) {
                    auto value = static_cast<ByteCodeValueType>(target.size() - patch.target_ip - 1);
                    target[patch.target_ip] = value;
                    patch.done = true;
                }
            }

            if (!node->value().individual_spans().is_empty()) {
                auto insn_bytes = node->value().individual_spans().first();

                target.ensure_capacity(target.size() + insn_bytes.size());
                state.instruction_position = target.size();
                target.append(insn_bytes);

                if (has_any_backwards_jump) {
                    for (auto& ip : node->metadata_value())
                        ip_mapping_for_alternative(ip.alternative_index).insert(ip.instruction_position, state.instruction_position);
                }

                auto& opcode = target.get_opcode(state);

                ssize_t jump_offset;
                auto is_jump = true;
                auto patch_location = state.instruction_position + 1;

                switch (opcode.opcode_id()) {
                case OpCodeId::Jump:
                    jump_offset = static_cast<OpCode_Jump const&>(opcode).offset();
                    break;
                case OpCodeId::JumpNonEmpty:
                    jump_offset = static_cast<OpCode_JumpNonEmpty const&>(opcode).offset();
                    break;
                case OpCodeId::ForkJump:
                    jump_offset = static_cast<OpCode_ForkJump const&>(opcode).offset();
                    break;
                case OpCodeId::ForkStay:
                    jump_offset = static_cast<OpCode_ForkStay const&>(opcode).offset();
                    break;
                case OpCodeId::ForkReplaceJump:
                    jump_offset = static_cast<OpCode_ForkReplaceJump const&>(opcode).offset();
                    break;
                case OpCodeId::ForkReplaceStay:
                    jump_offset = static_cast<OpCode_ForkReplaceStay const&>(opcode).offset();
                    break;
                case OpCodeId::Repeat:
                    jump_offset = static_cast<ssize_t>(0) - static_cast<ssize_t>(static_cast<OpCode_Repeat const&>(opcode).offset()) - static_cast<ssize_t>(opcode.size());
                    break;
                default:
                    is_jump = false;
                    break;
                }

                if (is_jump) {
                    VERIFY(node->has_metadata());
                    QualifiedIP ip = node->metadata_value().first();
                    auto intended_jump_ip = ip.instruction_position + jump_offset + opcode.size();
                    if (jump_offset < 0) {
                        VERIFY(has_any_backwards_jump);
                        // We should've already seen this instruction, so we can just patch it in.
                        auto& ip_mapping = ip_mapping_for_alternative(ip.alternative_index);
                        auto target_ip = ip_mapping.find(intended_jump_ip);
                        if (!target_ip) {
                            RegexDebug dbg;
                            size_t x = 0;
                            for (auto& entry : alternatives) {
                                warnln("----------- {} ----------", x++);
                                dbg.print_bytecode(entry);
                            }
                            dbgln("Regex Tree / Unknown backwards jump: {}@{} -> {}",
                                ip.instruction_position,
                                ip.alternative_index,
                                intended_jump_ip);
                            VERIFY_NOT_REACHED();
                        }
                        target[patch_location] = static_cast<ByteCodeValueType>(*target_ip - patch_location - 1);
                    } else {
                        patch_locations.append({ QualifiedIP { ip.alternative_index, intended_jump_ip }, patch_location });
                    }
                }
            }

            for (auto const& child : node->children()) {
                auto* child_node = static_cast<Tree*>(child.value.ptr());
                target.append(static_cast<ByteCodeValueType>(OpCodeId::ForkJump));
                add_patch_point(child_node, target.size());
                target.append(static_cast<ByteCodeValueType>(0));
                nodes_to_visit.enqueue(child_node);
            }
        }

        for (auto& patch : patch_locations) {
            if (patch.done)
                continue;

            auto& alternative = alternatives[patch.source_ip.alternative_index];
            if (patch.source_ip.instruction_position >= alternative.size()) {
                // This just wants to jump to the end of the alternative, which is fine.
                // Patch it to jump to the end of the target instead.
                target[patch.target_ip] = static_cast<ByteCodeValueType>(target.size() - patch.target_ip - 1);
                continue;
            }

            dbgln("Regex Tree / Unpatched jump: {}@{} -> {}@{}",
                patch.source_ip.instruction_position,
                patch.source_ip.alternative_index,
                patch.target_ip,
                target[patch.target_ip]);
            VERIFY_NOT_REACHED();
        }
    }
}
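
// Illustrative (assumed): for /abc|abd/ the two alternatives share the "ab"
// prefix, so the trie path for 'a' and 'b' is laid out once, with a ForkJump
// fanning out to the distinct 'c' and 'd' tails; for /x|y/ nothing is shared
// (common_hits == 0), so the plain ForkJump chain branch above is chosen
// instead of the tree layout.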

enum class LookupTableInsertionOutcome {
    Successful,
    ReplaceWithAnyChar,
    TemporaryInversionNeeded,
    PermanentInversionNeeded,
    FlushOnInsertion,
    FinishFlushOnInsertion,
    CannotPlaceInTable,
};
static LookupTableInsertionOutcome insert_into_lookup_table(RedBlackTree<ByteCodeValueType, CharRange>& table, CompareTypeAndValuePair pair)
{
    switch (pair.type) {
    case CharacterCompareType::Inverse:
        return LookupTableInsertionOutcome::PermanentInversionNeeded;
    case CharacterCompareType::TemporaryInverse:
        return LookupTableInsertionOutcome::TemporaryInversionNeeded;
    case CharacterCompareType::AnyChar:
        return LookupTableInsertionOutcome::ReplaceWithAnyChar;
    case CharacterCompareType::CharClass:
        return LookupTableInsertionOutcome::CannotPlaceInTable;
    case CharacterCompareType::Char:
        table.insert(pair.value, { (u32)pair.value, (u32)pair.value });
        break;
    case CharacterCompareType::CharRange: {
        CharRange range { pair.value };
        table.insert(range.from, range);
        break;
    }
    case CharacterCompareType::EndAndOr:
        return LookupTableInsertionOutcome::FinishFlushOnInsertion;
    case CharacterCompareType::And:
        return LookupTableInsertionOutcome::FlushOnInsertion;
    case CharacterCompareType::Reference:
    case CharacterCompareType::Property:
    case CharacterCompareType::GeneralCategory:
    case CharacterCompareType::Script:
    case CharacterCompareType::ScriptExtension:
    case CharacterCompareType::Or:
        return LookupTableInsertionOutcome::CannotPlaceInTable;
    case CharacterCompareType::Undefined:
    case CharacterCompareType::RangeExpressionDummy:
    case CharacterCompareType::String:
    case CharacterCompareType::LookupTable:
        VERIFY_NOT_REACHED();
    }

    return LookupTableInsertionOutcome::Successful;
}
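
// Illustrative (taken from the cases above): lowering the class [a-cx],
//     insert_into_lookup_table(table, { CharacterCompareType::CharRange, 'a'..'c' }); // Successful
//     insert_into_lookup_table(table, { CharacterCompareType::Char, 'x' });           // Successful
// leaves { 'a' -> ['a','c'], 'x' -> ['x','x'] } in the tree, while a \w
// (CharClass) returns CannotPlaceInTable and must be emitted by the caller
// as a separate compare argument.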

void Optimizer::append_character_class(ByteCode& target, Vector<CompareTypeAndValuePair>&& pairs)
{
    ByteCode arguments;
    size_t argument_count = 0;

    if (pairs.size() <= 1) {
        for (auto& pair : pairs) {
            arguments.append(to_underlying(pair.type));
            if (pair.type != CharacterCompareType::AnyChar
                && pair.type != CharacterCompareType::TemporaryInverse
                && pair.type != CharacterCompareType::Inverse
                && pair.type != CharacterCompareType::And
                && pair.type != CharacterCompareType::Or
                && pair.type != CharacterCompareType::EndAndOr)
                arguments.append(pair.value);
            ++argument_count;
        }
    } else {
        RedBlackTree<ByteCodeValueType, CharRange> table;
        RedBlackTree<ByteCodeValueType, CharRange> inverted_table;
        auto* current_table = &table;
        auto* current_inverted_table = &inverted_table;
        bool invert_for_next_iteration = false;
        bool is_currently_inverted = false;

        auto flush_tables = [&] {
            auto append_table = [&](auto& table) {
                ++argument_count;
                arguments.append(to_underlying(CharacterCompareType::LookupTable));
                auto size_index = arguments.size();
                arguments.append(0);
                Optional<CharRange> active_range;
                size_t range_count = 0;
                for (auto& range : table) {
                    if (!active_range.has_value()) {
                        active_range = range;
                        continue;
                    }

                    if (range.from <= active_range->to + 1 && range.to + 1 >= active_range->from) {
                        active_range = CharRange { min(range.from, active_range->from), max(range.to, active_range->to) };
                    } else {
                        ++range_count;
                        arguments.append(active_range.release_value());
                        active_range = range;
                    }
                }

                if (active_range.has_value()) {
                    ++range_count;
                    arguments.append(active_range.release_value());
                }
                arguments[size_index] = range_count;
            };

            auto contains_regular_table = !table.is_empty();
            auto contains_inverted_table = !inverted_table.is_empty();
            if (contains_regular_table)
                append_table(table);

            if (contains_inverted_table) {
                ++argument_count;
                arguments.append(to_underlying(CharacterCompareType::TemporaryInverse));
                append_table(inverted_table);
            }

            table.clear();
            inverted_table.clear();
        };

        auto flush_on_every_insertion = false;
        for (auto& value : pairs) {
            auto should_invert_after_this_iteration = invert_for_next_iteration;
            invert_for_next_iteration = false;

            auto insertion_result = insert_into_lookup_table(*current_table, value);
            switch (insertion_result) {
            case LookupTableInsertionOutcome::Successful:
                if (flush_on_every_insertion)
                    flush_tables();
                break;
            case LookupTableInsertionOutcome::ReplaceWithAnyChar: {
                table.clear();
                inverted_table.clear();
                arguments.append(to_underlying(CharacterCompareType::AnyChar));
                ++argument_count;
                break;
            }
            case LookupTableInsertionOutcome::TemporaryInversionNeeded:
                swap(current_table, current_inverted_table);
                invert_for_next_iteration = true;
                is_currently_inverted = !is_currently_inverted;
                break;
            case LookupTableInsertionOutcome::PermanentInversionNeeded:
                flush_tables();
                arguments.append(to_underlying(CharacterCompareType::Inverse));
                ++argument_count;
                break;
            case LookupTableInsertionOutcome::FlushOnInsertion:
            case LookupTableInsertionOutcome::FinishFlushOnInsertion:
                flush_tables();
                flush_on_every_insertion = insertion_result == LookupTableInsertionOutcome::FlushOnInsertion;
                [[fallthrough]];
            case LookupTableInsertionOutcome::CannotPlaceInTable:
                if (is_currently_inverted) {
                    arguments.append(to_underlying(CharacterCompareType::TemporaryInverse));
                    ++argument_count;
                }
                arguments.append(to_underlying(value.type));
                if (value.type != CharacterCompareType::AnyChar
                    && value.type != CharacterCompareType::TemporaryInverse
                    && value.type != CharacterCompareType::Inverse
                    && value.type != CharacterCompareType::And
                    && value.type != CharacterCompareType::Or
                    && value.type != CharacterCompareType::EndAndOr)
                    arguments.append(value.value);
                ++argument_count;
                break;
            }

            if (should_invert_after_this_iteration) {
                swap(current_table, current_inverted_table);
                is_currently_inverted = !is_currently_inverted;
            }
        }
        flush_tables();
    }

    target.empend(static_cast<ByteCodeValueType>(OpCodeId::Compare));
    target.empend(argument_count);   // number of arguments
    target.empend(arguments.size()); // size of arguments
    target.extend(move(arguments));
}
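
// Emitted shape (illustrative): for [a-dx-z] this produces a single Compare
// op whose argument list holds one LookupTable entry with the sorted ranges
// 'a'..'d' and 'x'..'z'. Because flush_tables() coalesces adjacent and
// overlapping ranges, [a-cb-d] also collapses into the single range 'a'..'d'.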

template void Regex<PosixBasicParser>::run_optimization_passes();
template void Regex<PosixExtendedParser>::run_optimization_passes();
template void Regex<ECMA262Parser>::run_optimization_passes();

}