// RegexOptimizer.cpp
  1. /*
  2. * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/Debug.h>
  7. #include <AK/Function.h>
  8. #include <AK/Queue.h>
  9. #include <AK/QuickSort.h>
  10. #include <AK/RedBlackTree.h>
  11. #include <AK/Stack.h>
  12. #include <AK/Trie.h>
  13. #include <LibRegex/Regex.h>
  14. #include <LibRegex/RegexBytecodeStreamOptimizer.h>
  15. #include <LibUnicode/CharacterTypes.h>
  16. #if REGEX_DEBUG
  17. # include <AK/ScopeGuard.h>
  18. # include <AK/ScopeLogger.h>
  19. #endif
  20. namespace regex {
  21. using Detail::Block;
  22. template<typename Parser>
  23. void Regex<Parser>::run_optimization_passes()
  24. {
  25. parser_result.bytecode.flatten();
  26. auto blocks = split_basic_blocks(parser_result.bytecode);
  27. if (attempt_rewrite_entire_match_as_substring_search(blocks))
  28. return;
  29. // Rewrite fork loops as atomic groups
  30. // e.g. a*b -> (ATOMIC a*)b
  31. attempt_rewrite_loops_as_atomic_groups(blocks);
  32. parser_result.bytecode.flatten();
  33. }
// Splits `bytecode' into a sorted list of basic blocks: maximal instruction
// runs that control flow can only enter at the first instruction and leave at
// the last. Jump-like instructions and their (backward) targets end a block.
template<typename Parser>
typename Regex<Parser>::BasicBlockList Regex<Parser>::split_basic_blocks(ByteCode const& bytecode)
{
    BasicBlockList block_boundaries;
    size_t end_of_last_block = 0; // Start position of the block currently being accumulated.

    auto bytecode_size = bytecode.size();

    MatchState state;
    state.instruction_position = 0;
    // Record the block boundaries implied by a jump-ish opcode (of concrete
    // type T) sitting at the current instruction position.
    auto check_jump = [&]<typename T>(OpCode const& opcode) {
        auto& op = static_cast<T const&>(opcode);
        // offset() is relative to the *end* of the instruction (see the
        // `offset + ip + size == block_start' checks elsewhere in this file),
        // so the jump target is instruction_position + size() + offset().
        ssize_t jump_offset = op.size() + op.offset();
        if (jump_offset >= 0) {
            // Forward jump: the current block ends at this instruction.
            block_boundaries.append({ end_of_last_block, state.instruction_position });
            end_of_last_block = state.instruction_position + opcode.size();
        } else {
            // This op jumps back, see if that's within this "block".
            if (jump_offset + state.instruction_position > end_of_last_block) {
                // Split the block!
                block_boundaries.append({ end_of_last_block, jump_offset + state.instruction_position });
                block_boundaries.append({ jump_offset + state.instruction_position, state.instruction_position });
                end_of_last_block = state.instruction_position + opcode.size();
            } else {
                // Nope, it's just a jump to another block
                block_boundaries.append({ end_of_last_block, state.instruction_position });
                end_of_last_block = state.instruction_position + opcode.size();
            }
        }
    };
    for (;;) {
        auto& opcode = bytecode.get_opcode(state);

        switch (opcode.opcode_id()) {
        case OpCodeId::Jump:
            check_jump.template operator()<OpCode_Jump>(opcode);
            break;
        case OpCodeId::JumpNonEmpty:
            check_jump.template operator()<OpCode_JumpNonEmpty>(opcode);
            break;
        case OpCodeId::ForkJump:
            check_jump.template operator()<OpCode_ForkJump>(opcode);
            break;
        case OpCodeId::ForkStay:
            check_jump.template operator()<OpCode_ForkStay>(opcode);
            break;
        case OpCodeId::FailForks:
            // Treat FailForks as a block terminator as well.
            block_boundaries.append({ end_of_last_block, state.instruction_position });
            end_of_last_block = state.instruction_position + opcode.size();
            break;
        case OpCodeId::Repeat: {
            // Repeat produces two blocks, one containing its repeated expr, and one after that.
            auto repeat_start = state.instruction_position - static_cast<OpCode_Repeat const&>(opcode).offset();
            if (repeat_start > end_of_last_block)
                block_boundaries.append({ end_of_last_block, repeat_start });
            block_boundaries.append({ repeat_start, state.instruction_position });
            end_of_last_block = state.instruction_position + opcode.size();
            break;
        }
        default:
            break;
        }

        auto next_ip = state.instruction_position + opcode.size();
        if (next_ip < bytecode_size)
            state.instruction_position = next_ip;
        else
            break;
    }

    // Whatever remains after the last boundary forms the final block.
    if (end_of_last_block < bytecode_size)
        block_boundaries.append({ end_of_last_block, bytecode_size });

    quick_sort(block_boundaries, [](auto& a, auto& b) { return a.start < b.start; });

    return block_boundaries;
}
  104. static bool has_overlap(Vector<CompareTypeAndValuePair> const& lhs, Vector<CompareTypeAndValuePair> const& rhs)
  105. {
  106. // We have to fully interpret the two sequences to determine if they overlap (that is, keep track of inversion state and what ranges they cover).
  107. bool inverse { false };
  108. bool temporary_inverse { false };
  109. bool reset_temporary_inverse { false };
  110. auto current_lhs_inversion_state = [&]() -> bool { return temporary_inverse ^ inverse; };
  111. RedBlackTree<u32, u32> lhs_ranges;
  112. RedBlackTree<u32, u32> lhs_negated_ranges;
  113. HashTable<CharClass> lhs_char_classes;
  114. HashTable<CharClass> lhs_negated_char_classes;
  115. auto has_any_unicode_property = false;
  116. HashTable<Unicode::GeneralCategory> lhs_unicode_general_categories;
  117. HashTable<Unicode::Property> lhs_unicode_properties;
  118. HashTable<Unicode::Script> lhs_unicode_scripts;
  119. HashTable<Unicode::Script> lhs_unicode_script_extensions;
  120. HashTable<Unicode::GeneralCategory> lhs_negated_unicode_general_categories;
  121. HashTable<Unicode::Property> lhs_negated_unicode_properties;
  122. HashTable<Unicode::Script> lhs_negated_unicode_scripts;
  123. HashTable<Unicode::Script> lhs_negated_unicode_script_extensions;
  124. auto any_unicode_property_matches = [&](u32 code_point) {
  125. if (any_of(lhs_negated_unicode_general_categories, [code_point](auto category) { return Unicode::code_point_has_general_category(code_point, category); }))
  126. return false;
  127. if (any_of(lhs_negated_unicode_properties, [code_point](auto property) { return Unicode::code_point_has_property(code_point, property); }))
  128. return false;
  129. if (any_of(lhs_negated_unicode_scripts, [code_point](auto script) { return Unicode::code_point_has_script(code_point, script); }))
  130. return false;
  131. if (any_of(lhs_negated_unicode_script_extensions, [code_point](auto script) { return Unicode::code_point_has_script_extension(code_point, script); }))
  132. return false;
  133. if (any_of(lhs_unicode_general_categories, [code_point](auto category) { return Unicode::code_point_has_general_category(code_point, category); }))
  134. return true;
  135. if (any_of(lhs_unicode_properties, [code_point](auto property) { return Unicode::code_point_has_property(code_point, property); }))
  136. return true;
  137. if (any_of(lhs_unicode_scripts, [code_point](auto script) { return Unicode::code_point_has_script(code_point, script); }))
  138. return true;
  139. if (any_of(lhs_unicode_script_extensions, [code_point](auto script) { return Unicode::code_point_has_script_extension(code_point, script); }))
  140. return true;
  141. return false;
  142. };
  143. auto range_contains = [&]<typename T>(T& value) -> bool {
  144. u32 start;
  145. u32 end;
  146. if constexpr (IsSame<T, CharRange>) {
  147. start = value.from;
  148. end = value.to;
  149. } else {
  150. start = value;
  151. end = value;
  152. }
  153. if (has_any_unicode_property) {
  154. // We have some properties, and a range is present
  155. // Instead of checking every single code point in the range, assume it's a match.
  156. return start != end || any_unicode_property_matches(start);
  157. }
  158. auto* max = lhs_ranges.find_smallest_not_below(start);
  159. return max && *max <= end;
  160. };
  161. auto char_class_contains = [&](CharClass const& value) -> bool {
  162. if (lhs_char_classes.contains(value))
  163. return true;
  164. if (lhs_negated_char_classes.contains(value))
  165. return false;
  166. // This char class might match something in the ranges we have, and checking that is far too expensive, so just bail out.
  167. return true;
  168. };
  169. for (auto const& pair : lhs) {
  170. if (reset_temporary_inverse) {
  171. reset_temporary_inverse = false;
  172. temporary_inverse = false;
  173. } else {
  174. reset_temporary_inverse = true;
  175. }
  176. switch (pair.type) {
  177. case CharacterCompareType::Inverse:
  178. inverse = !inverse;
  179. break;
  180. case CharacterCompareType::TemporaryInverse:
  181. temporary_inverse = true;
  182. reset_temporary_inverse = true;
  183. break;
  184. case CharacterCompareType::AnyChar:
  185. // Special case: if not inverted, AnyChar is always in the range.
  186. if (!current_lhs_inversion_state())
  187. return true;
  188. break;
  189. case CharacterCompareType::Char:
  190. if (!current_lhs_inversion_state())
  191. lhs_ranges.insert(pair.value, pair.value);
  192. else
  193. lhs_negated_ranges.insert(pair.value, pair.value);
  194. break;
  195. case CharacterCompareType::String:
  196. // FIXME: We just need to look at the last character of this string, but we only have the first character here.
  197. // Just bail out to avoid false positives.
  198. return true;
  199. case CharacterCompareType::CharClass:
  200. if (!current_lhs_inversion_state())
  201. lhs_char_classes.set(static_cast<CharClass>(pair.value));
  202. else
  203. lhs_negated_char_classes.set(static_cast<CharClass>(pair.value));
  204. break;
  205. case CharacterCompareType::CharRange: {
  206. auto range = CharRange(pair.value);
  207. if (!current_lhs_inversion_state())
  208. lhs_ranges.insert(range.from, range.to);
  209. else
  210. lhs_negated_ranges.insert(range.from, range.to);
  211. break;
  212. }
  213. case CharacterCompareType::LookupTable:
  214. // We've transformed this into a series of ranges in flat_compares(), so bail out if we see it.
  215. return true;
  216. case CharacterCompareType::Reference:
  217. // We've handled this before coming here.
  218. break;
  219. case CharacterCompareType::Property:
  220. has_any_unicode_property = true;
  221. if (!current_lhs_inversion_state())
  222. lhs_unicode_properties.set(static_cast<Unicode::Property>(pair.value));
  223. else
  224. lhs_negated_unicode_properties.set(static_cast<Unicode::Property>(pair.value));
  225. break;
  226. case CharacterCompareType::GeneralCategory:
  227. has_any_unicode_property = true;
  228. if (!current_lhs_inversion_state())
  229. lhs_unicode_general_categories.set(static_cast<Unicode::GeneralCategory>(pair.value));
  230. else
  231. lhs_negated_unicode_general_categories.set(static_cast<Unicode::GeneralCategory>(pair.value));
  232. break;
  233. case CharacterCompareType::Script:
  234. has_any_unicode_property = true;
  235. if (!current_lhs_inversion_state())
  236. lhs_unicode_scripts.set(static_cast<Unicode::Script>(pair.value));
  237. else
  238. lhs_negated_unicode_scripts.set(static_cast<Unicode::Script>(pair.value));
  239. break;
  240. case CharacterCompareType::ScriptExtension:
  241. has_any_unicode_property = true;
  242. if (!current_lhs_inversion_state())
  243. lhs_unicode_script_extensions.set(static_cast<Unicode::Script>(pair.value));
  244. else
  245. lhs_negated_unicode_script_extensions.set(static_cast<Unicode::Script>(pair.value));
  246. break;
  247. case CharacterCompareType::And:
  248. case CharacterCompareType::Or:
  249. case CharacterCompareType::EndAndOr:
  250. // FIXME: These are too difficult to handle, so bail out.
  251. return true;
  252. case CharacterCompareType::Undefined:
  253. case CharacterCompareType::RangeExpressionDummy:
  254. // These do not occur in valid bytecode.
  255. VERIFY_NOT_REACHED();
  256. }
  257. }
  258. if constexpr (REGEX_DEBUG) {
  259. dbgln("lhs ranges:");
  260. for (auto it = lhs_ranges.begin(); it != lhs_ranges.end(); ++it)
  261. dbgln(" {}..{}", it.key(), *it);
  262. dbgln("lhs negated ranges:");
  263. for (auto it = lhs_negated_ranges.begin(); it != lhs_negated_ranges.end(); ++it)
  264. dbgln(" {}..{}", it.key(), *it);
  265. }
  266. for (auto const& pair : rhs) {
  267. if (reset_temporary_inverse) {
  268. reset_temporary_inverse = false;
  269. temporary_inverse = false;
  270. } else {
  271. reset_temporary_inverse = true;
  272. }
  273. dbgln_if(REGEX_DEBUG, "check {} ({})...", character_compare_type_name(pair.type), pair.value);
  274. switch (pair.type) {
  275. case CharacterCompareType::Inverse:
  276. inverse = !inverse;
  277. break;
  278. case CharacterCompareType::TemporaryInverse:
  279. temporary_inverse = true;
  280. reset_temporary_inverse = true;
  281. break;
  282. case CharacterCompareType::AnyChar:
  283. // Special case: if not inverted, AnyChar is always in the range.
  284. if (!current_lhs_inversion_state())
  285. return true;
  286. break;
  287. case CharacterCompareType::Char:
  288. if (current_lhs_inversion_state() ^ range_contains(pair.value))
  289. return true;
  290. break;
  291. case CharacterCompareType::String:
  292. // FIXME: We just need to look at the last character of this string, but we only have the first character here.
  293. // Just bail out to avoid false positives.
  294. return true;
  295. case CharacterCompareType::CharClass:
  296. if (current_lhs_inversion_state() ^ char_class_contains(static_cast<CharClass>(pair.value)))
  297. return true;
  298. break;
  299. case CharacterCompareType::CharRange: {
  300. auto range = CharRange(pair.value);
  301. if (current_lhs_inversion_state() ^ range_contains(range))
  302. return true;
  303. break;
  304. }
  305. case CharacterCompareType::LookupTable:
  306. // We've transformed this into a series of ranges in flat_compares(), so bail out if we see it.
  307. return true;
  308. case CharacterCompareType::Reference:
  309. // We've handled this before coming here.
  310. break;
  311. case CharacterCompareType::Property:
  312. // The only reasonable scenario where we can check these properties without spending too much time is if:
  313. // - the ranges are empty
  314. // - the char classes are empty
  315. // - the unicode properties are empty or contain only this property
  316. if (!lhs_ranges.is_empty() || !lhs_negated_ranges.is_empty() || !lhs_char_classes.is_empty() || !lhs_negated_char_classes.is_empty())
  317. return true;
  318. if (has_any_unicode_property && !lhs_unicode_properties.is_empty() && !lhs_negated_unicode_properties.is_empty()) {
  319. if (current_lhs_inversion_state() ^ lhs_unicode_properties.contains(static_cast<Unicode::Property>(pair.value)))
  320. return true;
  321. if (false == (current_lhs_inversion_state() ^ lhs_negated_unicode_properties.contains(static_cast<Unicode::Property>(pair.value))))
  322. return true;
  323. }
  324. break;
  325. case CharacterCompareType::GeneralCategory:
  326. if (!lhs_ranges.is_empty() || !lhs_negated_ranges.is_empty() || !lhs_char_classes.is_empty() || !lhs_negated_char_classes.is_empty())
  327. return true;
  328. if (has_any_unicode_property && !lhs_unicode_general_categories.is_empty() && !lhs_negated_unicode_general_categories.is_empty()) {
  329. if (current_lhs_inversion_state() ^ lhs_unicode_general_categories.contains(static_cast<Unicode::GeneralCategory>(pair.value)))
  330. return true;
  331. if (false == (current_lhs_inversion_state() ^ lhs_negated_unicode_general_categories.contains(static_cast<Unicode::GeneralCategory>(pair.value))))
  332. return true;
  333. }
  334. break;
  335. case CharacterCompareType::Script:
  336. if (!lhs_ranges.is_empty() || !lhs_negated_ranges.is_empty() || !lhs_char_classes.is_empty() || !lhs_negated_char_classes.is_empty())
  337. return true;
  338. if (has_any_unicode_property && !lhs_unicode_scripts.is_empty() && !lhs_negated_unicode_scripts.is_empty()) {
  339. if (current_lhs_inversion_state() ^ lhs_unicode_scripts.contains(static_cast<Unicode::Script>(pair.value)))
  340. return true;
  341. if (false == (current_lhs_inversion_state() ^ lhs_negated_unicode_scripts.contains(static_cast<Unicode::Script>(pair.value))))
  342. return true;
  343. }
  344. break;
  345. case CharacterCompareType::ScriptExtension:
  346. if (!lhs_ranges.is_empty() || !lhs_negated_ranges.is_empty() || !lhs_char_classes.is_empty() || !lhs_negated_char_classes.is_empty())
  347. return true;
  348. if (has_any_unicode_property && !lhs_unicode_script_extensions.is_empty() && !lhs_negated_unicode_script_extensions.is_empty()) {
  349. if (current_lhs_inversion_state() ^ lhs_unicode_script_extensions.contains(static_cast<Unicode::Script>(pair.value)))
  350. return true;
  351. if (false == (current_lhs_inversion_state() ^ lhs_negated_unicode_script_extensions.contains(static_cast<Unicode::Script>(pair.value))))
  352. return true;
  353. }
  354. break;
  355. case CharacterCompareType::And:
  356. case CharacterCompareType::Or:
  357. case CharacterCompareType::EndAndOr:
  358. // FIXME: These are too difficult to handle, so bail out.
  359. return true;
  360. case CharacterCompareType::Undefined:
  361. case CharacterCompareType::RangeExpressionDummy:
  362. // These do not occur in valid bytecode.
  363. VERIFY_NOT_REACHED();
  364. }
  365. }
  366. return false;
  367. }
// Result of checking whether a fork loop may be rewritten as an atomic group
// (see block_satisfies_atomic_rewrite_precondition()).
enum class AtomicRewritePreconditionResult {
    SatisfiedWithProperHeader, // Safe to rewrite; the following block starts with an anchor or a compare.
    SatisfiedWithEmptyHeader,  // Safe to rewrite, but the following block contains no compare at all.
    NotSatisfied,              // The rewrite must not be performed.
};
// Decides whether the loop whose body is `repeated_block' can be made atomic:
// the first thing `following_block' can match must not overlap anything the
// repeated block matches, so that discarding the loop's backtracking states
// cannot change the match outcome.
static AtomicRewritePreconditionResult block_satisfies_atomic_rewrite_precondition(ByteCode const& bytecode, Block const& repeated_block, Block const& following_block)
{
    Vector<Vector<CompareTypeAndValuePair>> repeated_values;
    HashTable<size_t> active_capture_groups;
    MatchState state;
    auto has_seen_actionable_opcode = false;

    // First pass: collect everything the repeated block can match, and note
    // which capture groups it writes.
    for (state.instruction_position = repeated_block.start; state.instruction_position < repeated_block.end;) {
        auto& opcode = bytecode.get_opcode(state);
        switch (opcode.opcode_id()) {
        case OpCodeId::Compare: {
            has_seen_actionable_opcode = true;
            auto compares = static_cast<OpCode_Compare const&>(opcode).flat_compares();
            // A leading AnyChar overlaps with everything; give up immediately.
            if (repeated_values.is_empty() && any_of(compares, [](auto& compare) { return compare.type == CharacterCompareType::AnyChar; }))
                return AtomicRewritePreconditionResult::NotSatisfied;
            repeated_values.append(move(compares));
            break;
        }
        case OpCodeId::CheckBegin:
        case OpCodeId::CheckEnd:
            has_seen_actionable_opcode = true;
            // An anchor before any compare: the loop body consumes nothing
            // first, so the rewrite is trivially safe.
            if (repeated_values.is_empty())
                return AtomicRewritePreconditionResult::SatisfiedWithProperHeader;
            break;
        case OpCodeId::CheckBoundary:
            // FIXME: What should we do with these? for now, let's fail.
            return AtomicRewritePreconditionResult::NotSatisfied;
        case OpCodeId::Restore:
        case OpCodeId::GoBack:
            // These rewind the input position; the analysis here can't model that.
            return AtomicRewritePreconditionResult::NotSatisfied;
        case OpCodeId::SaveRightCaptureGroup:
            active_capture_groups.set(static_cast<OpCode_SaveRightCaptureGroup const&>(opcode).id());
            break;
        case OpCodeId::SaveLeftCaptureGroup:
            active_capture_groups.set(static_cast<OpCode_SaveLeftCaptureGroup const&>(opcode).id());
            break;
        case OpCodeId::ForkJump:
        case OpCodeId::ForkReplaceJump:
        case OpCodeId::JumpNonEmpty:
            // We could attempt to recursively resolve the follow set, but pretending that this just goes nowhere is faster.
            if (!has_seen_actionable_opcode)
                return AtomicRewritePreconditionResult::NotSatisfied;
            break;
        default:
            break;
        }

        state.instruction_position += opcode.size();
    }
    dbgln_if(REGEX_DEBUG, "Found {} entries in reference", repeated_values.size());
    dbgln_if(REGEX_DEBUG, "Found {} active capture groups", active_capture_groups.size());

    bool following_block_has_at_least_one_compare = false;
    // Find the first compare in the following block, it must NOT match any of the values in `repeated_values'.
    auto final_instruction = following_block.start;
    for (state.instruction_position = following_block.start; state.instruction_position < following_block.end;) {
        final_instruction = state.instruction_position;
        auto& opcode = bytecode.get_opcode(state);
        switch (opcode.opcode_id()) {
        // Note: These have to exist since we're effectively repeating the following block as well
        case OpCodeId::SaveRightCaptureGroup:
            active_capture_groups.set(static_cast<OpCode_SaveRightCaptureGroup const&>(opcode).id());
            break;
        case OpCodeId::SaveLeftCaptureGroup:
            active_capture_groups.set(static_cast<OpCode_SaveLeftCaptureGroup const&>(opcode).id());
            break;
        case OpCodeId::Compare: {
            following_block_has_at_least_one_compare = true;
            // We found a compare, let's see what it has.
            auto compares = static_cast<OpCode_Compare const&>(opcode).flat_compares();
            if (compares.is_empty())
                break;

            // AnyChar overlaps everything, and a backreference to a group the
            // loop writes can match whatever the loop just matched.
            if (any_of(compares, [&](auto& compare) {
                    return compare.type == CharacterCompareType::AnyChar
                        || (compare.type == CharacterCompareType::Reference && active_capture_groups.contains(compare.value));
                }))
                return AtomicRewritePreconditionResult::NotSatisfied;

            if (any_of(repeated_values, [&](auto& repeated_value) { return has_overlap(compares, repeated_value); }))
                return AtomicRewritePreconditionResult::NotSatisfied;

            return AtomicRewritePreconditionResult::SatisfiedWithProperHeader;
        }
        case OpCodeId::CheckBegin:
        case OpCodeId::CheckEnd:
            return AtomicRewritePreconditionResult::SatisfiedWithProperHeader; // Nothing can match the end!
        case OpCodeId::CheckBoundary:
            // FIXME: What should we do with these? For now, consider them a failure.
            return AtomicRewritePreconditionResult::NotSatisfied;
        case OpCodeId::ForkJump:
        case OpCodeId::ForkReplaceJump:
        case OpCodeId::JumpNonEmpty:
            // See note in the previous switch, same cases.
            if (!following_block_has_at_least_one_compare)
                return AtomicRewritePreconditionResult::NotSatisfied;
            break;
        default:
            break;
        }

        state.instruction_position += opcode.size();
    }

    // If the following block falls through, we can't rewrite it.
    state.instruction_position = final_instruction;
    switch (bytecode.get_opcode(state).opcode_id()) {
    case OpCodeId::Jump:
    case OpCodeId::JumpNonEmpty:
    case OpCodeId::ForkJump:
    case OpCodeId::ForkReplaceJump:
        break;
    default:
        return AtomicRewritePreconditionResult::NotSatisfied;
    }

    if (following_block_has_at_least_one_compare)
        return AtomicRewritePreconditionResult::SatisfiedWithProperHeader;
    return AtomicRewritePreconditionResult::SatisfiedWithEmptyHeader;
}
  484. template<typename Parser>
  485. bool Regex<Parser>::attempt_rewrite_entire_match_as_substring_search(BasicBlockList const& basic_blocks)
  486. {
  487. // If there's no jumps, we can probably rewrite this as a substring search (Compare { string = str }).
  488. if (basic_blocks.size() > 1)
  489. return false;
  490. if (basic_blocks.is_empty()) {
  491. parser_result.optimization_data.pure_substring_search = ""sv;
  492. return true; // Empty regex, sure.
  493. }
  494. auto& bytecode = parser_result.bytecode;
  495. auto is_unicode = parser_result.options.has_flag_set(AllFlags::Unicode);
  496. // We have a single basic block, let's see if it's a series of character or string compares.
  497. StringBuilder final_string;
  498. MatchState state;
  499. while (state.instruction_position < bytecode.size()) {
  500. auto& opcode = bytecode.get_opcode(state);
  501. switch (opcode.opcode_id()) {
  502. case OpCodeId::Compare: {
  503. auto& compare = static_cast<OpCode_Compare const&>(opcode);
  504. for (auto& flat_compare : compare.flat_compares()) {
  505. if (flat_compare.type != CharacterCompareType::Char)
  506. return false;
  507. if (is_unicode || flat_compare.value <= 0x7f)
  508. final_string.append_code_point(flat_compare.value);
  509. else
  510. final_string.append(bit_cast<char>(static_cast<u8>(flat_compare.value)));
  511. }
  512. break;
  513. }
  514. default:
  515. return false;
  516. }
  517. state.instruction_position += opcode.size();
  518. }
  519. parser_result.optimization_data.pure_substring_search = final_string.to_byte_string();
  520. return true;
  521. }
  522. template<typename Parser>
  523. void Regex<Parser>::attempt_rewrite_loops_as_atomic_groups(BasicBlockList const& basic_blocks)
  524. {
  525. auto& bytecode = parser_result.bytecode;
  526. if constexpr (REGEX_DEBUG) {
  527. RegexDebug dbg;
  528. dbg.print_bytecode(*this);
  529. for (auto const& block : basic_blocks)
  530. dbgln("block from {} to {}", block.start, block.end);
  531. }
  532. // A pattern such as:
  533. // bb0 | RE0
  534. // | ForkX bb0
  535. // -------------------------
  536. // bb1 | RE1
  537. // can be rewritten as:
  538. // -------------------------
  539. // bb0 | RE0
  540. // | ForkReplaceX bb0
  541. // -------------------------
  542. // bb1 | RE1
  543. // provided that first(RE1) not-in end(RE0), which is to say
  544. // that RE1 cannot start with whatever RE0 has matched (ever).
  545. //
  546. // Alternatively, a second form of this pattern can also occur:
  547. // bb0 | *
  548. // | ForkX bb2
  549. // ------------------------
  550. // bb1 | RE0
  551. // | Jump bb0
  552. // ------------------------
  553. // bb2 | RE1
  554. // which can be transformed (with the same preconditions) to:
  555. // bb0 | *
  556. // | ForkReplaceX bb2
  557. // ------------------------
  558. // bb1 | RE0
  559. // | Jump bb0
  560. // ------------------------
  561. // bb2 | RE1
  562. enum class AlternateForm {
  563. DirectLoopWithoutHeader, // loop without proper header, a block forking to itself. i.e. the first form.
  564. DirectLoopWithoutHeaderAndEmptyFollow, // loop without proper header, a block forking to itself. i.e. the first form but with RE1 being empty.
  565. DirectLoopWithHeader, // loop with proper header, i.e. the second form.
  566. };
  567. struct CandidateBlock {
  568. Block forking_block;
  569. Optional<Block> new_target_block;
  570. AlternateForm form;
  571. };
  572. Vector<CandidateBlock> candidate_blocks;
  573. auto is_an_eligible_jump = [](OpCode const& opcode, size_t ip, size_t block_start, AlternateForm alternate_form) {
  574. switch (opcode.opcode_id()) {
  575. case OpCodeId::JumpNonEmpty: {
  576. auto const& op = static_cast<OpCode_JumpNonEmpty const&>(opcode);
  577. auto form = op.form();
  578. if (form != OpCodeId::Jump && alternate_form == AlternateForm::DirectLoopWithHeader)
  579. return false;
  580. if (form != OpCodeId::ForkJump && form != OpCodeId::ForkStay && alternate_form == AlternateForm::DirectLoopWithoutHeader)
  581. return false;
  582. return op.offset() + ip + opcode.size() == block_start;
  583. }
  584. case OpCodeId::ForkJump:
  585. if (alternate_form == AlternateForm::DirectLoopWithHeader)
  586. return false;
  587. return static_cast<OpCode_ForkJump const&>(opcode).offset() + ip + opcode.size() == block_start;
  588. case OpCodeId::ForkStay:
  589. if (alternate_form == AlternateForm::DirectLoopWithHeader)
  590. return false;
  591. return static_cast<OpCode_ForkStay const&>(opcode).offset() + ip + opcode.size() == block_start;
  592. case OpCodeId::Jump:
  593. // Infinite loop does *not* produce forks.
  594. if (alternate_form == AlternateForm::DirectLoopWithoutHeader)
  595. return false;
  596. if (alternate_form == AlternateForm::DirectLoopWithHeader)
  597. return static_cast<OpCode_Jump const&>(opcode).offset() + ip + opcode.size() == block_start;
  598. VERIFY_NOT_REACHED();
  599. default:
  600. return false;
  601. }
  602. };
  603. for (size_t i = 0; i < basic_blocks.size(); ++i) {
  604. auto forking_block = basic_blocks[i];
  605. Optional<Block> fork_fallback_block;
  606. if (i + 1 < basic_blocks.size())
  607. fork_fallback_block = basic_blocks[i + 1];
  608. MatchState state;
  609. // Check if the last instruction in this block is a jump to the block itself:
  610. {
  611. state.instruction_position = forking_block.end;
  612. auto& opcode = bytecode.get_opcode(state);
  613. if (is_an_eligible_jump(opcode, state.instruction_position, forking_block.start, AlternateForm::DirectLoopWithoutHeader)) {
  614. // We've found RE0 (and RE1 is just the following block, if any), let's see if the precondition applies.
  615. // if RE1 is empty, there's no first(RE1), so this is an automatic pass.
  616. if (!fork_fallback_block.has_value()
  617. || (fork_fallback_block->end == fork_fallback_block->start && block_satisfies_atomic_rewrite_precondition(bytecode, forking_block, *fork_fallback_block) != AtomicRewritePreconditionResult::NotSatisfied)) {
  618. candidate_blocks.append({ forking_block, fork_fallback_block, AlternateForm::DirectLoopWithoutHeader });
  619. break;
  620. }
  621. auto precondition = block_satisfies_atomic_rewrite_precondition(bytecode, forking_block, *fork_fallback_block);
  622. if (precondition == AtomicRewritePreconditionResult::SatisfiedWithProperHeader) {
  623. candidate_blocks.append({ forking_block, fork_fallback_block, AlternateForm::DirectLoopWithoutHeader });
  624. break;
  625. }
  626. if (precondition == AtomicRewritePreconditionResult::SatisfiedWithEmptyHeader) {
  627. candidate_blocks.append({ forking_block, fork_fallback_block, AlternateForm::DirectLoopWithoutHeaderAndEmptyFollow });
  628. break;
  629. }
  630. }
  631. }
  632. // Check if the last instruction in the last block is a direct jump to this block
  633. if (fork_fallback_block.has_value()) {
  634. state.instruction_position = fork_fallback_block->end;
  635. auto& opcode = bytecode.get_opcode(state);
  636. if (is_an_eligible_jump(opcode, state.instruction_position, forking_block.start, AlternateForm::DirectLoopWithHeader)) {
  637. // We've found bb1 and bb0, let's just make sure that bb0 forks to bb2.
  638. state.instruction_position = forking_block.end;
  639. auto& opcode = bytecode.get_opcode(state);
  640. if (opcode.opcode_id() == OpCodeId::ForkJump || opcode.opcode_id() == OpCodeId::ForkStay) {
  641. Optional<Block> block_following_fork_fallback;
  642. if (i + 2 < basic_blocks.size())
  643. block_following_fork_fallback = basic_blocks[i + 2];
  644. if (!block_following_fork_fallback.has_value()
  645. || block_satisfies_atomic_rewrite_precondition(bytecode, *fork_fallback_block, *block_following_fork_fallback) != AtomicRewritePreconditionResult::NotSatisfied) {
  646. candidate_blocks.append({ forking_block, {}, AlternateForm::DirectLoopWithHeader });
  647. break;
  648. }
  649. }
  650. }
  651. }
  652. }
  653. dbgln_if(REGEX_DEBUG, "Found {} candidate blocks", candidate_blocks.size());
  654. if (candidate_blocks.is_empty()) {
  655. dbgln_if(REGEX_DEBUG, "Failed to find anything for {}", pattern_value);
  656. return;
  657. }
  658. RedBlackTree<size_t, size_t> needed_patches;
  659. // Reverse the blocks, so we can patch the bytecode without messing with the latter patches.
  660. quick_sort(candidate_blocks, [](auto& a, auto& b) { return b.forking_block.start > a.forking_block.start; });
  661. for (auto& candidate : candidate_blocks) {
  662. // Note that both forms share a ForkReplace patch in forking_block.
  663. // Patch the ForkX in forking_block to be a ForkReplaceX instead.
  664. auto& opcode_id = bytecode[candidate.forking_block.end];
  665. if (opcode_id == (ByteCodeValueType)OpCodeId::ForkStay) {
  666. opcode_id = (ByteCodeValueType)OpCodeId::ForkReplaceStay;
  667. } else if (opcode_id == (ByteCodeValueType)OpCodeId::ForkJump) {
  668. opcode_id = (ByteCodeValueType)OpCodeId::ForkReplaceJump;
  669. } else if (opcode_id == (ByteCodeValueType)OpCodeId::JumpNonEmpty) {
  670. auto& jump_opcode_id = bytecode[candidate.forking_block.end + 3];
  671. if (jump_opcode_id == (ByteCodeValueType)OpCodeId::ForkStay)
  672. jump_opcode_id = (ByteCodeValueType)OpCodeId::ForkReplaceStay;
  673. else if (jump_opcode_id == (ByteCodeValueType)OpCodeId::ForkJump)
  674. jump_opcode_id = (ByteCodeValueType)OpCodeId::ForkReplaceJump;
  675. else
  676. VERIFY_NOT_REACHED();
  677. } else {
  678. VERIFY_NOT_REACHED();
  679. }
  680. }
  681. if (!needed_patches.is_empty()) {
  682. MatchState state;
  683. auto bytecode_size = bytecode.size();
  684. state.instruction_position = 0;
  685. struct Patch {
  686. ssize_t value;
  687. size_t offset;
  688. bool should_negate { false };
  689. };
  690. for (;;) {
  691. if (state.instruction_position >= bytecode_size)
  692. break;
  693. auto& opcode = bytecode.get_opcode(state);
  694. Stack<Patch, 2> patch_points;
  695. switch (opcode.opcode_id()) {
  696. case OpCodeId::Jump:
  697. patch_points.push({ static_cast<OpCode_Jump const&>(opcode).offset(), state.instruction_position + 1 });
  698. break;
  699. case OpCodeId::JumpNonEmpty:
  700. patch_points.push({ static_cast<OpCode_JumpNonEmpty const&>(opcode).offset(), state.instruction_position + 1 });
  701. patch_points.push({ static_cast<OpCode_JumpNonEmpty const&>(opcode).checkpoint(), state.instruction_position + 2 });
  702. break;
  703. case OpCodeId::ForkJump:
  704. patch_points.push({ static_cast<OpCode_ForkJump const&>(opcode).offset(), state.instruction_position + 1 });
  705. break;
  706. case OpCodeId::ForkStay:
  707. patch_points.push({ static_cast<OpCode_ForkStay const&>(opcode).offset(), state.instruction_position + 1 });
  708. break;
  709. case OpCodeId::Repeat:
  710. patch_points.push({ -(ssize_t) static_cast<OpCode_Repeat const&>(opcode).offset(), state.instruction_position + 1, true });
  711. break;
  712. default:
  713. break;
  714. }
  715. while (!patch_points.is_empty()) {
  716. auto& patch_point = patch_points.top();
  717. auto target_offset = patch_point.value + state.instruction_position + opcode.size();
  718. constexpr auto do_patch = [](auto& patch_it, auto& patch_point, auto& target_offset, auto& bytecode, auto ip) {
  719. if (patch_it.key() == ip)
  720. return;
  721. if (patch_point.value < 0 && target_offset <= patch_it.key() && ip > patch_it.key())
  722. bytecode[patch_point.offset] += (patch_point.should_negate ? 1 : -1) * (*patch_it);
  723. else if (patch_point.value > 0 && target_offset >= patch_it.key() && ip < patch_it.key())
  724. bytecode[patch_point.offset] += (patch_point.should_negate ? -1 : 1) * (*patch_it);
  725. };
  726. if (auto patch_it = needed_patches.find_largest_not_above_iterator(target_offset); !patch_it.is_end())
  727. do_patch(patch_it, patch_point, target_offset, bytecode, state.instruction_position);
  728. else if (auto patch_it = needed_patches.find_largest_not_above_iterator(state.instruction_position); !patch_it.is_end())
  729. do_patch(patch_it, patch_point, target_offset, bytecode, state.instruction_position);
  730. patch_points.pop();
  731. }
  732. state.instruction_position += opcode.size();
  733. }
  734. }
  735. if constexpr (REGEX_DEBUG) {
  736. warnln("Transformed to:");
  737. RegexDebug dbg;
  738. dbg.print_bytecode(*this);
  739. }
  740. }
  741. void Optimizer::append_alternation(ByteCode& target, ByteCode&& left, ByteCode&& right)
  742. {
  743. Array<ByteCode, 2> alternatives;
  744. alternatives[0] = move(left);
  745. alternatives[1] = move(right);
  746. append_alternation(target, alternatives);
  747. }
// Trie's map template-template parameter expects a three-argument template
// (key, value, key-traits); this alias adapts OrderedHashMap down to that arity.
template<typename K, typename V, typename KTraits>
using OrderedHashMapForTrie = OrderedHashMap<K, V, KTraits>;
// Emit bytecode into `target` that matches any one of `alternatives`.
// Two layout strategies are available:
// - a plain chain: one ForkJump per alternative followed by the bodies, or
// - a trie that shares common instruction prefixes between alternatives;
// the cheaper one (by a simple node-count estimate) is chosen.
// All alternatives are consumed (moved-from) in the process.
void Optimizer::append_alternation(ByteCode& target, Span<ByteCode> alternatives)
{
    if (alternatives.size() == 0)
        return;

    if (alternatives.size() == 1)
        return target.extend(move(alternatives[0]));

    // If every alternative is empty the alternation matches the empty string; emit nothing.
    if (all_of(alternatives, [](auto& x) { return x.is_empty(); }))
        return;

    for (auto& entry : alternatives)
        entry.flatten();

#if REGEX_DEBUG
    ScopeLogger<true> log;
    warnln("Alternations:");
    RegexDebug dbg;
    for (auto& entry : alternatives) {
        warnln("----------");
        dbg.print_bytecode(entry);
    }
    ScopeGuard print_at_end {
        [&] {
            warnln("======================");
            RegexDebug dbg;
            dbg.print_bytecode(target);
        }
    };
#endif

    // First, find incoming jump edges.
    // We need them for two reasons:
    // - We need to distinguish between insn-A-jumped-to-by-insn-B and insn-A-jumped-to-by-insn-C (as otherwise we'd break trie invariants)
    // - We need to know which jumps to patch when we're done
    struct JumpEdge {
        Span<ByteCodeValueType const> jump_insn;
    };
    // Per alternative: instruction position -> all jump instructions (raw bytecode spans) targeting it.
    Vector<HashMap<size_t, Vector<JumpEdge>>> incoming_jump_edges_for_each_alternative;
    incoming_jump_edges_for_each_alternative.resize(alternatives.size());

    auto has_any_backwards_jump = false;

    MatchState state;
    for (size_t i = 0; i < alternatives.size(); ++i) {
        auto& alternative = alternatives[i];
        // Add a jump to the "end" of the block; this is implicit in the bytecode, but we need it to be explicit in the trie.
        // Jump{offset=0}
        alternative.append(static_cast<ByteCodeValueType>(OpCodeId::Jump));
        alternative.append(0);

        auto& incoming_jump_edges = incoming_jump_edges_for_each_alternative[i];

        auto alternative_bytes = alternative.spans<1>().singular_span();
        for (state.instruction_position = 0; state.instruction_position < alternative.size();) {
            auto& opcode = alternative.get_opcode(state);
            auto opcode_bytes = alternative_bytes.slice(state.instruction_position, opcode.size());

            // Record the jump target for every control-flow opcode; offsets are relative
            // to the current instruction position for all but Repeat, which encodes a
            // backwards distance and therefore always counts as a backwards jump.
            switch (opcode.opcode_id()) {
            case OpCodeId::Jump:
                incoming_jump_edges.ensure(static_cast<OpCode_Jump const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_Jump const&>(opcode).offset() < 0;
                break;
            case OpCodeId::JumpNonEmpty:
                incoming_jump_edges.ensure(static_cast<OpCode_JumpNonEmpty const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_JumpNonEmpty const&>(opcode).offset() < 0;
                break;
            case OpCodeId::ForkJump:
                incoming_jump_edges.ensure(static_cast<OpCode_ForkJump const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_ForkJump const&>(opcode).offset() < 0;
                break;
            case OpCodeId::ForkStay:
                incoming_jump_edges.ensure(static_cast<OpCode_ForkStay const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_ForkStay const&>(opcode).offset() < 0;
                break;
            case OpCodeId::ForkReplaceJump:
                incoming_jump_edges.ensure(static_cast<OpCode_ForkReplaceJump const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_ForkReplaceJump const&>(opcode).offset() < 0;
                break;
            case OpCodeId::ForkReplaceStay:
                incoming_jump_edges.ensure(static_cast<OpCode_ForkReplaceStay const&>(opcode).offset() + state.instruction_position).append({ opcode_bytes });
                has_any_backwards_jump |= static_cast<OpCode_ForkReplaceStay const&>(opcode).offset() < 0;
                break;
            case OpCodeId::Repeat:
                incoming_jump_edges.ensure(state.instruction_position - static_cast<OpCode_Repeat const&>(opcode).offset()).append({ opcode_bytes });
                has_any_backwards_jump = true;
                break;
            default:
                break;
            }
            state.instruction_position += opcode.size();
        }
    }

    // An instruction position qualified by which alternative it came from.
    struct QualifiedIP {
        size_t alternative_index;
        size_t instruction_position;
    };
    using Tree = Trie<DisjointSpans<ByteCodeValueType const>, Vector<QualifiedIP>, Traits<DisjointSpans<ByteCodeValueType const>>, void, OrderedHashMapForTrie>;
    Tree trie { {} }; // Root node is empty, key{ instruction_bytes, dependent_instruction_bytes... } -> IP

    size_t common_hits = 0;
    size_t total_nodes = 0;
    size_t total_bytecode_entries_in_tree = 0;
    for (size_t i = 0; i < alternatives.size(); ++i) {
        auto& alternative = alternatives[i];
        auto& incoming_jump_edges = incoming_jump_edges_for_each_alternative[i];

        auto* active_node = &trie;
        auto alternative_span = alternative.spans<1>().singular_span();
        for (state.instruction_position = 0; state.instruction_position < alternative_span.size();) {
            total_nodes += 1;
            auto& opcode = alternative.get_opcode(state);
            auto opcode_bytes = alternative_span.slice(state.instruction_position, opcode.size());

            // A node's key is the instruction itself plus every jump instruction targeting it,
            // so identical instructions with different incoming edges become distinct nodes.
            Vector<Span<ByteCodeValueType const>> node_key_bytes;
            node_key_bytes.append(opcode_bytes);

            if (auto edges = incoming_jump_edges.get(state.instruction_position); edges.has_value()) {
                for (auto& edge : *edges)
                    node_key_bytes.append(edge.jump_insn);
            }

            active_node = static_cast<decltype(active_node)>(MUST(active_node->ensure_child(DisjointSpans<ByteCodeValueType const> { move(node_key_bytes) })));

            if (active_node->has_metadata()) {
                // Another alternative already placed this node; record our IP and count the share.
                active_node->metadata_value().append({ i, state.instruction_position });
                common_hits += 1;
            } else {
                active_node->set_metadata(Vector<QualifiedIP> { QualifiedIP { i, state.instruction_position } });
                total_bytecode_entries_in_tree += opcode.size();
            }

            state.instruction_position += opcode.size();
        }
    }

    if constexpr (REGEX_DEBUG) {
        Function<void(decltype(trie)&, size_t)> print_tree = [&](decltype(trie)& node, size_t indent = 0) mutable {
            ByteString name = "(no ip)";
            ByteString insn;
            if (node.has_metadata()) {
                name = ByteString::formatted(
                    "{}@{} ({} node{})",
                    node.metadata_value().first().instruction_position,
                    node.metadata_value().first().alternative_index,
                    node.metadata_value().size(),
                    node.metadata_value().size() == 1 ? "" : "s");

                MatchState state;
                state.instruction_position = node.metadata_value().first().instruction_position;
                auto& opcode = alternatives[node.metadata_value().first().alternative_index].get_opcode(state);
                insn = ByteString::formatted("{} {}", opcode.to_byte_string(), opcode.arguments_string());
            }
            dbgln("{:->{}}| {} -- {}", "", indent * 2, name, insn);
            for (auto& child : node.children())
                print_tree(static_cast<decltype(trie)&>(*child.value), indent + 1);
        };
        print_tree(trie, 0);
    }

    // This is really only worth it if we don't blow up the size by the 2-extra-instruction-per-node scheme, similarly, if no nodes are shared, we're better off not using a tree.
    auto tree_cost = (total_nodes - common_hits) * 2;
    auto chain_cost = total_nodes + alternatives.size() * 2;
    dbgln_if(REGEX_DEBUG, "Total nodes: {}, common hits: {} (tree cost = {}, chain cost = {})", total_nodes, common_hits, tree_cost, chain_cost);

    if (common_hits == 0 || tree_cost > chain_cost) {
        // It's better to lay these out as a normal sequence of instructions.
        auto patch_start = target.size();
        // One ForkJump placeholder per alternative after the first; offsets are filled below.
        for (size_t i = 1; i < alternatives.size(); ++i) {
            target.empend(static_cast<ByteCodeValueType>(OpCodeId::ForkJump));
            target.empend(0u); // To be filled later.
        }

        size_t size_to_jump = 0;
        bool seen_one_empty = false;
        // Walk backwards accumulating each alternative's laid-out size so the fork
        // offsets can be computed; duplicate empty alternatives collapse into one.
        for (size_t i = alternatives.size(); i > 0; --i) {
            auto& entry = alternatives[i - 1];
            if (entry.is_empty()) {
                if (seen_one_empty)
                    continue;
                seen_one_empty = true;
            }

            auto is_first = i == 1;
            auto instruction_size = entry.size() + (is_first ? 0 : 2); // Jump; -> +2
            size_to_jump += instruction_size;

            if (!is_first)
                target[patch_start + (i - 2) * 2 + 1] = size_to_jump + (alternatives.size() - i) * 2;

            dbgln_if(REGEX_DEBUG, "{} size = {}, cum={}", i - 1, instruction_size, size_to_jump);
        }

        seen_one_empty = false;
        // Emit the bodies (again backwards), each followed by a Jump to the end label.
        for (size_t i = alternatives.size(); i > 0; --i) {
            auto& chunk = alternatives[i - 1];
            if (chunk.is_empty()) {
                if (seen_one_empty)
                    continue;
                seen_one_empty = true;
            }

            // Find the chunk that will be emitted after this one (if any); its presence
            // means this chunk is followed by a trailing Jump that must be accounted for.
            ByteCode* previous_chunk = nullptr;
            size_t j = i - 1;
            auto seen_one_empty_before = chunk.is_empty();
            while (j >= 1) {
                --j;
                auto& candidate_chunk = alternatives[j];
                if (candidate_chunk.is_empty()) {
                    if (seen_one_empty_before)
                        continue;
                }
                previous_chunk = &candidate_chunk;
                break;
            }

            size_to_jump -= chunk.size() + (previous_chunk ? 2 : 0);

            target.extend(move(chunk));
            target.empend(static_cast<ByteCodeValueType>(OpCodeId::Jump));
            target.empend(size_to_jump); // Jump to the _END label
        }
    } else {
        target.ensure_capacity(total_bytecode_entries_in_tree + common_hits * 6);

        // Does `node` represent the instruction at `ip` (for any of its recorded IPs)?
        auto node_is = [](Tree const* node, QualifiedIP ip) {
            if (!node->has_metadata())
                return false;

            for (auto& node_ip : node->metadata_value()) {
                if (node_ip.alternative_index == ip.alternative_index && node_ip.instruction_position == ip.instruction_position)
                    return true;
            }

            return false;
        };

        // A pending forward-jump fixup: when the node owning `source_ip` is emitted,
        // write its relative offset into `target[target_ip]`.
        struct Patch {
            QualifiedIP source_ip;
            size_t target_ip;
            bool done { false };
        };
        Vector<Patch> patch_locations;
        patch_locations.ensure_capacity(total_nodes);

        auto add_patch_point = [&](Tree const* node, size_t target_ip) {
            if (!node->has_metadata())
                return;
            auto& node_ip = node->metadata_value().first();
            patch_locations.append({ node_ip, target_ip });
        };

        Queue<Tree*> nodes_to_visit;
        nodes_to_visit.enqueue(&trie);

        // For backwards jumps: original (alternative, ip) -> emitted ip in `target`,
        // only populated when any backwards jump exists.
        HashMap<size_t, NonnullOwnPtr<RedBlackTree<u64, u64>>> instruction_positions;
        if (has_any_backwards_jump)
            MUST(instruction_positions.try_ensure_capacity(alternatives.size()));

        auto ip_mapping_for_alternative = [&](size_t i) -> RedBlackTree<u64, u64>& {
            return *instruction_positions.ensure(i, [] {
                return make<RedBlackTree<u64, u64>>();
            });
        };

        // each node:
        // node.re
        // forkjump child1
        // forkjump child2
        // ...
        while (!nodes_to_visit.is_empty()) {
            auto const* node = nodes_to_visit.dequeue();
            // Resolve any pending patches that point at this node, now that we know where it lands.
            for (auto& patch : patch_locations) {
                if (!patch.done && node_is(node, patch.source_ip)) {
                    auto value = static_cast<ByteCodeValueType>(target.size() - patch.target_ip - 1);
                    target[patch.target_ip] = value;
                    patch.done = true;
                }
            }

            if (!node->value().individual_spans().is_empty()) {
                // The first span of the node key is the instruction itself (the rest are incoming edges).
                auto insn_bytes = node->value().individual_spans().first();

                target.ensure_capacity(target.size() + insn_bytes.size());
                state.instruction_position = target.size();
                target.append(insn_bytes);

                if (has_any_backwards_jump) {
                    for (auto& ip : node->metadata_value())
                        ip_mapping_for_alternative(ip.alternative_index).insert(ip.instruction_position, state.instruction_position);
                }

                auto& opcode = target.get_opcode(state);

                ssize_t jump_offset;
                auto is_jump = true;
                auto patch_location = state.instruction_position + 1;

                switch (opcode.opcode_id()) {
                case OpCodeId::Jump:
                    jump_offset = static_cast<OpCode_Jump const&>(opcode).offset();
                    break;
                case OpCodeId::JumpNonEmpty:
                    jump_offset = static_cast<OpCode_JumpNonEmpty const&>(opcode).offset();
                    break;
                case OpCodeId::ForkJump:
                    jump_offset = static_cast<OpCode_ForkJump const&>(opcode).offset();
                    break;
                case OpCodeId::ForkStay:
                    jump_offset = static_cast<OpCode_ForkStay const&>(opcode).offset();
                    break;
                case OpCodeId::ForkReplaceJump:
                    jump_offset = static_cast<OpCode_ForkReplaceJump const&>(opcode).offset();
                    break;
                case OpCodeId::ForkReplaceStay:
                    jump_offset = static_cast<OpCode_ForkReplaceStay const&>(opcode).offset();
                    break;
                case OpCodeId::Repeat:
                    // Repeat's offset is an unsigned distance backwards from the start of the opcode.
                    jump_offset = static_cast<ssize_t>(0) - static_cast<ssize_t>(static_cast<OpCode_Repeat const&>(opcode).offset()) - static_cast<ssize_t>(opcode.size());
                    break;
                default:
                    is_jump = false;
                    break;
                }

                if (is_jump) {
                    VERIFY(node->has_metadata());
                    QualifiedIP ip = node->metadata_value().first();
                    auto intended_jump_ip = ip.instruction_position + jump_offset + opcode.size();
                    if (jump_offset < 0) {
                        VERIFY(has_any_backwards_jump);
                        // We should've already seen this instruction, so we can just patch it in.
                        auto& ip_mapping = ip_mapping_for_alternative(ip.alternative_index);
                        auto target_ip = ip_mapping.find(intended_jump_ip);
                        if (!target_ip) {
                            RegexDebug dbg;
                            size_t x = 0;
                            for (auto& entry : alternatives) {
                                warnln("----------- {} ----------", x++);
                                dbg.print_bytecode(entry);
                            }

                            dbgln("Regex Tree / Unknown backwards jump: {}@{} -> {}",
                                ip.instruction_position,
                                ip.alternative_index,
                                intended_jump_ip);
                            VERIFY_NOT_REACHED();
                        }
                        target[patch_location] = static_cast<ByteCodeValueType>(*target_ip - patch_location - 1);
                    } else {
                        // Forward jump: target not emitted yet, leave a patch to be resolved later.
                        patch_locations.append({ QualifiedIP { ip.alternative_index, intended_jump_ip }, patch_location });
                    }
                }
            }

            // Emit one ForkJump per child; the offsets are patch points resolved when
            // the child node itself is laid out.
            for (auto const& child : node->children()) {
                auto* child_node = static_cast<Tree*>(child.value.ptr());
                target.append(static_cast<ByteCodeValueType>(OpCodeId::ForkJump));
                add_patch_point(child_node, target.size());
                target.append(static_cast<ByteCodeValueType>(0));
                nodes_to_visit.enqueue(child_node);
            }
        }

        for (auto& patch : patch_locations) {
            if (patch.done)
                continue;

            auto& alternative = alternatives[patch.source_ip.alternative_index];
            if (patch.source_ip.instruction_position >= alternative.size()) {
                // This just wants to jump to the end of the alternative, which is fine.
                // Patch it to jump to the end of the target instead.
                target[patch.target_ip] = static_cast<ByteCodeValueType>(target.size() - patch.target_ip - 1);
                continue;
            }

            dbgln("Regex Tree / Unpatched jump: {}@{} -> {}@{}",
                patch.source_ip.instruction_position,
                patch.source_ip.alternative_index,
                patch.target_ip,
                target[patch.target_ip]);
            VERIFY_NOT_REACHED();
        }
    }
}
// Result of attempting to insert a single CompareTypeAndValuePair into a
// char-range lookup table; tells the caller how to proceed when the pair
// can't simply be stored as a range.
enum class LookupTableInsertionOutcome {
    Successful,               // Pair was stored in the table (Char / CharRange).
    ReplaceWithAnyChar,       // Pair is AnyChar; it subsumes the table contents.
    TemporaryInversionNeeded, // Pair is TemporaryInverse; switch tables for the next entry.
    PermanentInversionNeeded, // Pair is Inverse; everything after it is inverted.
    FlushOnInsertion,         // Pair is And; flush now and after every following insertion.
    FinishFlushOnInsertion,   // Pair is EndAndOr; flush now and stop flush-on-insertion.
    CannotPlaceInTable,       // Pair must be emitted verbatim (class, property, reference, ...).
};
  1094. static LookupTableInsertionOutcome insert_into_lookup_table(RedBlackTree<ByteCodeValueType, CharRange>& table, CompareTypeAndValuePair pair)
  1095. {
  1096. switch (pair.type) {
  1097. case CharacterCompareType::Inverse:
  1098. return LookupTableInsertionOutcome::PermanentInversionNeeded;
  1099. case CharacterCompareType::TemporaryInverse:
  1100. return LookupTableInsertionOutcome::TemporaryInversionNeeded;
  1101. case CharacterCompareType::AnyChar:
  1102. return LookupTableInsertionOutcome::ReplaceWithAnyChar;
  1103. case CharacterCompareType::CharClass:
  1104. return LookupTableInsertionOutcome::CannotPlaceInTable;
  1105. case CharacterCompareType::Char:
  1106. table.insert(pair.value, { (u32)pair.value, (u32)pair.value });
  1107. break;
  1108. case CharacterCompareType::CharRange: {
  1109. CharRange range { pair.value };
  1110. table.insert(range.from, range);
  1111. break;
  1112. }
  1113. case CharacterCompareType::EndAndOr:
  1114. return LookupTableInsertionOutcome::FinishFlushOnInsertion;
  1115. case CharacterCompareType::And:
  1116. return LookupTableInsertionOutcome::FlushOnInsertion;
  1117. case CharacterCompareType::Reference:
  1118. case CharacterCompareType::Property:
  1119. case CharacterCompareType::GeneralCategory:
  1120. case CharacterCompareType::Script:
  1121. case CharacterCompareType::ScriptExtension:
  1122. case CharacterCompareType::Or:
  1123. return LookupTableInsertionOutcome::CannotPlaceInTable;
  1124. case CharacterCompareType::Undefined:
  1125. case CharacterCompareType::RangeExpressionDummy:
  1126. case CharacterCompareType::String:
  1127. case CharacterCompareType::LookupTable:
  1128. VERIFY_NOT_REACHED();
  1129. }
  1130. return LookupTableInsertionOutcome::Successful;
  1131. }
// Emit a single Compare opcode into `target` that implements the character
// class described by `pairs`. With more than one pair, adjacent Char/CharRange
// entries are coalesced into sorted, merged LookupTable arguments; inversion
// markers are handled by keeping two tables (regular and inverted) and
// swapping which one receives insertions.
void Optimizer::append_character_class(ByteCode& target, Vector<CompareTypeAndValuePair>&& pairs)
{
    ByteCode arguments;
    size_t argument_count = 0;

    if (pairs.size() <= 1) {
        // Nothing to coalesce; serialize the (at most one) pair as-is.
        for (auto& pair : pairs) {
            arguments.append(to_underlying(pair.type));
            // These compare types carry no value payload.
            if (pair.type != CharacterCompareType::AnyChar
                && pair.type != CharacterCompareType::TemporaryInverse
                && pair.type != CharacterCompareType::Inverse
                && pair.type != CharacterCompareType::And
                && pair.type != CharacterCompareType::Or
                && pair.type != CharacterCompareType::EndAndOr)
                arguments.append(pair.value);
            ++argument_count;
        }
    } else {
        RedBlackTree<ByteCodeValueType, CharRange> table;
        RedBlackTree<ByteCodeValueType, CharRange> inverted_table;
        // `current_table` is where insertions go; swapped with the inverted table
        // while a TemporaryInverse is in effect.
        auto* current_table = &table;
        auto* current_inverted_table = &inverted_table;
        bool invert_for_next_iteration = false;
        bool is_currently_inverted = false;

        // Serialize both tables into LookupTable arguments (the inverted one prefixed
        // with TemporaryInverse), merging overlapping/adjacent ranges, then clear them.
        auto flush_tables = [&] {
            auto append_table = [&](auto& table) {
                ++argument_count;
                arguments.append(to_underlying(CharacterCompareType::LookupTable));
                auto size_index = arguments.size();
                arguments.append(0); // Placeholder for the range count, patched below.
                Optional<CharRange> active_range;
                size_t range_count = 0;
                // The tree iterates in ascending `from` order, so a single pass can
                // merge ranges that overlap or touch (off-by-one adjacency included).
                for (auto& range : table) {
                    if (!active_range.has_value()) {
                        active_range = range;
                        continue;
                    }

                    if (range.from <= active_range->to + 1 && range.to + 1 >= active_range->from) {
                        active_range = CharRange { min(range.from, active_range->from), max(range.to, active_range->to) };
                    } else {
                        ++range_count;
                        arguments.append(active_range.release_value());
                        active_range = range;
                    }
                }
                if (active_range.has_value()) {
                    ++range_count;
                    arguments.append(active_range.release_value());
                }
                arguments[size_index] = range_count;
            };

            auto contains_regular_table = !table.is_empty();
            auto contains_inverted_table = !inverted_table.is_empty();
            if (contains_regular_table)
                append_table(table);

            if (contains_inverted_table) {
                ++argument_count;
                arguments.append(to_underlying(CharacterCompareType::TemporaryInverse));
                append_table(inverted_table);
            }

            table.clear();
            inverted_table.clear();
        };

        auto flush_on_every_insertion = false;
        for (auto& value : pairs) {
            // TemporaryInverse only applies to the immediately following entry.
            auto should_invert_after_this_iteration = invert_for_next_iteration;
            invert_for_next_iteration = false;

            auto insertion_result = insert_into_lookup_table(*current_table, value);
            switch (insertion_result) {
            case LookupTableInsertionOutcome::Successful:
                if (flush_on_every_insertion)
                    flush_tables();
                break;
            case LookupTableInsertionOutcome::ReplaceWithAnyChar: {
                // AnyChar subsumes everything gathered so far.
                table.clear();
                inverted_table.clear();
                arguments.append(to_underlying(CharacterCompareType::AnyChar));
                ++argument_count;
                break;
            }
            case LookupTableInsertionOutcome::TemporaryInversionNeeded:
                swap(current_table, current_inverted_table);
                invert_for_next_iteration = true;
                is_currently_inverted = !is_currently_inverted;
                break;
            case LookupTableInsertionOutcome::PermanentInversionNeeded:
                // Everything before the Inverse must be emitted un-inverted first.
                flush_tables();
                arguments.append(to_underlying(CharacterCompareType::Inverse));
                ++argument_count;
                break;
            case LookupTableInsertionOutcome::FlushOnInsertion:
            case LookupTableInsertionOutcome::FinishFlushOnInsertion:
                flush_tables();
                flush_on_every_insertion = insertion_result == LookupTableInsertionOutcome::FlushOnInsertion;
                [[fallthrough]];
            case LookupTableInsertionOutcome::CannotPlaceInTable:
                // Serialize this pair verbatim, restoring the inversion context first.
                if (is_currently_inverted) {
                    arguments.append(to_underlying(CharacterCompareType::TemporaryInverse));
                    ++argument_count;
                }
                arguments.append(to_underlying(value.type));
                // Same no-payload types as in the simple path above.
                if (value.type != CharacterCompareType::AnyChar
                    && value.type != CharacterCompareType::TemporaryInverse
                    && value.type != CharacterCompareType::Inverse
                    && value.type != CharacterCompareType::And
                    && value.type != CharacterCompareType::Or
                    && value.type != CharacterCompareType::EndAndOr)
                    arguments.append(value.value);
                ++argument_count;
                break;
            }

            if (should_invert_after_this_iteration) {
                swap(current_table, current_inverted_table);
                is_currently_inverted = !is_currently_inverted;
            }
        }
        flush_tables();
    }

    target.empend(static_cast<ByteCodeValueType>(OpCodeId::Compare));
    target.empend(argument_count); // number of arguments
    target.empend(arguments.size()); // size of arguments
    target.extend(move(arguments));
}
// Explicit instantiations: emit run_optimization_passes() in this TU for every
// parser front-end the library ships, so other TUs can link against them.
template void Regex<PosixBasicParser>::run_optimization_passes();
template void Regex<PosixExtendedParser>::run_optimization_passes();
template void Regex<ECMA262Parser>::run_optimization_passes();
  1257. }