YarrJIT.cpp
/*
 * Copyright (C) 2009, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
  25. #include "config.h"
  26. #include "YarrJIT.h"
  27. #include <wtf/ASCIICType.h>
  28. #include "LinkBuffer.h"
  29. #include "Options.h"
  30. #include "Yarr.h"
  31. #include "YarrCanonicalizeUCS2.h"
  32. #if ENABLE(YARR_JIT)
  33. #if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)
  34. using namespace WTF;
  35. namespace JSC { namespace Yarr {

template<YarrJITCompileMode compileMode>
class YarrGenerator : private MacroAssembler {
    friend void jitCompile(VM*, YarrCodeBlock& jitObject, const String& pattern, unsigned& numSubpatterns, const char*& error, bool ignoreCase, bool multiline);

#if CPU(ARM)
    static const RegisterID input = ARMRegisters::r0;
    static const RegisterID index = ARMRegisters::r1;
    static const RegisterID length = ARMRegisters::r2;
    static const RegisterID output = ARMRegisters::r4;

    static const RegisterID regT0 = ARMRegisters::r5;
    static const RegisterID regT1 = ARMRegisters::r6;

    static const RegisterID returnRegister = ARMRegisters::r0;
    static const RegisterID returnRegister2 = ARMRegisters::r1;
#elif CPU(MIPS)
    static const RegisterID input = MIPSRegisters::a0;
    static const RegisterID index = MIPSRegisters::a1;
    static const RegisterID length = MIPSRegisters::a2;
    static const RegisterID output = MIPSRegisters::a3;

    static const RegisterID regT0 = MIPSRegisters::t4;
    static const RegisterID regT1 = MIPSRegisters::t5;

    static const RegisterID returnRegister = MIPSRegisters::v0;
    static const RegisterID returnRegister2 = MIPSRegisters::v1;
#elif CPU(SH4)
    static const RegisterID input = SH4Registers::r4;
    static const RegisterID index = SH4Registers::r5;
    static const RegisterID length = SH4Registers::r6;
    static const RegisterID output = SH4Registers::r7;

    static const RegisterID regT0 = SH4Registers::r0;
    static const RegisterID regT1 = SH4Registers::r1;

    static const RegisterID returnRegister = SH4Registers::r0;
    static const RegisterID returnRegister2 = SH4Registers::r1;
#elif CPU(X86)
    static const RegisterID input = X86Registers::eax;
    static const RegisterID index = X86Registers::edx;
    static const RegisterID length = X86Registers::ecx;
    static const RegisterID output = X86Registers::edi;

    static const RegisterID regT0 = X86Registers::ebx;
    static const RegisterID regT1 = X86Registers::esi;

    static const RegisterID returnRegister = X86Registers::eax;
    static const RegisterID returnRegister2 = X86Registers::edx;
#elif CPU(X86_64)
#if !OS(WINDOWS)
    static const RegisterID input = X86Registers::edi;
    static const RegisterID index = X86Registers::esi;
    static const RegisterID length = X86Registers::edx;
    static const RegisterID output = X86Registers::ecx;
#else
    // If the return value doesn't fit in 64 bits, its destination is pointed to by rcx and the parameters are shifted.
    // http://msdn.microsoft.com/en-us/library/7572ztz4.aspx
    COMPILE_ASSERT(sizeof(MatchResult) > sizeof(void*), MatchResult_does_not_fit_in_64bits);
    static const RegisterID input = X86Registers::edx;
    static const RegisterID index = X86Registers::r8;
    static const RegisterID length = X86Registers::r9;
    static const RegisterID output = X86Registers::r10;
#endif

    static const RegisterID regT0 = X86Registers::eax;
    static const RegisterID regT1 = X86Registers::ebx;

    static const RegisterID returnRegister = X86Registers::eax;
    static const RegisterID returnRegister2 = X86Registers::edx;
#endif
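
    // Note on the reordering below (editorial summary, not original commentary):
    // when a fixed-count character class is immediately followed by a fixed-count
    // pattern character, the two are swapped so the cheaper single-character
    // compare is generated - and can fail - before the more expensive class
    // check. Successive iterations of the loop bubble a class rightwards past a
    // run of such characters. Since both terms are fixed-count and each reads
    // its own fixed offset, the swap cannot change matching behaviour, only the
    // order in which failures are detected.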
    void optimizeAlternative(PatternAlternative* alternative)
    {
        if (!alternative->m_terms.size())
            return;

        for (unsigned i = 0; i < alternative->m_terms.size() - 1; ++i) {
            PatternTerm& term = alternative->m_terms[i];
            PatternTerm& nextTerm = alternative->m_terms[i + 1];

            if ((term.type == PatternTerm::TypeCharacterClass)
                && (term.quantityType == QuantifierFixedCount)
                && (nextTerm.type == PatternTerm::TypePatternCharacter)
                && (nextTerm.quantityType == QuantifierFixedCount)) {
                PatternTerm termCopy = term;
                alternative->m_terms[i] = nextTerm;
                alternative->m_terms[i + 1] = termCopy;
            }
        }
    }
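
    // How the range matcher below generates code (editorial summary): 'ranges'
    // and 'matches' arrive sorted. The routine takes the middle range [lo..hi],
    // branches on character >= lo to split the character space, recurses over
    // the lower half of the ranges (folding in any singleton matches below lo),
    // skips singletons inside [lo..hi] (the range test subsumes them), emits the
    // character <= hi test as a match, and then iterates on the upper half. The
    // generated code is therefore a roughly balanced binary tree of branches
    // rather than a linear chain of compares.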
    void matchCharacterClassRange(RegisterID character, JumpList& failures, JumpList& matchDest, const CharacterRange* ranges, unsigned count, unsigned* matchIndex, const UChar* matches, unsigned matchCount)
    {
        do {
            // pick which range we're going to generate
            int which = count >> 1;
            char lo = ranges[which].begin;
            char hi = ranges[which].end;

            // check if there are any ranges or matches below lo. If not, just jl to failure -
            // if there is anything else to check, check that first, if it falls through jmp to failure.
            if ((*matchIndex < matchCount) && (matches[*matchIndex] < lo)) {
                Jump loOrAbove = branch32(GreaterThanOrEqual, character, Imm32((unsigned short)lo));

                // generate code for all ranges before this one
                if (which)
                    matchCharacterClassRange(character, failures, matchDest, ranges, which, matchIndex, matches, matchCount);

                while ((*matchIndex < matchCount) && (matches[*matchIndex] < lo)) {
                    matchDest.append(branch32(Equal, character, Imm32((unsigned short)matches[*matchIndex])));
                    ++*matchIndex;
                }
                failures.append(jump());

                loOrAbove.link(this);
            } else if (which) {
                Jump loOrAbove = branch32(GreaterThanOrEqual, character, Imm32((unsigned short)lo));

                matchCharacterClassRange(character, failures, matchDest, ranges, which, matchIndex, matches, matchCount);
                failures.append(jump());

                loOrAbove.link(this);
            } else
                failures.append(branch32(LessThan, character, Imm32((unsigned short)lo)));

            while ((*matchIndex < matchCount) && (matches[*matchIndex] <= hi))
                ++*matchIndex;

            matchDest.append(branch32(LessThanOrEqual, character, Imm32((unsigned short)hi)));
            // fall through to here, the value is above hi.

            // shuffle along & loop around if there are any more matches to handle.
            unsigned next = which + 1;
            ranges += next;
            count -= next;
        } while (count);
    }

    void matchCharacterClass(RegisterID character, JumpList& matchDest, const CharacterClass* charClass)
    {
        if (charClass->m_table) {
            ExtendedAddress tableEntry(character, reinterpret_cast<intptr_t>(charClass->m_table));
            matchDest.append(branchTest8(charClass->m_tableInverted ? Zero : NonZero, tableEntry));
            return;
        }
        Jump unicodeFail;
        if (charClass->m_matchesUnicode.size() || charClass->m_rangesUnicode.size()) {
            Jump isAscii = branch32(LessThanOrEqual, character, TrustedImm32(0x7f));

            if (charClass->m_matchesUnicode.size()) {
                for (unsigned i = 0; i < charClass->m_matchesUnicode.size(); ++i) {
                    UChar ch = charClass->m_matchesUnicode[i];
                    matchDest.append(branch32(Equal, character, Imm32(ch)));
                }
            }

            if (charClass->m_rangesUnicode.size()) {
                for (unsigned i = 0; i < charClass->m_rangesUnicode.size(); ++i) {
                    UChar lo = charClass->m_rangesUnicode[i].begin;
                    UChar hi = charClass->m_rangesUnicode[i].end;

                    Jump below = branch32(LessThan, character, Imm32(lo));
                    matchDest.append(branch32(LessThanOrEqual, character, Imm32(hi)));
                    below.link(this);
                }
            }

            unicodeFail = jump();
            isAscii.link(this);
        }

        if (charClass->m_ranges.size()) {
            unsigned matchIndex = 0;
            JumpList failures;
            matchCharacterClassRange(character, failures, matchDest, charClass->m_ranges.begin(), charClass->m_ranges.size(), &matchIndex, charClass->m_matches.begin(), charClass->m_matches.size());
            while (matchIndex < charClass->m_matches.size())
                matchDest.append(branch32(Equal, character, Imm32((unsigned short)charClass->m_matches[matchIndex++])));

            failures.link(this);
        } else if (charClass->m_matches.size()) {
            // optimization: gather 'a','A' etc back together, can mask & test once.
            Vector<char> matchesAZaz;

            for (unsigned i = 0; i < charClass->m_matches.size(); ++i) {
                char ch = charClass->m_matches[i];
                if (m_pattern.m_ignoreCase) {
                    if (isASCIILower(ch)) {
                        matchesAZaz.append(ch);
                        continue;
                    }
                    if (isASCIIUpper(ch))
                        continue;
                }
                matchDest.append(branch32(Equal, character, Imm32((unsigned short)ch)));
            }

            if (unsigned countAZaz = matchesAZaz.size()) {
                or32(TrustedImm32(32), character);
                for (unsigned i = 0; i < countAZaz; ++i)
                    matchDest.append(branch32(Equal, character, TrustedImm32(matchesAZaz[i])));
            }
        }

        if (charClass->m_matchesUnicode.size() || charClass->m_rangesUnicode.size())
            unicodeFail.link(this);
    }
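
    // Input-position convention for the helpers below (editorial summary):
    // 'index' holds the number of code units currently consumed/checked and
    // 'length' the total input length, so availability tests are simple
    // compares of index against length. As the comment on
    // jumpIfNoAvailableInput() warns, that helper adds countToCheck to index
    // *before* testing, so on its failure path index has already been advanced
    // and callers must compensate.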

    // Jumps if input not available; will have (incorrectly) incremented already!
    Jump jumpIfNoAvailableInput(unsigned countToCheck = 0)
    {
        if (countToCheck)
            add32(Imm32(countToCheck), index);
        return branch32(Above, index, length);
    }

    Jump jumpIfAvailableInput(unsigned countToCheck)
    {
        add32(Imm32(countToCheck), index);
        return branch32(BelowOrEqual, index, length);
    }

    Jump checkInput()
    {
        return branch32(BelowOrEqual, index, length);
    }

    Jump atEndOfInput()
    {
        return branch32(Equal, index, length);
    }

    Jump notAtEndOfInput()
    {
        return branch32(NotEqual, index, length);
    }

    Jump jumpIfCharNotEquals(UChar ch, int inputPosition, RegisterID character)
    {
        readCharacter(inputPosition, character);

        // For case-insensitive compares, non-ASCII characters that have different
        // upper & lower case representations are converted to a character class.
        ASSERT(!m_pattern.m_ignoreCase || isASCIIAlpha(ch) || isCanonicallyUnique(ch));
        if (m_pattern.m_ignoreCase && isASCIIAlpha(ch)) {
            or32(TrustedImm32(0x20), character);
            ch |= 0x20;
        }

        return branch32(NotEqual, character, Imm32(ch));
    }

    void readCharacter(int inputPosition, RegisterID reg)
    {
        if (m_charSize == Char8)
            load8(BaseIndex(input, index, TimesOne, inputPosition * sizeof(char)), reg);
        else
            load16(BaseIndex(input, index, TimesTwo, inputPosition * sizeof(UChar)), reg);
    }

    void storeToFrame(RegisterID reg, unsigned frameLocation)
    {
        poke(reg, frameLocation);
    }

    void storeToFrame(TrustedImm32 imm, unsigned frameLocation)
    {
        poke(imm, frameLocation);
    }

    DataLabelPtr storeToFrameWithPatch(unsigned frameLocation)
    {
        return storePtrWithPatch(TrustedImmPtr(0), Address(stackPointerRegister, frameLocation * sizeof(void*)));
    }

    void loadFromFrame(unsigned frameLocation, RegisterID reg)
    {
        peek(reg, frameLocation);
    }

    void loadFromFrameAndJump(unsigned frameLocation)
    {
        jump(Address(stackPointerRegister, frameLocation * sizeof(void*)));
    }

    void initCallFrame()
    {
        unsigned callFrameSize = m_pattern.m_body->m_callFrameSize;
        if (callFrameSize)
            subPtr(Imm32(callFrameSize * sizeof(void*)), stackPointerRegister);
    }

    void removeCallFrame()
    {
        unsigned callFrameSize = m_pattern.m_body->m_callFrameSize;
        if (callFrameSize)
            addPtr(Imm32(callFrameSize * sizeof(void*)), stackPointerRegister);
    }

    // Used to record subpatterns; should only be called if compileMode is IncludeSubpatterns.
    void setSubpatternStart(RegisterID reg, unsigned subpattern)
    {
        ASSERT(subpattern);
        // FIXME: should be able to ASSERT(compileMode == IncludeSubpatterns), but then this function is conditionally NORETURN. :-(
        store32(reg, Address(output, (subpattern << 1) * sizeof(int)));
    }

    void setSubpatternEnd(RegisterID reg, unsigned subpattern)
    {
        ASSERT(subpattern);
        // FIXME: should be able to ASSERT(compileMode == IncludeSubpatterns), but then this function is conditionally NORETURN. :-(
        store32(reg, Address(output, ((subpattern << 1) + 1) * sizeof(int)));
    }

    void clearSubpatternStart(unsigned subpattern)
    {
        ASSERT(subpattern);
        // FIXME: should be able to ASSERT(compileMode == IncludeSubpatterns), but then this function is conditionally NORETURN. :-(
        store32(TrustedImm32(-1), Address(output, (subpattern << 1) * sizeof(int)));
    }
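
    // Output vector layout implied by the accessors above (editorial summary):
    // for subpattern n, output[2 * n] holds the match start and
    // output[2 * n + 1] the match end. clearSubpatternStart(n) writes -1 into
    // output[2 * n] only; a start of -1 is what marks the group as unmatched.
    // For example, when /a(b)(c)?/ matches "ab", group 2 is cleared by writing
    // -1 to output[4].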

    // We use one of three different strategies to track the start of the current match,
    // while matching.
    // 1) If the pattern has a fixed size, do nothing! - we calculate the value lazily
    //    at the end of matching. This is irrespective of compileMode, and in this case
    //    these methods should never be called.
    // 2) If we're compiling IncludeSubpatterns, 'output' contains a pointer to an output
    //    vector, store the match start in the output vector.
    // 3) If we're compiling MatchOnly, 'output' is unused, store the match start directly
    //    in this register.
    void setMatchStart(RegisterID reg)
    {
        ASSERT(!m_pattern.m_body->m_hasFixedSize);
        if (compileMode == IncludeSubpatterns)
            store32(reg, output);
        else
            move(reg, output);
    }

    void getMatchStart(RegisterID reg)
    {
        ASSERT(!m_pattern.m_body->m_hasFixedSize);
        if (compileMode == IncludeSubpatterns)
            load32(output, reg);
        else
            move(output, reg);
    }

    enum YarrOpCode {
        // These nodes wrap body alternatives - those in the main disjunction,
        // rather than subpatterns or assertions. These are chained together in
        // a doubly linked list, with a 'begin' node for the first alternative,
        // a 'next' node for each subsequent alternative, and an 'end' node at
        // the end. In the case of repeating alternatives, the 'end' node also
        // has a reference back to 'begin'.
        OpBodyAlternativeBegin,
        OpBodyAlternativeNext,
        OpBodyAlternativeEnd,
        // Similar to the body alternatives, but used for subpatterns with two
        // or more alternatives.
        OpNestedAlternativeBegin,
        OpNestedAlternativeNext,
        OpNestedAlternativeEnd,
        // Used for alternatives in subpatterns where there is only a single
        // alternative (backtracking is easier in these cases), or for alternatives
        // which never need to be backtracked (those in parenthetical assertions,
        // terminal subpatterns).
        OpSimpleNestedAlternativeBegin,
        OpSimpleNestedAlternativeNext,
        OpSimpleNestedAlternativeEnd,
        // Used to wrap 'Once' subpattern matches (quantityCount == 1).
        OpParenthesesSubpatternOnceBegin,
        OpParenthesesSubpatternOnceEnd,
        // Used to wrap 'Terminal' subpattern matches (at the end of the regexp).
        OpParenthesesSubpatternTerminalBegin,
        OpParenthesesSubpatternTerminalEnd,
        // Used to wrap parenthetical assertions.
        OpParentheticalAssertionBegin,
        OpParentheticalAssertionEnd,
        // Wraps all simple terms (pattern characters, character classes).
        OpTerm,
        // Where an expression contains only 'once through' body alternatives
        // and no repeating ones, this op is used to return match failure.
        OpMatchFailed
    };
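
    // Illustrative lowering (an editorial sketch; the exact op sequence is
    // produced by compilation code outside this excerpt): a two-alternative
    // body such as /ab|c/ would become roughly
    //     OpBodyAlternativeBegin, OpTerm('a'), OpTerm('b'),
    //     OpBodyAlternativeNext,  OpTerm('c'),
    //     OpBodyAlternativeEnd
    // with OpMatchFailed emitted only for expressions whose body alternatives
    // are all 'once through'.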

    // This structure is used to hold the compiled opcode information,
    // including references back to the original PatternTerm/PatternAlternatives,
    // and JIT compilation data structures.
    struct YarrOp {
        explicit YarrOp(PatternTerm* term)
            : m_op(OpTerm)
            , m_term(term)
            , m_isDeadCode(false)
        {
        }

        explicit YarrOp(YarrOpCode op)
            : m_op(op)
            , m_isDeadCode(false)
        {
        }

        // The operation, as a YarrOpCode, and also a reference to the PatternTerm.
        YarrOpCode m_op;
        PatternTerm* m_term;

        // For alternatives, this holds the PatternAlternative and doubly linked
        // references to this alternative's siblings. In the case of the
        // OpBodyAlternativeEnd node at the end of a section of repeating nodes,
        // m_nextOp will reference the OpBodyAlternativeBegin node of the first
        // repeating alternative.
        PatternAlternative* m_alternative;
        size_t m_previousOp;
        size_t m_nextOp;

        // Used to record a set of Jumps out of the generated code, typically
        // used for jumps out to backtracking code, and a single reentry back
        // into the code for a node (likely where a backtrack will trigger
        // rematching).
        Label m_reentry;
        JumpList m_jumps;

        // Used for backtracking when the prior alternative did not consume any
        // characters but matched.
        Jump m_zeroLengthMatch;

        // This flag is used to null out the second pattern character, when
        // two are fused to match a pair together.
        bool m_isDeadCode;

        // Currently used in the case of some of the more complex management of
        // 'm_checked', to cache the offset used in this alternative, to avoid
        // recalculating it.
        int m_checkAdjust;

        // Used by OpNestedAlternativeNext/End to hold the pointer to the
        // value that will be pushed into the pattern's frame to return to,
        // upon backtracking back into the disjunction.
        DataLabelPtr m_returnAddress;
    };

    // BacktrackingState
    // This class encapsulates information about the state of code generation
    // whilst generating the code for backtracking, when a term fails to match.
    // Upon entry to code generation of the backtracking code for a given node,
    // the Backtracking state will hold references to all control flow sources
    // that are outputs in need of further backtracking from the prior node
    // generated (which is the subsequent operation in the regular expression,
    // and in the m_ops Vector, since we generate backtracking in reverse).
    // These references to control flow take the form of:
    //  - A jump list of jumps, to be linked to code that will backtrack them
    //    further.
    //  - A set of DataLabelPtr values, to be populated with values to be
    //    treated effectively as return addresses backtracking into complex
    //    subpatterns.
    //  - A flag indicating that the current sequence of generated code up to
    //    this point requires backtracking.
    class BacktrackingState {
    public:
        BacktrackingState()
            : m_pendingFallthrough(false)
        {
        }

        // Add a jump or jumps, a return address, or set the flag indicating
        // that the current 'fallthrough' control flow requires backtracking.
        void append(const Jump& jump)
        {
            m_laterFailures.append(jump);
        }
        void append(JumpList& jumpList)
        {
            m_laterFailures.append(jumpList);
        }
        void append(const DataLabelPtr& returnAddress)
        {
            m_pendingReturns.append(returnAddress);
        }
        void fallthrough()
        {
            ASSERT(!m_pendingFallthrough);
            m_pendingFallthrough = true;
        }

        // These methods clear the backtracking state, either linking to the
        // current location, a provided label, or copying the backtracking out
        // to a JumpList. All actions may require code generation to take place,
        // and as such are passed a pointer to the assembler.
        void link(MacroAssembler* assembler)
        {
            if (m_pendingReturns.size()) {
                Label here(assembler);
                for (unsigned i = 0; i < m_pendingReturns.size(); ++i)
                    m_backtrackRecords.append(ReturnAddressRecord(m_pendingReturns[i], here));
                m_pendingReturns.clear();
            }
            m_laterFailures.link(assembler);
            m_laterFailures.clear();
            m_pendingFallthrough = false;
        }
        void linkTo(Label label, MacroAssembler* assembler)
        {
            if (m_pendingReturns.size()) {
                for (unsigned i = 0; i < m_pendingReturns.size(); ++i)
                    m_backtrackRecords.append(ReturnAddressRecord(m_pendingReturns[i], label));
                m_pendingReturns.clear();
            }
            if (m_pendingFallthrough)
                assembler->jump(label);
            m_laterFailures.linkTo(label, assembler);
            m_laterFailures.clear();
            m_pendingFallthrough = false;
        }
        void takeBacktracksToJumpList(JumpList& jumpList, MacroAssembler* assembler)
        {
            if (m_pendingReturns.size()) {
                Label here(assembler);
                for (unsigned i = 0; i < m_pendingReturns.size(); ++i)
                    m_backtrackRecords.append(ReturnAddressRecord(m_pendingReturns[i], here));
                m_pendingReturns.clear();
                m_pendingFallthrough = true;
            }
            if (m_pendingFallthrough)
                jumpList.append(assembler->jump());
            jumpList.append(m_laterFailures);
            m_laterFailures.clear();
            m_pendingFallthrough = false;
        }
        bool isEmpty()
        {
            return m_laterFailures.empty() && m_pendingReturns.isEmpty() && !m_pendingFallthrough;
        }

        // Called at the end of code generation to link all return addresses.
        void linkDataLabels(LinkBuffer& linkBuffer)
        {
            ASSERT(isEmpty());
            for (unsigned i = 0; i < m_backtrackRecords.size(); ++i)
                linkBuffer.patch(m_backtrackRecords[i].m_dataLabel, linkBuffer.locationOf(m_backtrackRecords[i].m_backtrackLocation));
        }

    private:
        struct ReturnAddressRecord {
            ReturnAddressRecord(DataLabelPtr dataLabel, Label backtrackLocation)
                : m_dataLabel(dataLabel)
                , m_backtrackLocation(backtrackLocation)
            {
            }

            DataLabelPtr m_dataLabel;
            Label m_backtrackLocation;
        };

        JumpList m_laterFailures;
        bool m_pendingFallthrough;
        Vector<DataLabelPtr, 4> m_pendingReturns;
        Vector<ReturnAddressRecord, 4> m_backtrackRecords;
    };
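
    // Typical usage of BacktrackingState (editorial sketch, using only the
    // methods defined above): backtracking code is generated in reverse over
    // m_ops, so a term's backtracking generator appends its outstanding failure
    // jumps, and whichever earlier node can handle them links everything
    // accumulated so far:
    //     m_backtrackingState.append(op.m_jumps); // defer this term's failures
    //     ...
    //     m_backtrackingState.link(this);         // bind them to the code emitted here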

    // Generation methods:
    // ===================

    // This method provides a default implementation of backtracking common
    // to many terms; terms commonly jump out of the forwards matching path
    // on any failed conditions, and add these jumps to the m_jumps list. If
    // no special handling is required we can often just backtrack to m_jumps.
    void backtrackTermDefault(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        m_backtrackingState.append(op.m_jumps);
    }
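
    // A note on m_checked (editorial summary, inferred from the uses below):
    // input availability is verified in blocks, and 'index' is pre-advanced by
    // the m_checked code units known to be in range. A term at inputPosition p
    // therefore reads input at the fixed offset (p - m_checked) relative to
    // index, and "at start of input" is the test index == m_checked.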

    void generateAssertionBOL(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        if (m_pattern.m_multiline) {
            const RegisterID character = regT0;

            JumpList matchDest;
            if (!term->inputPosition)
                matchDest.append(branch32(Equal, index, Imm32(m_checked)));

            readCharacter((term->inputPosition - m_checked) - 1, character);
            matchCharacterClass(character, matchDest, m_pattern.newlineCharacterClass());
            op.m_jumps.append(jump());

            matchDest.link(this);
        } else {
            // Erk, really should poison out these alternatives early. :-/
            if (term->inputPosition)
                op.m_jumps.append(jump());
            else
                op.m_jumps.append(branch32(NotEqual, index, Imm32(m_checked)));
        }
    }
    void backtrackAssertionBOL(size_t opIndex)
    {
        backtrackTermDefault(opIndex);
    }

    void generateAssertionEOL(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        if (m_pattern.m_multiline) {
            const RegisterID character = regT0;

            JumpList matchDest;
            if (term->inputPosition == m_checked)
                matchDest.append(atEndOfInput());

            readCharacter(term->inputPosition - m_checked, character);
            matchCharacterClass(character, matchDest, m_pattern.newlineCharacterClass());
            op.m_jumps.append(jump());

            matchDest.link(this);
        } else {
            if (term->inputPosition == m_checked)
                op.m_jumps.append(notAtEndOfInput());
            // Erk, really should poison out these alternatives early. :-/
            else
                op.m_jumps.append(jump());
        }
    }
    void backtrackAssertionEOL(size_t opIndex)
    {
        backtrackTermDefault(opIndex);
    }

    // Also falls through on nextIsNotWordChar.
    void matchAssertionWordchar(size_t opIndex, JumpList& nextIsWordChar, JumpList& nextIsNotWordChar)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID character = regT0;

        if (term->inputPosition == m_checked)
            nextIsNotWordChar.append(atEndOfInput());

        readCharacter((term->inputPosition - m_checked), character);
        matchCharacterClass(character, nextIsWordChar, m_pattern.wordcharCharacterClass());
    }

    void generateAssertionWordBoundary(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID character = regT0;

        Jump atBegin;
        JumpList matchDest;
        if (!term->inputPosition)
            atBegin = branch32(Equal, index, Imm32(m_checked));
        readCharacter((term->inputPosition - m_checked) - 1, character);
        matchCharacterClass(character, matchDest, m_pattern.wordcharCharacterClass());
        if (!term->inputPosition)
            atBegin.link(this);

        // We fall through to here if the last character was not a wordchar.
        JumpList nonWordCharThenWordChar;
        JumpList nonWordCharThenNonWordChar;
        if (term->invert()) {
            matchAssertionWordchar(opIndex, nonWordCharThenNonWordChar, nonWordCharThenWordChar);
            nonWordCharThenWordChar.append(jump());
        } else {
            matchAssertionWordchar(opIndex, nonWordCharThenWordChar, nonWordCharThenNonWordChar);
            nonWordCharThenNonWordChar.append(jump());
        }
        op.m_jumps.append(nonWordCharThenNonWordChar);

        // We jump here if the last character was a wordchar.
        matchDest.link(this);
        JumpList wordCharThenWordChar;
        JumpList wordCharThenNonWordChar;
        if (term->invert()) {
            matchAssertionWordchar(opIndex, wordCharThenNonWordChar, wordCharThenWordChar);
            wordCharThenWordChar.append(jump());
        } else {
            matchAssertionWordchar(opIndex, wordCharThenWordChar, wordCharThenNonWordChar);
            // This can fall through!
        }
        op.m_jumps.append(wordCharThenWordChar);

        nonWordCharThenWordChar.link(this);
        wordCharThenNonWordChar.link(this);
    }
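
    // The word-boundary logic above as a truth table (editorial; this follows
    // directly from the jump lists):
    //     prev wordchar | next wordchar | \b matches | \B matches
    //          no       |      no       |     no     |    yes
    //          no       |      yes      |     yes    |    no
    //          yes      |      no       |     yes    |    no
    //          yes      |      yes      |     no     |    yes
    // i.e. a boundary exists exactly where the two sides differ; term->invert()
    // selects the \B rows.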
    void backtrackAssertionWordBoundary(size_t opIndex)
    {
        backtrackTermDefault(opIndex);
    }

    void generatePatternCharacterOnce(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];

        if (op.m_isDeadCode)
            return;

        // m_ops always ends with a OpBodyAlternativeEnd or OpMatchFailed
        // node, so there must always be at least one more node.
        ASSERT(opIndex + 1 < m_ops.size());
        YarrOp* nextOp = &m_ops[opIndex + 1];

        PatternTerm* term = op.m_term;
        UChar ch = term->patternCharacter;

        if ((ch > 0xff) && (m_charSize == Char8)) {
            // We have a 16-bit pattern character and an 8-bit string - short circuit.
            op.m_jumps.append(jump());
            return;
        }

        const RegisterID character = regT0;
        int maxCharactersAtOnce = m_charSize == Char8 ? 4 : 2;
        unsigned ignoreCaseMask = 0;
#if CPU(BIG_ENDIAN)
        int allCharacters = ch << (m_charSize == Char8 ? 24 : 16);
#else
        int allCharacters = ch;
#endif
        int numberCharacters;
        int startTermPosition = term->inputPosition;

        // For case-insensitive compares, non-ASCII characters that have different
        // upper & lower case representations are converted to a character class.
        ASSERT(!m_pattern.m_ignoreCase || isASCIIAlpha(ch) || isCanonicallyUnique(ch));

        if (m_pattern.m_ignoreCase && isASCIIAlpha(ch))
#if CPU(BIG_ENDIAN)
            ignoreCaseMask |= 32 << (m_charSize == Char8 ? 24 : 16);
#else
            ignoreCaseMask |= 32;
#endif

        for (numberCharacters = 1; numberCharacters < maxCharactersAtOnce && nextOp->m_op == OpTerm; ++numberCharacters, nextOp = &m_ops[opIndex + numberCharacters]) {
            PatternTerm* nextTerm = nextOp->m_term;

            if (nextTerm->type != PatternTerm::TypePatternCharacter
                || nextTerm->quantityType != QuantifierFixedCount
                || nextTerm->quantityCount != 1
                || nextTerm->inputPosition != (startTermPosition + numberCharacters))
                break;

            nextOp->m_isDeadCode = true;

#if CPU(BIG_ENDIAN)
            int shiftAmount = (m_charSize == Char8 ? 24 : 16) - ((m_charSize == Char8 ? 8 : 16) * numberCharacters);
#else
            int shiftAmount = (m_charSize == Char8 ? 8 : 16) * numberCharacters;
#endif

            UChar currentCharacter = nextTerm->patternCharacter;

            if ((currentCharacter > 0xff) && (m_charSize == Char8)) {
                // We have a 16-bit pattern character and an 8-bit string - short circuit.
                op.m_jumps.append(jump());
                return;
            }

            // For case-insensitive compares, non-ASCII characters that have different
            // upper & lower case representations are converted to a character class.
            ASSERT(!m_pattern.m_ignoreCase || isASCIIAlpha(currentCharacter) || isCanonicallyUnique(currentCharacter));

            allCharacters |= (currentCharacter << shiftAmount);

            if ((m_pattern.m_ignoreCase) && (isASCIIAlpha(currentCharacter)))
                ignoreCaseMask |= 32 << shiftAmount;
        }

        if (m_charSize == Char8) {
            switch (numberCharacters) {
            case 1:
                op.m_jumps.append(jumpIfCharNotEquals(ch, startTermPosition - m_checked, character));
                return;
            case 2: {
                BaseIndex address(input, index, TimesOne, (startTermPosition - m_checked) * sizeof(LChar));
                load16Unaligned(address, character);
                break;
            }
            case 3: {
                BaseIndex highAddress(input, index, TimesOne, (startTermPosition - m_checked) * sizeof(LChar));
                load16Unaligned(highAddress, character);
                if (ignoreCaseMask)
                    or32(Imm32(ignoreCaseMask), character);
                op.m_jumps.append(branch32(NotEqual, character, Imm32((allCharacters & 0xffff) | ignoreCaseMask)));
                op.m_jumps.append(jumpIfCharNotEquals(allCharacters >> 16, startTermPosition + 2 - m_checked, character));
                return;
            }
            case 4: {
                BaseIndex address(input, index, TimesOne, (startTermPosition - m_checked) * sizeof(LChar));
                load32WithUnalignedHalfWords(address, character);
                break;
            }
            }
        } else {
            switch (numberCharacters) {
            case 1:
                op.m_jumps.append(jumpIfCharNotEquals(ch, term->inputPosition - m_checked, character));
                return;
            case 2: {
                BaseIndex address(input, index, TimesTwo, (term->inputPosition - m_checked) * sizeof(UChar));
                load32WithUnalignedHalfWords(address, character);
                break;
            }
            }
        }

        if (ignoreCaseMask)
            or32(Imm32(ignoreCaseMask), character);
        op.m_jumps.append(branch32(NotEqual, character, Imm32(allCharacters | ignoreCaseMask)));
        return;
    }
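
    // Worked example of the fusing above (editorial, assuming little-endian):
    // for the fragment /ab/ over an 8-bit string, the second OpTerm is marked
    // dead and both characters are checked by one load16Unaligned plus a single
    // compare against allCharacters == 'a' | ('b' << 8) == 0x6261. With /i and
    // ASCII letters, ignoreCaseMask == 0x2020 is OR'ed into both the loaded
    // value and the immediate so that either case matches. Up to four 8-bit or
    // two 16-bit characters can be folded into one branch this way.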
    void backtrackPatternCharacterOnce(size_t opIndex)
    {
        backtrackTermDefault(opIndex);
    }

    void generatePatternCharacterFixed(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;
        UChar ch = term->patternCharacter;

        const RegisterID character = regT0;
        const RegisterID countRegister = regT1;

        move(index, countRegister);
        sub32(Imm32(term->quantityCount.unsafeGet()), countRegister);

        Label loop(this);
        BaseIndex address(input, countRegister, m_charScale, (Checked<int>(term->inputPosition - m_checked + Checked<int64_t>(term->quantityCount)) * static_cast<int>(m_charSize == Char8 ? sizeof(char) : sizeof(UChar))).unsafeGet());

        if (m_charSize == Char8)
            load8(address, character);
        else
            load16(address, character);

        // For case-insensitive compares, non-ASCII characters that have different
        // upper & lower case representations are converted to a character class.
        ASSERT(!m_pattern.m_ignoreCase || isASCIIAlpha(ch) || isCanonicallyUnique(ch));
        if (m_pattern.m_ignoreCase && isASCIIAlpha(ch)) {
            or32(TrustedImm32(0x20), character);
            ch |= 0x20;
        }

        op.m_jumps.append(branch32(NotEqual, character, Imm32(ch)));
        add32(TrustedImm32(1), countRegister);
        branch32(NotEqual, countRegister, index).linkTo(loop, this);
    }
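
    // Loop shape above (editorial summary): countRegister starts at
    // index - quantityCount and counts up to index, while the BaseIndex offset
    // adds quantityCount back in, so each iteration reads the next of the
    // quantityCount already-range-checked characters without moving 'index'.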
    void backtrackPatternCharacterFixed(size_t opIndex)
    {
        backtrackTermDefault(opIndex);
    }

    void generatePatternCharacterGreedy(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;
        UChar ch = term->patternCharacter;

        const RegisterID character = regT0;
        const RegisterID countRegister = regT1;

        move(TrustedImm32(0), countRegister);

        // Unless we have a 16-bit pattern character and an 8-bit string - short circuit.
        if (!((ch > 0xff) && (m_charSize == Char8))) {
            JumpList failures;
            Label loop(this);
            failures.append(atEndOfInput());
            failures.append(jumpIfCharNotEquals(ch, term->inputPosition - m_checked, character));

            add32(TrustedImm32(1), countRegister);
            add32(TrustedImm32(1), index);
            if (term->quantityCount == quantifyInfinite)
                jump(loop);
            else
                branch32(NotEqual, countRegister, Imm32(term->quantityCount.unsafeGet())).linkTo(loop, this);

            failures.link(this);
        }
        op.m_reentry = label();

        storeToFrame(countRegister, term->frameLocation);
    }
    void backtrackPatternCharacterGreedy(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID countRegister = regT1;

        m_backtrackingState.link(this);

        loadFromFrame(term->frameLocation, countRegister);
        m_backtrackingState.append(branchTest32(Zero, countRegister));
        sub32(TrustedImm32(1), countRegister);
        sub32(TrustedImm32(1), index);
        jump(op.m_reentry);
    }

    void generatePatternCharacterNonGreedy(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID countRegister = regT1;

        move(TrustedImm32(0), countRegister);
        op.m_reentry = label();
        storeToFrame(countRegister, term->frameLocation);
    }
    void backtrackPatternCharacterNonGreedy(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;
        UChar ch = term->patternCharacter;

        const RegisterID character = regT0;
        const RegisterID countRegister = regT1;

        m_backtrackingState.link(this);

        loadFromFrame(term->frameLocation, countRegister);

        // Unless we have a 16-bit pattern character and an 8-bit string - short circuit.
        if (!((ch > 0xff) && (m_charSize == Char8))) {
            JumpList nonGreedyFailures;
            nonGreedyFailures.append(atEndOfInput());
            if (term->quantityCount != quantifyInfinite)
                nonGreedyFailures.append(branch32(Equal, countRegister, Imm32(term->quantityCount.unsafeGet())));
            nonGreedyFailures.append(jumpIfCharNotEquals(ch, term->inputPosition - m_checked, character));

            add32(TrustedImm32(1), countRegister);
            add32(TrustedImm32(1), index);

            jump(op.m_reentry);
            nonGreedyFailures.link(this);
        }

        sub32(countRegister, index);
        m_backtrackingState.fallthrough();
    }
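
    // Greedy vs. non-greedy (editorial summary of the methods above): the
    // greedy pair consumes as many matching characters as possible up front
    // and, when backtracked into, gives back one character per attempt, with
    // the count kept in the term's frame slot. The non-greedy pair consumes
    // nothing initially and consumes one further character each time it is
    // backtracked into, failing once input or quantityCount is exhausted, at
    // which point 'index' is rewound by the full count.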

    void generateCharacterClassOnce(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID character = regT0;

        JumpList matchDest;
        readCharacter(term->inputPosition - m_checked, character);
        matchCharacterClass(character, matchDest, term->characterClass);

        if (term->invert())
            op.m_jumps.append(matchDest);
        else {
            op.m_jumps.append(jump());
            matchDest.link(this);
        }
    }
    void backtrackCharacterClassOnce(size_t opIndex)
    {
        backtrackTermDefault(opIndex);
    }

    void generateCharacterClassFixed(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID character = regT0;
        const RegisterID countRegister = regT1;

        move(index, countRegister);
        sub32(Imm32(term->quantityCount.unsafeGet()), countRegister);

        Label loop(this);
        JumpList matchDest;
        if (m_charSize == Char8)
            load8(BaseIndex(input, countRegister, TimesOne, (Checked<int>(term->inputPosition - m_checked + Checked<int64_t>(term->quantityCount)) * static_cast<int>(sizeof(char))).unsafeGet()), character);
        else
            load16(BaseIndex(input, countRegister, TimesTwo, (Checked<int>(term->inputPosition - m_checked + Checked<int64_t>(term->quantityCount)) * static_cast<int>(sizeof(UChar))).unsafeGet()), character);
        matchCharacterClass(character, matchDest, term->characterClass);

        if (term->invert())
            op.m_jumps.append(matchDest);
        else {
            op.m_jumps.append(jump());
            matchDest.link(this);
        }

        add32(TrustedImm32(1), countRegister);
        branch32(NotEqual, countRegister, index).linkTo(loop, this);
    }
    void backtrackCharacterClassFixed(size_t opIndex)
    {
        backtrackTermDefault(opIndex);
    }

    void generateCharacterClassGreedy(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID character = regT0;
        const RegisterID countRegister = regT1;

        move(TrustedImm32(0), countRegister);

        JumpList failures;
        Label loop(this);
        failures.append(atEndOfInput());

        if (term->invert()) {
            readCharacter(term->inputPosition - m_checked, character);
            matchCharacterClass(character, failures, term->characterClass);
        } else {
            JumpList matchDest;
            readCharacter(term->inputPosition - m_checked, character);
            matchCharacterClass(character, matchDest, term->characterClass);
            failures.append(jump());
            matchDest.link(this);
        }

        add32(TrustedImm32(1), countRegister);
        add32(TrustedImm32(1), index);
        if (term->quantityCount != quantifyInfinite) {
            branch32(NotEqual, countRegister, Imm32(term->quantityCount.unsafeGet())).linkTo(loop, this);
            failures.append(jump());
        } else
            jump(loop);

        failures.link(this);
        op.m_reentry = label();

        storeToFrame(countRegister, term->frameLocation);
    }
    void backtrackCharacterClassGreedy(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID countRegister = regT1;

        m_backtrackingState.link(this);

        loadFromFrame(term->frameLocation, countRegister);
        m_backtrackingState.append(branchTest32(Zero, countRegister));
        sub32(TrustedImm32(1), countRegister);
        sub32(TrustedImm32(1), index);
        jump(op.m_reentry);
    }

    void generateCharacterClassNonGreedy(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID countRegister = regT1;

        move(TrustedImm32(0), countRegister);
        op.m_reentry = label();
        storeToFrame(countRegister, term->frameLocation);
    }
    void backtrackCharacterClassNonGreedy(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID character = regT0;
        const RegisterID countRegister = regT1;

        JumpList nonGreedyFailures;

        m_backtrackingState.link(this);

        loadFromFrame(term->frameLocation, countRegister);

        nonGreedyFailures.append(atEndOfInput());
        nonGreedyFailures.append(branch32(Equal, countRegister, Imm32(term->quantityCount.unsafeGet())));

        JumpList matchDest;
        readCharacter(term->inputPosition - m_checked, character);
        matchCharacterClass(character, matchDest, term->characterClass);

        if (term->invert())
            nonGreedyFailures.append(matchDest);
        else {
            nonGreedyFailures.append(jump());
            matchDest.link(this);
        }

        add32(TrustedImm32(1), countRegister);
        add32(TrustedImm32(1), index);

        jump(op.m_reentry);

        nonGreedyFailures.link(this);
        sub32(countRegister, index);
        m_backtrackingState.fallthrough();
    }

    void generateDotStarEnclosure(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        const RegisterID character = regT0;
        const RegisterID matchPos = regT1;

        JumpList foundBeginningNewLine;
        JumpList saveStartIndex;
        JumpList foundEndingNewLine;

        ASSERT(!m_pattern.m_body->m_hasFixedSize);
        getMatchStart(matchPos);

        saveStartIndex.append(branchTest32(Zero, matchPos));
        Label findBOLLoop(this);
        sub32(TrustedImm32(1), matchPos);
        if (m_charSize == Char8)
            load8(BaseIndex(input, matchPos, TimesOne, 0), character);
        else
            load16(BaseIndex(input, matchPos, TimesTwo, 0), character);
        matchCharacterClass(character, foundBeginningNewLine, m_pattern.newlineCharacterClass());
        branchTest32(NonZero, matchPos).linkTo(findBOLLoop, this);
        saveStartIndex.append(jump());

        foundBeginningNewLine.link(this);
        add32(TrustedImm32(1), matchPos); // Advance past newline
        saveStartIndex.link(this);

        if (!m_pattern.m_multiline && term->anchors.bolAnchor)
            op.m_jumps.append(branchTest32(NonZero, matchPos));

        ASSERT(!m_pattern.m_body->m_hasFixedSize);
        setMatchStart(matchPos);

        move(index, matchPos);

        Label findEOLLoop(this);
        foundEndingNewLine.append(branch32(Equal, matchPos, length));
        if (m_charSize == Char8)
            load8(BaseIndex(input, matchPos, TimesOne, 0), character);
        else
            load16(BaseIndex(input, matchPos, TimesTwo, 0), character);
        matchCharacterClass(character, foundEndingNewLine, m_pattern.newlineCharacterClass());
        add32(TrustedImm32(1), matchPos);
        jump(findEOLLoop);

        foundEndingNewLine.link(this);

        if (!m_pattern.m_multiline && term->anchors.eolAnchor)
            op.m_jumps.append(branch32(NotEqual, matchPos, length));

        move(matchPos, index);
    }
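
    // What the enclosure above implements (editorial summary): for patterns of
    // the /.*foo.*/ form the interior is matched first; this code then scans
    // backwards from the recorded match start to the previous newline (or the
    // start of input) to fix up the reported match start, and forwards from
    // 'index' to the next newline (or the end of input) to extend the match
    // end, failing via op.m_jumps if a bol/eol anchor is violated when not
    // multiline.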
  1012. void backtrackDotStarEnclosure(size_t opIndex)
  1013. {
  1014. backtrackTermDefault(opIndex);
  1015. }
  1016. // Code generation/backtracking for simple terms
  1017. // (pattern characters, character classes, and assertions).
  1018. // These methods farm out work to the set of functions above.
  1019. void generateTerm(size_t opIndex)
  1020. {
  1021. YarrOp& op = m_ops[opIndex];
  1022. PatternTerm* term = op.m_term;
  1023. switch (term->type) {
  1024. case PatternTerm::TypePatternCharacter:
  1025. switch (term->quantityType) {
  1026. case QuantifierFixedCount:
  1027. if (term->quantityCount == 1)
  1028. generatePatternCharacterOnce(opIndex);
  1029. else
  1030. generatePatternCharacterFixed(opIndex);
  1031. break;
  1032. case QuantifierGreedy:
  1033. generatePatternCharacterGreedy(opIndex);
  1034. break;
  1035. case QuantifierNonGreedy:
  1036. generatePatternCharacterNonGreedy(opIndex);
  1037. break;
  1038. }
  1039. break;
  1040. case PatternTerm::TypeCharacterClass:
  1041. switch (term->quantityType) {
  1042. case QuantifierFixedCount:
  1043. if (term->quantityCount == 1)
  1044. generateCharacterClassOnce(opIndex);
  1045. else
  1046. generateCharacterClassFixed(opIndex);
  1047. break;
  1048. case QuantifierGreedy:
  1049. generateCharacterClassGreedy(opIndex);
  1050. break;
  1051. case QuantifierNonGreedy:
  1052. generateCharacterClassNonGreedy(opIndex);
  1053. break;
  1054. }
  1055. break;
  1056. case PatternTerm::TypeAssertionBOL:
  1057. generateAssertionBOL(opIndex);
  1058. break;
  1059. case PatternTerm::TypeAssertionEOL:
  1060. generateAssertionEOL(opIndex);
  1061. break;
  1062. case PatternTerm::TypeAssertionWordBoundary:
  1063. generateAssertionWordBoundary(opIndex);
  1064. break;
  1065. case PatternTerm::TypeForwardReference:
  1066. break;
  1067. case PatternTerm::TypeParenthesesSubpattern:
  1068. case PatternTerm::TypeParentheticalAssertion:
  1069. RELEASE_ASSERT_NOT_REACHED();
  1070. case PatternTerm::TypeBackReference:
  1071. m_shouldFallBack = true;
  1072. break;
  1073. case PatternTerm::TypeDotStarEnclosure:
  1074. generateDotStarEnclosure(opIndex);
  1075. break;
  1076. }
  1077. }
    void backtrackTerm(size_t opIndex)
    {
        YarrOp& op = m_ops[opIndex];
        PatternTerm* term = op.m_term;

        switch (term->type) {
        case PatternTerm::TypePatternCharacter:
            switch (term->quantityType) {
            case QuantifierFixedCount:
                if (term->quantityCount == 1)
                    backtrackPatternCharacterOnce(opIndex);
                else
                    backtrackPatternCharacterFixed(opIndex);
                break;
            case QuantifierGreedy:
                backtrackPatternCharacterGreedy(opIndex);
                break;
            case QuantifierNonGreedy:
                backtrackPatternCharacterNonGreedy(opIndex);
                break;
            }
            break;

        case PatternTerm::TypeCharacterClass:
            switch (term->quantityType) {
            case QuantifierFixedCount:
                if (term->quantityCount == 1)
                    backtrackCharacterClassOnce(opIndex);
                else
                    backtrackCharacterClassFixed(opIndex);
                break;
            case QuantifierGreedy:
                backtrackCharacterClassGreedy(opIndex);
                break;
            case QuantifierNonGreedy:
                backtrackCharacterClassNonGreedy(opIndex);
                break;
            }
            break;

        case PatternTerm::TypeAssertionBOL:
            backtrackAssertionBOL(opIndex);
            break;

        case PatternTerm::TypeAssertionEOL:
            backtrackAssertionEOL(opIndex);
            break;

        case PatternTerm::TypeAssertionWordBoundary:
            backtrackAssertionWordBoundary(opIndex);
            break;

        case PatternTerm::TypeForwardReference:
            break;

        case PatternTerm::TypeParenthesesSubpattern:
        case PatternTerm::TypeParentheticalAssertion:
            RELEASE_ASSERT_NOT_REACHED();

        case PatternTerm::TypeDotStarEnclosure:
            backtrackDotStarEnclosure(opIndex);
            break;

        case PatternTerm::TypeBackReference:
            m_shouldFallBack = true;
            break;
        }
    }
    void generate()
    {
        // Forwards generate the matching code.
        ASSERT(m_ops.size());
        size_t opIndex = 0;

        do {
            YarrOp& op = m_ops[opIndex];
            switch (op.m_op) {

            case OpTerm:
                generateTerm(opIndex);
                break;

            // OpBodyAlternativeBegin/Next/End
            //
            // These nodes wrap the set of alternatives in the body of the regular expression.
            // There may be either one or two chains of OpBodyAlternative nodes, one representing
            // the 'once through' sequence of alternatives (if any exist), and one representing
            // the repeating alternatives (again, if any exist).
            //
            // Upon normal entry to the Begin alternative, we will check that input is available
            // to run the first alternative. Reentry to the Begin alternative will take place after
            // the check has taken place, and will assume that the input position has already been
            // progressed as appropriate.
            //
            // Entry to subsequent Next/End alternatives occurs when the prior alternative has
            // successfully completed a match - we return a success state from JIT code.
            //
            // A Next alternative allows for reentry optimized to suit backtracking from its
            // preceding alternative. It expects the input position to still be set to a position
            // appropriate to its predecessor, and it will only perform an input check if the
            // predecessor had a minimum size less than its own.
            //
            // In the case of 'once through' expressions, the End node will also have a reentry
            // point to jump to when the last alternative fails. Again, this expects the input
            // position to still reflect that expected by the prior alternative.
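            //
            // As an illustrative sketch (not the verbatim output of the compiler), a body
            // disjunction such as /wxyz|abc/ would be lowered to a chain along the lines of:
            //     OpBodyAlternativeBegin  (alternative "wxyz")
            //         OpTerm 'w', OpTerm 'x', OpTerm 'y', OpTerm 'z'
            //     OpBodyAlternativeNext   (alternative "abc")
            //         OpTerm 'a', OpTerm 'b', OpTerm 'c'
            //     OpBodyAlternativeEnd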
            case OpBodyAlternativeBegin: {
                PatternAlternative* alternative = op.m_alternative;

                // Upon entry at the head of the set of alternatives, check if input is available
                // to run the first alternative. (This progresses the input position).
                op.m_jumps.append(jumpIfNoAvailableInput(alternative->m_minimumSize));
                // We will reenter after the check, and assume the input position to have been
                // set as appropriate to this alternative.
                op.m_reentry = label();

                m_checked += alternative->m_minimumSize;
                break;
            }
            case OpBodyAlternativeNext:
            case OpBodyAlternativeEnd: {
                PatternAlternative* priorAlternative = m_ops[op.m_previousOp].m_alternative;
                PatternAlternative* alternative = op.m_alternative;

                // If we get here, the prior alternative matched - return success.

                // Adjust the stack pointer to remove the pattern's frame.
                removeCallFrame();

                // Load appropriate values into the return register and the first output
                // slot, and return. In the case of a pattern with a fixed size, we will
                // not have yet set the value in the first output slot.
                ASSERT(index != returnRegister);
                if (m_pattern.m_body->m_hasFixedSize) {
                    move(index, returnRegister);
                    if (priorAlternative->m_minimumSize)
                        sub32(Imm32(priorAlternative->m_minimumSize), returnRegister);
                    if (compileMode == IncludeSubpatterns)
                        store32(returnRegister, output);
                } else
                    getMatchStart(returnRegister);
                if (compileMode == IncludeSubpatterns)
                    store32(index, Address(output, 4));
                move(index, returnRegister2);

                generateReturn();

                // This is the divide between the tail of the prior alternative, above, and
                // the head of the subsequent alternative, below.

                if (op.m_op == OpBodyAlternativeNext) {
                    // This is the reentry point for the Next alternative. We expect any code
                    // that jumps here to do so with the input position matching that of the
                    // PRIOR alternative, and we will only check input availability if we
                    // need to progress it forwards.
                    op.m_reentry = label();
                    if (alternative->m_minimumSize > priorAlternative->m_minimumSize) {
                        add32(Imm32(alternative->m_minimumSize - priorAlternative->m_minimumSize), index);
                        op.m_jumps.append(jumpIfNoAvailableInput());
                    } else if (priorAlternative->m_minimumSize > alternative->m_minimumSize)
                        sub32(Imm32(priorAlternative->m_minimumSize - alternative->m_minimumSize), index);
                } else if (op.m_nextOp == notFound) {
                    // This is the reentry point for the End of 'once through' alternatives,
                    // jumped to when the last alternative fails to match.
                    op.m_reentry = label();
                    sub32(Imm32(priorAlternative->m_minimumSize), index);
                }

                if (op.m_op == OpBodyAlternativeNext)
                    m_checked += alternative->m_minimumSize;
                m_checked -= priorAlternative->m_minimumSize;
                break;
            }
            // OpSimpleNestedAlternativeBegin/Next/End
            // OpNestedAlternativeBegin/Next/End
            //
            // These nodes are used to handle sets of alternatives that are nested within
            // subpatterns and parenthetical assertions. The 'simple' forms are used where
            // we do not need to be able to backtrack back into any alternative other than
            // the last; the normal forms allow backtracking into any alternative.
            //
            // Each Begin/Next node is responsible for planting an input check to ensure
            // sufficient input is available on entry. Next nodes additionally need to
            // jump to the end - Next nodes use the End node's m_jumps list to hold this
            // set of jumps.
            //
            // In the non-simple forms, successful alternative matches must store a
            // 'return address' using a DataLabelPtr, used to store the address to jump
            // to when backtracking, to get to the code for the appropriate alternative.
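            //
            // As an illustrative sketch, the nested ops for a subpattern /(?:ab|cd)/
            // would take roughly this shape (the outer parentheses nodes are described
            // further below):
            //     OpSimpleNestedAlternativeBegin ("ab")  - input check, m_checked adjust
            //     OpSimpleNestedAlternativeNext  ("cd")  - jump to End on match, reentry label
            //     OpSimpleNestedAlternativeEnd           - links the Next nodes' jumps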
            case OpSimpleNestedAlternativeBegin:
            case OpNestedAlternativeBegin: {
                PatternTerm* term = op.m_term;
                PatternAlternative* alternative = op.m_alternative;
                PatternDisjunction* disjunction = term->parentheses.disjunction;

                // Calculate how much input we need to check for, and if non-zero check.
                op.m_checkAdjust = alternative->m_minimumSize;
                if ((term->quantityType == QuantifierFixedCount) && (term->type != PatternTerm::TypeParentheticalAssertion))
                    op.m_checkAdjust -= disjunction->m_minimumSize;
                if (op.m_checkAdjust)
                    op.m_jumps.append(jumpIfNoAvailableInput(op.m_checkAdjust));

                m_checked += op.m_checkAdjust;
                break;
            }
            case OpSimpleNestedAlternativeNext:
            case OpNestedAlternativeNext: {
                PatternTerm* term = op.m_term;
                PatternAlternative* alternative = op.m_alternative;
                PatternDisjunction* disjunction = term->parentheses.disjunction;

                // In the non-simple case, store a 'return address' so we can backtrack correctly.
                if (op.m_op == OpNestedAlternativeNext) {
                    unsigned parenthesesFrameLocation = term->frameLocation;
                    unsigned alternativeFrameLocation = parenthesesFrameLocation;
                    if (term->quantityType != QuantifierFixedCount)
                        alternativeFrameLocation += YarrStackSpaceForBackTrackInfoParenthesesOnce;
                    op.m_returnAddress = storeToFrameWithPatch(alternativeFrameLocation);
                }

                if (term->quantityType != QuantifierFixedCount && !m_ops[op.m_previousOp].m_alternative->m_minimumSize) {
                    // If the previous alternative matched without consuming characters then
                    // backtrack to try to match while consuming some input.
                    op.m_zeroLengthMatch = branch32(Equal, index, Address(stackPointerRegister, term->frameLocation * sizeof(void*)));
                }

                // If we reach here then the last alternative has matched - jump to the
                // End node, to skip over any further alternatives.
                //
                // FIXME: this is logically O(N^2) (though N can be expected to be very
                // small). We could avoid this either by adding an extra jump to the JIT
                // data structures, or by making the backtracking code that jumps to Next
                // alternatives responsible for checking that input is available (if we
                // didn't need to plant the input checks, then m_jumps would be free).
                YarrOp* endOp = &m_ops[op.m_nextOp];
                while (endOp->m_nextOp != notFound) {
                    ASSERT(endOp->m_op == OpSimpleNestedAlternativeNext || endOp->m_op == OpNestedAlternativeNext);
                    endOp = &m_ops[endOp->m_nextOp];
                }
                ASSERT(endOp->m_op == OpSimpleNestedAlternativeEnd || endOp->m_op == OpNestedAlternativeEnd);
                endOp->m_jumps.append(jump());

                // This is the entry point for the next alternative.
                op.m_reentry = label();

                // Calculate how much input we need to check for, and if non-zero check.
                op.m_checkAdjust = alternative->m_minimumSize;
                if ((term->quantityType == QuantifierFixedCount) && (term->type != PatternTerm::TypeParentheticalAssertion))
                    op.m_checkAdjust -= disjunction->m_minimumSize;
                if (op.m_checkAdjust)
                    op.m_jumps.append(jumpIfNoAvailableInput(op.m_checkAdjust));

                YarrOp& lastOp = m_ops[op.m_previousOp];
                m_checked -= lastOp.m_checkAdjust;
                m_checked += op.m_checkAdjust;
                break;
            }
            case OpSimpleNestedAlternativeEnd:
            case OpNestedAlternativeEnd: {
                PatternTerm* term = op.m_term;

                // In the non-simple case, store a 'return address' so we can backtrack correctly.
                if (op.m_op == OpNestedAlternativeEnd) {
                    unsigned parenthesesFrameLocation = term->frameLocation;
                    unsigned alternativeFrameLocation = parenthesesFrameLocation;
                    if (term->quantityType != QuantifierFixedCount)
                        alternativeFrameLocation += YarrStackSpaceForBackTrackInfoParenthesesOnce;
                    op.m_returnAddress = storeToFrameWithPatch(alternativeFrameLocation);
                }

                if (term->quantityType != QuantifierFixedCount && !m_ops[op.m_previousOp].m_alternative->m_minimumSize) {
                    // If the previous alternative matched without consuming characters then
                    // backtrack to try to match while consuming some input.
                    op.m_zeroLengthMatch = branch32(Equal, index, Address(stackPointerRegister, term->frameLocation * sizeof(void*)));
                }

                // If this set of alternatives contains more than one alternative,
                // then the Next nodes will have planted jumps to the End, and added
                // them to this node's m_jumps list.
                op.m_jumps.link(this);
                op.m_jumps.clear();

                YarrOp& lastOp = m_ops[op.m_previousOp];
                m_checked -= lastOp.m_checkAdjust;
                break;
            }
            // OpParenthesesSubpatternOnceBegin/End
            //
            // These nodes support (optionally) capturing subpatterns, that have a
            // quantity count of 1 (this covers fixed once, and ?/?? quantifiers).
            case OpParenthesesSubpatternOnceBegin: {
                PatternTerm* term = op.m_term;
                unsigned parenthesesFrameLocation = term->frameLocation;
                const RegisterID indexTemporary = regT0;
                ASSERT(term->quantityCount == 1);

                // Upon entry to a Greedy quantified set of parentheses, store the index.
                // We'll use this for two purposes:
                //  - To indicate which iteration we are on of matching the remainder of
                //    the expression after the parentheses - the first, including the
                //    match within the parentheses, or the second having skipped over them.
                //  - To check for empty matches, which must be rejected.
                //
                // At the head of a NonGreedy set of parentheses we'll immediately set the
                // value on the stack to -1 (indicating a match skipping the subpattern),
                // and plant a jump to the end. We'll also plant a label to backtrack to
                // to reenter the subpattern later, with a store to set up index on the
                // second iteration.
                //
                // FIXME: for capturing parens, could use the index in the capture array?
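                //
                // Summarizing the frame slot's meaning (an illustrative reading of the
                // stores below, not additional behaviour):
                //     frame[parenthesesFrameLocation] == index -> this iteration entered
                //         the subpattern at 'index'; a zero-length match is later rejected
                //         by comparing against this value.
                //     frame[parenthesesFrameLocation] == -1    -> the subpattern was
                //         skipped on this iteration.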
                if (term->quantityType == QuantifierGreedy)
                    storeToFrame(index, parenthesesFrameLocation);
                else if (term->quantityType == QuantifierNonGreedy) {
                    storeToFrame(TrustedImm32(-1), parenthesesFrameLocation);
                    op.m_jumps.append(jump());
                    op.m_reentry = label();
                    storeToFrame(index, parenthesesFrameLocation);
                }

                // If the parentheses are capturing, store the starting index value to the
                // captures array, offsetting as necessary.
                //
                // FIXME: could avoid offsetting this value in JIT code, apply
                // offsets only afterwards, at the point the results array is
                // being accessed.
                if (term->capture() && compileMode == IncludeSubpatterns) {
                    int inputOffset = term->inputPosition - m_checked;
                    if (term->quantityType == QuantifierFixedCount)
                        inputOffset -= term->parentheses.disjunction->m_minimumSize;
                    if (inputOffset) {
                        move(index, indexTemporary);
                        add32(Imm32(inputOffset), indexTemporary);
                        setSubpatternStart(indexTemporary, term->parentheses.subpatternId);
                    } else
                        setSubpatternStart(index, term->parentheses.subpatternId);
                }
                break;
            }
            case OpParenthesesSubpatternOnceEnd: {
                PatternTerm* term = op.m_term;
                const RegisterID indexTemporary = regT0;
                ASSERT(term->quantityCount == 1);

#ifndef NDEBUG
                // Runtime ASSERT to make sure that the nested alternative handled the
                // "no input consumed" check.
                if (term->quantityType != QuantifierFixedCount && !term->parentheses.disjunction->m_minimumSize) {
                    Jump pastBreakpoint;
                    pastBreakpoint = branch32(NotEqual, index, Address(stackPointerRegister, term->frameLocation * sizeof(void*)));
                    breakpoint();
                    pastBreakpoint.link(this);
                }
#endif

                // If the parentheses are capturing, store the ending index value to the
                // captures array, offsetting as necessary.
                //
                // FIXME: could avoid offsetting this value in JIT code, apply
                // offsets only afterwards, at the point the results array is
                // being accessed.
                if (term->capture() && compileMode == IncludeSubpatterns) {
                    int inputOffset = term->inputPosition - m_checked;
                    if (inputOffset) {
                        move(index, indexTemporary);
                        add32(Imm32(inputOffset), indexTemporary);
                        setSubpatternEnd(indexTemporary, term->parentheses.subpatternId);
                    } else
                        setSubpatternEnd(index, term->parentheses.subpatternId);
                }

                // If the parentheses are quantified Greedy then add a label to jump back
                // to if we get a failed match from after the parentheses. For NonGreedy
                // parentheses, link the jump from before the subpattern to here.
                if (term->quantityType == QuantifierGreedy)
                    op.m_reentry = label();
                else if (term->quantityType == QuantifierNonGreedy) {
                    YarrOp& beginOp = m_ops[op.m_previousOp];
                    beginOp.m_jumps.link(this);
                }
                break;
            }
            // OpParenthesesSubpatternTerminalBegin/End
            case OpParenthesesSubpatternTerminalBegin: {
                PatternTerm* term = op.m_term;
                ASSERT(term->quantityType == QuantifierGreedy);
                ASSERT(term->quantityCount == quantifyInfinite);
                ASSERT(!term->capture());

                // Upon entry set a label to loop back to.
                op.m_reentry = label();

                // Store the start index of the current match; we need to reject zero
                // length matches.
                storeToFrame(index, term->frameLocation);
                break;
            }
            case OpParenthesesSubpatternTerminalEnd: {
                YarrOp& beginOp = m_ops[op.m_previousOp];
#ifndef NDEBUG
                PatternTerm* term = op.m_term;

                // Runtime ASSERT to make sure that the nested alternative handled the
                // "no input consumed" check.
                Jump pastBreakpoint;
                pastBreakpoint = branch32(NotEqual, index, Address(stackPointerRegister, term->frameLocation * sizeof(void*)));
                breakpoint();
                pastBreakpoint.link(this);
#endif

                // We know that the match is non-zero, we can accept it and
                // loop back up to the head of the subpattern.
                jump(beginOp.m_reentry);

                // This is the entry point to jump to when we stop matching - we will
                // do so once the subpattern cannot match any more.
                op.m_reentry = label();
                break;
            }
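            // An illustrative sketch of the terminal loop: for a trailing /(?:ab)*/ the
            // generated code is effectively
            //     reentry:  store index to frame
            //               ... match "ab" ...
            //               jump reentry     // accept the iteration, try another
            //     done:                      // reached when "ab" no longer matches
            // Nothing follows a terminal subpattern, so we never need to backtrack
            // back into it.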
            // OpParentheticalAssertionBegin/End
            case OpParentheticalAssertionBegin: {
                PatternTerm* term = op.m_term;

                // Store the current index - assertions should not update index, so
                // we will need to restore it upon a successful match.
                unsigned parenthesesFrameLocation = term->frameLocation;
                storeToFrame(index, parenthesesFrameLocation);

                // Check the input position: rewind index to the position at which the
                // assertion should match, compensating for any input already checked
                // beyond it.
                op.m_checkAdjust = m_checked - term->inputPosition;
                if (op.m_checkAdjust)
                    sub32(Imm32(op.m_checkAdjust), index);

                m_checked -= op.m_checkAdjust;
                break;
            }
            case OpParentheticalAssertionEnd: {
                PatternTerm* term = op.m_term;

                // Restore the input index value.
                unsigned parenthesesFrameLocation = term->frameLocation;
                loadFromFrame(parenthesesFrameLocation, index);

                // If inverted, a successful match of the assertion must be treated
                // as a failure, so jump to backtracking.
                if (term->invert()) {
                    op.m_jumps.append(jump());
                    op.m_reentry = label();
                }

                YarrOp& lastOp = m_ops[op.m_previousOp];
                m_checked += lastOp.m_checkAdjust;
                break;
            }
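            // A brief sketch of the inverted case (e.g. /(?!a)/), drawn from the two
            // cases above plus the backtracking code later in this file: if the nested
            // alternative matches, the unconditional jump planted above is taken into
            // the backtracking path (failure); op.m_reentry then serves as the success
            // entry point, reached when we backtrack out of a failed nested match.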
            case OpMatchFailed:
                removeCallFrame();
                move(TrustedImmPtr((void*)WTF::notFound), returnRegister);
                move(TrustedImm32(0), returnRegister2);
                generateReturn();
                break;
            }

            ++opIndex;
        } while (opIndex < m_ops.size());
    }
    void backtrack()
    {
        // Backwards generate the backtracking code.
        size_t opIndex = m_ops.size();
        ASSERT(opIndex);

        do {
            --opIndex;
            YarrOp& op = m_ops[opIndex];
            switch (op.m_op) {

            case OpTerm:
                backtrackTerm(opIndex);
                break;

            // OpBodyAlternativeBegin/Next/End
            //
            // For each Begin/Next node representing an alternative, we need to decide what to do
            // in two circumstances:
            //  - If we backtrack back into this node, from within the alternative.
            //  - If the input check at the head of the alternative fails (if this exists).
            //
            // We treat these two cases differently since in the former case we have slightly
            // more information - since we are backtracking out of a prior alternative we know
            // that at least enough input was available to run it. For example, given the regular
            // expression /a|b/, if we backtrack out of the first alternative (a failed pattern
            // character match of 'a'), then we need not perform an additional input availability
            // check before running the second alternative.
            //
            // The backtracking required differs for the last alternative, which in the case of the
            // repeating set of alternatives must loop. The code generated for the last alternative
            // will also be used to handle all input check failures from any prior alternatives -
            // these require similar functionality, in seeking the next available alternative for
            // which there is sufficient input.
            //
            // Since backtracking of all other alternatives simply requires us to link backtracks
            // to the reentry point for the subsequent alternative, we will only be generating any
            // code when backtracking the last alternative.
            case OpBodyAlternativeBegin:
            case OpBodyAlternativeNext: {
                PatternAlternative* alternative = op.m_alternative;

                if (op.m_op == OpBodyAlternativeNext) {
                    PatternAlternative* priorAlternative = m_ops[op.m_previousOp].m_alternative;
                    m_checked += priorAlternative->m_minimumSize;
                }
                m_checked -= alternative->m_minimumSize;

                // Is this the last alternative? If not, then if we backtrack to this point we just
                // need to jump to try to match the next alternative.
                if (m_ops[op.m_nextOp].m_op != OpBodyAlternativeEnd) {
                    m_backtrackingState.linkTo(m_ops[op.m_nextOp].m_reentry, this);
                    break;
                }
                YarrOp& endOp = m_ops[op.m_nextOp];

                YarrOp* beginOp = &op;
                while (beginOp->m_op != OpBodyAlternativeBegin) {
                    ASSERT(beginOp->m_op == OpBodyAlternativeNext);
                    beginOp = &m_ops[beginOp->m_previousOp];
                }

                bool onceThrough = endOp.m_nextOp == notFound;

                // First, generate code to handle cases where we backtrack out of an attempted match
                // of the last alternative. If this is a 'once through' set of alternatives then we
                // have nothing to do - link this straight through to the End.
                if (onceThrough)
                    m_backtrackingState.linkTo(endOp.m_reentry, this);
                else {
                    // If we don't need to move the input position, and the pattern has a fixed size
                    // (in which case we omit the store of the start index until the pattern has matched)
                    // then we can just link the backtrack out of the last alternative straight to the
                    // head of the first alternative.
                    if (m_pattern.m_body->m_hasFixedSize
                        && (alternative->m_minimumSize > beginOp->m_alternative->m_minimumSize)
                        && (alternative->m_minimumSize - beginOp->m_alternative->m_minimumSize == 1))
                        m_backtrackingState.linkTo(beginOp->m_reentry, this);
                    else {
                        // We need to generate a trampoline of code to execute before looping back
                        // around to the first alternative.
                        m_backtrackingState.link(this);

                        // If the pattern size is not fixed, then store the start index for use if we match.
                        if (!m_pattern.m_body->m_hasFixedSize) {
                            if (alternative->m_minimumSize == 1)
                                setMatchStart(index);
                            else {
                                move(index, regT0);
                                if (alternative->m_minimumSize)
                                    sub32(Imm32(alternative->m_minimumSize - 1), regT0);
                                else
                                    add32(TrustedImm32(1), regT0);
                                setMatchStart(regT0);
                            }
                        }

                        // Generate code to loop. Check whether the last alternative is longer than the
                        // first (e.g. /a|xy/ or /a|xyz/).
                        if (alternative->m_minimumSize > beginOp->m_alternative->m_minimumSize) {
                            // We want to loop, and increment input position. If the delta is 1, it is
                            // already correctly incremented, if more than one then decrement as appropriate.
                            unsigned delta = alternative->m_minimumSize - beginOp->m_alternative->m_minimumSize;
                            ASSERT(delta);
                            if (delta != 1)
                                sub32(Imm32(delta - 1), index);
                            jump(beginOp->m_reentry);
                        } else {
                            // If the first alternative has minimum size 0xFFFFFFFFu, then there cannot
                            // be sufficient input available to handle this, so just fall through.
                            unsigned delta = beginOp->m_alternative->m_minimumSize - alternative->m_minimumSize;
                            if (delta != 0xFFFFFFFFu) {
                                // We need to check input because we are incrementing the input.
                                add32(Imm32(delta + 1), index);
                                checkInput().linkTo(beginOp->m_reentry, this);
                            }
                        }
                    }
                }
                // We can reach this point in the code in two ways:
                //  - Fallthrough from the code above (a repeating alternative backtracked out of its
                //    last alternative, and did not have sufficient input to run the first).
                //  - We will loop back up to the following label when a repeating alternative loops,
                //    following a failed input check.
                //
                // Either way, we have just failed the input check for the first alternative.
                Label firstInputCheckFailed(this);

                // Generate code to handle input check failures from alternatives except the last.
                // prevOp is the alternative we're handling a bail out from (initially Begin), and
                // nextOp is the alternative we will be attempting to reenter into.
                //
                // We will link input check failures from the forwards matching path back to the code
                // that can handle them.
                YarrOp* prevOp = beginOp;
                YarrOp* nextOp = &m_ops[beginOp->m_nextOp];
                while (nextOp->m_op != OpBodyAlternativeEnd) {
                    prevOp->m_jumps.link(this);

                    // We only get here if an input check fails, so it is only worth checking again
                    // if the next alternative has a minimum size less than the last.
                    if (prevOp->m_alternative->m_minimumSize > nextOp->m_alternative->m_minimumSize) {
                        // FIXME: if we added an extra label to YarrOp, we could avoid needing to
                        // subtract delta back out, and reduce this code. Should performance test
                        // the benefit of this.
                        unsigned delta = prevOp->m_alternative->m_minimumSize - nextOp->m_alternative->m_minimumSize;
                        sub32(Imm32(delta), index);
                        Jump fail = jumpIfNoAvailableInput();
                        add32(Imm32(delta), index);
                        jump(nextOp->m_reentry);
                        fail.link(this);
                    } else if (prevOp->m_alternative->m_minimumSize < nextOp->m_alternative->m_minimumSize)
                        add32(Imm32(nextOp->m_alternative->m_minimumSize - prevOp->m_alternative->m_minimumSize), index);
                    prevOp = nextOp;
                    nextOp = &m_ops[nextOp->m_nextOp];
                }

                // We fall through to here if there is insufficient input to run the last alternative.
                // For 'once through' alternatives we are then done - just jump back up into the
                // forwards matching path at the End.
                if (onceThrough) {
                    op.m_jumps.linkTo(endOp.m_reentry, this);
                    jump(endOp.m_reentry);
                    break;
                }
                // For repeating alternatives, link any input check failure from the last alternative to
                // this point.
                op.m_jumps.link(this);

                bool needsToUpdateMatchStart = !m_pattern.m_body->m_hasFixedSize;

                // Check for cases where the input position is already incremented by 1 for the last
                // alternative (this is particularly useful where the minimum size of the body
                // disjunction is 0, e.g. /a*|b/).
                if (needsToUpdateMatchStart && alternative->m_minimumSize == 1) {
                    // index is already incremented by 1, so just store it now!
                    setMatchStart(index);
                    needsToUpdateMatchStart = false;
                }

                // Check whether there is sufficient input to loop. Increment the input position by
                // one, and check. Also add in the minimum disjunction size before checking - there
                // is no point in looping if we're just going to fail all the input checks around
                // the next iteration.
                ASSERT(alternative->m_minimumSize >= m_pattern.m_body->m_minimumSize);
                if (alternative->m_minimumSize == m_pattern.m_body->m_minimumSize) {
                    // If the last alternative had the same minimum size as the disjunction,
                    // simply increment the input position by 1; no adjustment based on minimum size.
                    add32(TrustedImm32(1), index);
                } else {
                    // If the minimum for the last alternative was one greater than that
                    // for the disjunction, we've already progressed by 1, nothing to do!
                    unsigned delta = (alternative->m_minimumSize - m_pattern.m_body->m_minimumSize) - 1;
                    if (delta)
                        sub32(Imm32(delta), index);
                }
                Jump matchFailed = jumpIfNoAvailableInput();

                if (needsToUpdateMatchStart) {
                    if (!m_pattern.m_body->m_minimumSize)
                        setMatchStart(index);
                    else {
                        move(index, regT0);
                        sub32(Imm32(m_pattern.m_body->m_minimumSize), regT0);
                        setMatchStart(regT0);
                    }
                }

                // Calculate how much more input the first alternative requires than the minimum
                // for the body as a whole. If no more is needed then we don't need an additional
                // input check here - jump straight back up to the start of the first alternative.
                if (beginOp->m_alternative->m_minimumSize == m_pattern.m_body->m_minimumSize)
                    jump(beginOp->m_reentry);
                else {
                    if (beginOp->m_alternative->m_minimumSize > m_pattern.m_body->m_minimumSize)
                        add32(Imm32(beginOp->m_alternative->m_minimumSize - m_pattern.m_body->m_minimumSize), index);
                    else
                        sub32(Imm32(m_pattern.m_body->m_minimumSize - beginOp->m_alternative->m_minimumSize), index);
                    checkInput().linkTo(beginOp->m_reentry, this);
                    jump(firstInputCheckFailed);
                }

                // We jump to here if we iterate to the point that there is insufficient input to
                // run any matches, and need to return a failure state from JIT code.
                matchFailed.link(this);

                removeCallFrame();
                move(TrustedImmPtr((void*)WTF::notFound), returnRegister);
                move(TrustedImm32(0), returnRegister2);
                generateReturn();
                break;
            }
            case OpBodyAlternativeEnd: {
                // We should never backtrack back into a body disjunction.
                ASSERT(m_backtrackingState.isEmpty());

                PatternAlternative* priorAlternative = m_ops[op.m_previousOp].m_alternative;
                m_checked += priorAlternative->m_minimumSize;
                break;
            }

            // OpSimpleNestedAlternativeBegin/Next/End
            // OpNestedAlternativeBegin/Next/End
            //
            // Generate code for when we backtrack back out of an alternative into
            // a Begin or Next node, or when the entry input count check fails. If
            // there are more alternatives we need to jump to the next alternative,
            // if not we backtrack back out of the current set of parentheses.
            //
            // In the case of non-simple nested assertions we need to also link the
            // 'return address' appropriately to backtrack back out into the correct
            // alternative.
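            //
            // As a sketch of the 'return address' mechanism (combining the forwards
            // and backwards passes described above): on the forwards path each
            // non-simple alternative stores a code pointer into its frame slot via
            // storeToFrameWithPatch(); when we later backtrack into the parentheses,
            // the End node reloads that slot and jumps through it with
            // loadFromFrameAndJump(), landing in the backtracking code for whichever
            // alternative last matched.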
            case OpSimpleNestedAlternativeBegin:
            case OpSimpleNestedAlternativeNext:
            case OpNestedAlternativeBegin:
            case OpNestedAlternativeNext: {
                YarrOp& nextOp = m_ops[op.m_nextOp];
                bool isBegin = op.m_previousOp == notFound;
                bool isLastAlternative = nextOp.m_nextOp == notFound;
                ASSERT(isBegin == (op.m_op == OpSimpleNestedAlternativeBegin || op.m_op == OpNestedAlternativeBegin));
                ASSERT(isLastAlternative == (nextOp.m_op == OpSimpleNestedAlternativeEnd || nextOp.m_op == OpNestedAlternativeEnd));

                // Treat an input check failure the same as a failed match.
                m_backtrackingState.append(op.m_jumps);

                // Set the backtracks to jump to the appropriate place. We may need
                // to link the backtracks in one of three different ways depending on
                // the type of alternative we are dealing with:
                //  - A single alternative, with no siblings.
                //  - The last alternative of a set of two or more.
                //  - An alternative other than the last of a set of two or more.
                //
                // In the case of a single alternative on its own, we don't need to
                // jump anywhere - if the alternative fails to match we can just
                // continue to backtrack out of the parentheses without jumping.
                //
                // In the case of the last alternative in a set of more than one, we
                // need to jump to return back out to the beginning. We'll do so by
                // adding a jump to the End node's m_jumps list, and linking this
                // when we come to generate the Begin node. For alternatives other
                // than the last, we need to jump to the next alternative.
                //
                // If the alternative had adjusted the input position we must link
                // backtracking to here, correct it, and then jump on. If not we can
                // link the backtracks directly to their destination.
                if (op.m_checkAdjust) {
                    // Handle the cases where we need to link the backtracks here.
                    m_backtrackingState.link(this);
                    sub32(Imm32(op.m_checkAdjust), index);
                    if (!isLastAlternative) {
                        // An alternative that is not the last should jump to its successor.
                        jump(nextOp.m_reentry);
                    } else if (!isBegin) {
                        // The last of more than one alternatives must jump back to the beginning.
                        nextOp.m_jumps.append(jump());
                    } else {
                        // A single alternative on its own can fall through.
                        m_backtrackingState.fallthrough();
                    }
                } else {
                    // Handle the cases where we can link the backtracks directly to their destinations.
                    if (!isLastAlternative) {
                        // An alternative that is not the last should jump to its successor.
                        m_backtrackingState.linkTo(nextOp.m_reentry, this);
                    } else if (!isBegin) {
                        // The last of more than one alternatives must jump back to the beginning.
                        m_backtrackingState.takeBacktracksToJumpList(nextOp.m_jumps, this);
                    }
                    // In the case of a single alternative on its own do nothing - it can fall through.
                }

                // If there is a backtrack jump from a zero length match link it here.
                if (op.m_zeroLengthMatch.isSet())
                    m_backtrackingState.append(op.m_zeroLengthMatch);

                // At this point we've handled the backtracking back into this node.
                // Now link any backtracks that need to jump to here.

                // For non-simple alternatives, link the alternative's 'return address'
                // so that we backtrack back out into the previous alternative.
                if (op.m_op == OpNestedAlternativeNext)
                    m_backtrackingState.append(op.m_returnAddress);

                // If there is more than one alternative, then the last alternative will
                // have planted a jump to be linked to the end. This jump was added to the
                // End node's m_jumps list. If we are back at the beginning, link it here.
                if (isBegin) {
                    YarrOp* endOp = &m_ops[op.m_nextOp];
                    while (endOp->m_nextOp != notFound) {
                        ASSERT(endOp->m_op == OpSimpleNestedAlternativeNext || endOp->m_op == OpNestedAlternativeNext);
                        endOp = &m_ops[endOp->m_nextOp];
                    }
                    ASSERT(endOp->m_op == OpSimpleNestedAlternativeEnd || endOp->m_op == OpNestedAlternativeEnd);
                    m_backtrackingState.append(endOp->m_jumps);
                }

                if (!isBegin) {
                    YarrOp& lastOp = m_ops[op.m_previousOp];
                    m_checked += lastOp.m_checkAdjust;
                }
                m_checked -= op.m_checkAdjust;
                break;
            }
            case OpSimpleNestedAlternativeEnd:
            case OpNestedAlternativeEnd: {
                PatternTerm* term = op.m_term;

                // If there is a backtrack jump from a zero length match link it here.
                if (op.m_zeroLengthMatch.isSet())
                    m_backtrackingState.append(op.m_zeroLengthMatch);

                // If we backtrack into the end of a simple subpattern do nothing;
                // just continue through into the last alternative. If we backtrack
                // into the end of a non-simple set of alternatives we need to jump
                // to the backtracking return address set up during generation.
                if (op.m_op == OpNestedAlternativeEnd) {
                    m_backtrackingState.link(this);

                    // Plant a jump to the return address.
                    unsigned parenthesesFrameLocation = term->frameLocation;
                    unsigned alternativeFrameLocation = parenthesesFrameLocation;
                    if (term->quantityType != QuantifierFixedCount)
                        alternativeFrameLocation += YarrStackSpaceForBackTrackInfoParenthesesOnce;
                    loadFromFrameAndJump(alternativeFrameLocation);

                    // Link the DataLabelPtr associated with the end of the last
                    // alternative to this point.
                    m_backtrackingState.append(op.m_returnAddress);
                }

                YarrOp& lastOp = m_ops[op.m_previousOp];
                m_checked += lastOp.m_checkAdjust;
                break;
            }
            // OpParenthesesSubpatternOnceBegin/End
            //
            // When we are backtracking back out of a capturing subpattern we need
            // to clear the start index in the matches output array, to record that
            // this subpattern has not been captured.
            //
            // When backtracking back out of a Greedy quantified subpattern we need
            // to catch this, and try running the remainder of the alternative after
            // the subpattern again, skipping the parentheses.
            //
            // Upon backtracking back into a quantified set of parentheses we need to
            // check whether we were currently skipping the subpattern. If not, we
            // can backtrack into them; if we were, we need to either backtrack back
            // out of the start of the parentheses, or jump back to the forwards
            // matching start, depending on whether the match is Greedy or NonGreedy.
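            //
            // An illustrative walk-through for /(a)?b/ (a sketch of the control flow,
            // not generated output): if 'b' fails after the group matched 'a',
            // backtracking flows back through the group; once the group has no further
            // ways to match, we reach OnceBegin's backtrack code below, which clears
            // capture 1, stores -1 (skipped) to the frame, and jumps back to retry 'b'
            // without the group. Only if that also fails do we backtrack out of the
            // parentheses entirely.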
            case OpParenthesesSubpatternOnceBegin: {
                PatternTerm* term = op.m_term;
                ASSERT(term->quantityCount == 1);

                // We only need to backtrack to this point if capturing or greedy.
                if ((term->capture() && compileMode == IncludeSubpatterns) || term->quantityType == QuantifierGreedy) {
                    m_backtrackingState.link(this);

                    // If capturing, clear the capture (we only need to reset start).
                    if (term->capture() && compileMode == IncludeSubpatterns)
                        clearSubpatternStart(term->parentheses.subpatternId);

                    // If Greedy, jump to the end.
                    if (term->quantityType == QuantifierGreedy) {
                        // Clear the flag in the stackframe indicating we ran through the subpattern.
                        unsigned parenthesesFrameLocation = term->frameLocation;
                        storeToFrame(TrustedImm32(-1), parenthesesFrameLocation);

                        // Jump to after the parentheses, skipping the subpattern.
                        jump(m_ops[op.m_nextOp].m_reentry);

                        // A backtrack from after the parentheses, when skipping the subpattern,
                        // will jump back to here.
                        op.m_jumps.link(this);
                    }

                    m_backtrackingState.fallthrough();
                }
                break;
            }
            case OpParenthesesSubpatternOnceEnd: {
                PatternTerm* term = op.m_term;

                if (term->quantityType != QuantifierFixedCount) {
                    m_backtrackingState.link(this);

                    // Check whether we should backtrack back into the parentheses, or if we
                    // are currently in a state where we had skipped over the subpattern
                    // (in which case the flag value on the stack will be -1).
                    unsigned parenthesesFrameLocation = term->frameLocation;
                    Jump hadSkipped = branch32(Equal, Address(stackPointerRegister, parenthesesFrameLocation * sizeof(void*)), TrustedImm32(-1));

                    if (term->quantityType == QuantifierGreedy) {
                        // For Greedy parentheses, we skip after having already tried going
                        // through the subpattern, so if we get here we're done.
                        YarrOp& beginOp = m_ops[op.m_previousOp];
                        beginOp.m_jumps.append(hadSkipped);
                    } else {
                        // For NonGreedy parentheses, we try skipping the subpattern first,
                        // so if we get here we need to try running through the subpattern
                        // next. Jump back to the start of the parentheses in the forwards
                        // matching path.
                        ASSERT(term->quantityType == QuantifierNonGreedy);
                        YarrOp& beginOp = m_ops[op.m_previousOp];
                        hadSkipped.linkTo(beginOp.m_reentry, this);
                    }

                    m_backtrackingState.fallthrough();
                }

                m_backtrackingState.append(op.m_jumps);
                break;
            }
            // OpParenthesesSubpatternTerminalBegin/End
            //
            // Terminal subpatterns will always match - there is nothing after them to
            // force a backtrack, and as they have a minimum count of 0 they will
            // always produce an acceptable result.
            case OpParenthesesSubpatternTerminalBegin: {
                // We will backtrack to this point once the subpattern cannot match any
                // more. Since matching nothing is accepted as a successful match (we are
                // Greedy quantified with a minimum of zero), jump back to the forwards
                // matching path at the end.
                YarrOp& endOp = m_ops[op.m_nextOp];
                m_backtrackingState.linkTo(endOp.m_reentry, this);
                break;
            }
            case OpParenthesesSubpatternTerminalEnd:
                // We should never be backtracking to here (hence the 'terminal' in the name).
                ASSERT(m_backtrackingState.isEmpty());
                m_backtrackingState.append(op.m_jumps);
                break;

            // OpParentheticalAssertionBegin/End
            case OpParentheticalAssertionBegin: {
                PatternTerm* term = op.m_term;
                YarrOp& endOp = m_ops[op.m_nextOp];

                // We need to handle the backtracks upon backtracking back out
                // of a parenthetical assertion if either we need to correct
                // the input index, or the assertion was inverted.
                if (op.m_checkAdjust || term->invert()) {
                    m_backtrackingState.link(this);

                    if (op.m_checkAdjust)
                        add32(Imm32(op.m_checkAdjust), index);

                    // In an inverted assertion, failure to match the subpattern
                    // is treated as a successful match - jump to the end of the
                    // subpattern. We already have adjusted the input position
                    // back to that before the assertion, which is correct.
                    if (term->invert())
                        jump(endOp.m_reentry);

                    m_backtrackingState.fallthrough();
                }

                // The End node's jump list will contain any backtracks into
                // the end of the assertion. Also, if inverted, we will have
                // added the failure caused by a successful match to this.
                m_backtrackingState.append(endOp.m_jumps);

                m_checked += op.m_checkAdjust;
                break;
            }
            case OpParentheticalAssertionEnd: {
                // FIXME: We should really be clearing any nested subpattern
                // matches on bailing out from after the pattern. Firefox has
                // this bug too (presumably because they use YARR!)

                // Never backtrack into an assertion; later failures bail to before the begin.
                m_backtrackingState.takeBacktracksToJumpList(op.m_jumps, this);

                YarrOp& lastOp = m_ops[op.m_previousOp];
                m_checked -= lastOp.m_checkAdjust;
                break;
            }

            case OpMatchFailed:
                break;
            }

        } while (opIndex);
    }
    // Compilation methods:
    // ====================

    // opCompileParenthesesSubpattern
    // Emits ops for a subpattern (set of parentheses). These consist
    // of a set of alternatives wrapped in an outer set of nodes for
    // the parentheses.
    // Supported types of parentheses are 'Once' (quantityCount == 1)
    // and 'Terminal' (non-capturing parentheses quantified as greedy
    // and infinite).
    // Alternatives will use the 'Simple' set of ops if either the
    // subpattern is terminal (in which case we will never need to
    // backtrack), or if the subpattern only contains one alternative.
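    //
    // An illustrative mapping (a sketch of the selection logic below, not an
    // exhaustive list):
    //     /(a|b)c/    -> 'Once' parentheses, non-simple nested alternatives
    //     /(?:ab)?c/  -> 'Once' parentheses, simple nested alternatives
    //     /c(?:ab)*/  -> 'Terminal' parentheses (non-capturing, greedy, infinite)
    //     /(a)+/      -> unsupported here; falls back to the interpreter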
    void opCompileParenthesesSubpattern(PatternTerm* term)
    {
        YarrOpCode parenthesesBeginOpCode;
        YarrOpCode parenthesesEndOpCode;
        YarrOpCode alternativeBeginOpCode = OpSimpleNestedAlternativeBegin;
        YarrOpCode alternativeNextOpCode = OpSimpleNestedAlternativeNext;
        YarrOpCode alternativeEndOpCode = OpSimpleNestedAlternativeEnd;

        // We can currently only compile quantity 1 subpatterns that are
        // not copies. We generate a copy in the case of a range quantifier,
        // e.g. /(?:x){3,9}/, or /(?:x)+/ (these are effectively expanded to
        // /(?:x){3,3}(?:x){0,6}/ and /(?:x)(?:x)*/ respectively). The problem
        // comes where the subpattern is capturing, in which case we would
        // need to restore the capture from the first subpattern upon a
        // failure in the second.
        if (term->quantityCount == 1 && !term->parentheses.isCopy) {
            // Select the 'Once' nodes.
            parenthesesBeginOpCode = OpParenthesesSubpatternOnceBegin;
            parenthesesEndOpCode = OpParenthesesSubpatternOnceEnd;

            // If there is more than one alternative we cannot use the 'simple' nodes.
            if (term->parentheses.disjunction->m_alternatives.size() != 1) {
                alternativeBeginOpCode = OpNestedAlternativeBegin;
                alternativeNextOpCode = OpNestedAlternativeNext;
                alternativeEndOpCode = OpNestedAlternativeEnd;
            }
        } else if (term->parentheses.isTerminal) {
            // Select the 'Terminal' nodes.
            parenthesesBeginOpCode = OpParenthesesSubpatternTerminalBegin;
            parenthesesEndOpCode = OpParenthesesSubpatternTerminalEnd;
        } else {
            // This subpattern is not supported by the JIT.
            m_shouldFallBack = true;
            return;
        }

        size_t parenBegin = m_ops.size();
        m_ops.append(parenthesesBeginOpCode);

        m_ops.append(alternativeBeginOpCode);
        m_ops.last().m_previousOp = notFound;
        m_ops.last().m_term = term;
        Vector_shared<OwnPtr<PatternAlternative> >& alternatives = term->parentheses.disjunction->m_alternatives;
        for (unsigned i = 0; i < alternatives.size(); ++i) {
            size_t lastOpIndex = m_ops.size() - 1;

            PatternAlternative* nestedAlternative = alternatives[i].get();
            opCompileAlternative(nestedAlternative);

            size_t thisOpIndex = m_ops.size();
            m_ops.append(YarrOp(alternativeNextOpCode));

            YarrOp& lastOp = m_ops[lastOpIndex];
            YarrOp& thisOp = m_ops[thisOpIndex];

            lastOp.m_alternative = nestedAlternative;
            lastOp.m_nextOp = thisOpIndex;
            thisOp.m_previousOp = lastOpIndex;
            thisOp.m_term = term;
        }
        YarrOp& lastOp = m_ops.last();
        ASSERT(lastOp.m_op == alternativeNextOpCode);
        lastOp.m_op = alternativeEndOpCode;
        lastOp.m_alternative = 0;
        lastOp.m_nextOp = notFound;

        size_t parenEnd = m_ops.size();
        m_ops.append(parenthesesEndOpCode);

        m_ops[parenBegin].m_term = term;
        m_ops[parenBegin].m_previousOp = notFound;
        m_ops[parenBegin].m_nextOp = parenEnd;
        m_ops[parenEnd].m_term = term;
        m_ops[parenEnd].m_previousOp = parenBegin;
        m_ops[parenEnd].m_nextOp = notFound;
    }
    // opCompileParentheticalAssertion
    // Emits ops for a parenthetical assertion. These consist of an
    // OpSimpleNestedAlternativeBegin/Next/End set of nodes wrapping
    // the alternatives, with these wrapped by an outer pair of
    // OpParentheticalAssertionBegin/End nodes.
    // We can always use the OpSimpleNestedAlternative nodes in the
    // case of parenthetical assertions since these only ever match
    // once, and will never backtrack back into the assertion.
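    //
    // An illustrative sketch: for /(?=a|b)/ the emitted node list is roughly
    //     OpParentheticalAssertionBegin
    //         OpSimpleNestedAlternativeBegin ("a")
    //         OpSimpleNestedAlternativeNext  ("b")
    //         OpSimpleNestedAlternativeEnd
    //     OpParentheticalAssertionEnd
    // with the Begin/End pair and each alternative node doubly linked via
    // m_previousOp/m_nextOp, exactly as wired up below.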
    void opCompileParentheticalAssertion(PatternTerm* term)
    {
        size_t parenBegin = m_ops.size();
        m_ops.append(OpParentheticalAssertionBegin);

        m_ops.append(OpSimpleNestedAlternativeBegin);
        m_ops.last().m_previousOp = notFound;
        m_ops.last().m_term = term;
        Vector_shared<OwnPtr<PatternAlternative> >& alternatives = term->parentheses.disjunction->m_alternatives;
        for (unsigned i = 0; i < alternatives.size(); ++i) {
            size_t lastOpIndex = m_ops.size() - 1;

            PatternAlternative* nestedAlternative = alternatives[i].get();
            opCompileAlternative(nestedAlternative);

            size_t thisOpIndex = m_ops.size();
            m_ops.append(YarrOp(OpSimpleNestedAlternativeNext));

            YarrOp& lastOp = m_ops[lastOpIndex];
            YarrOp& thisOp = m_ops[thisOpIndex];

            lastOp.m_alternative = nestedAlternative;
            lastOp.m_nextOp = thisOpIndex;
            thisOp.m_previousOp = lastOpIndex;
            thisOp.m_term = term;
        }
        YarrOp& lastOp = m_ops.last();
        ASSERT(lastOp.m_op == OpSimpleNestedAlternativeNext);
        lastOp.m_op = OpSimpleNestedAlternativeEnd;
        lastOp.m_alternative = 0;
        lastOp.m_nextOp = notFound;

        size_t parenEnd = m_ops.size();
        m_ops.append(OpParentheticalAssertionEnd);

        m_ops[parenBegin].m_term = term;
        m_ops[parenBegin].m_previousOp = notFound;
        m_ops[parenBegin].m_nextOp = parenEnd;
        m_ops[parenEnd].m_term = term;
        m_ops[parenEnd].m_previousOp = parenBegin;
        m_ops[parenEnd].m_nextOp = notFound;
    }

    // opCompileAlternative
    // Called to emit nodes for all terms in an alternative.
    void opCompileAlternative(PatternAlternative* alternative)
    {
        optimizeAlternative(alternative);

        for (unsigned i = 0; i < alternative->m_terms.size(); ++i) {
            PatternTerm* term = &alternative->m_terms[i];

            switch (term->type) {
            case PatternTerm::TypeParenthesesSubpattern:
                opCompileParenthesesSubpattern(term);
                break;

            case PatternTerm::TypeParentheticalAssertion:
                opCompileParentheticalAssertion(term);
                break;

            default:
                m_ops.append(term);
            }
        }
    }
    // opCompileBody
    // This method compiles the body disjunction of the regular expression.
    // The body consists of two sets of alternatives - zero or more 'once
    // through' (BOL anchored) alternatives, followed by zero or more
    // repeated alternatives.
    // For each of these two sets of alternatives, if not empty they will be
    // wrapped in a set of OpBodyAlternativeBegin/Next/End nodes (with the
    // 'begin' node referencing the first alternative, and 'next' nodes
    // referencing any further alternatives). The begin/next/end nodes are
    // linked together in a doubly linked list. In the case of repeating
    // alternatives, the end node is also linked back to the beginning.
    // If no repeating alternatives exist, then an OpMatchFailed node exists
    // to return the failing result.
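    //
    // As an illustrative sketch, /^abc|xyz/ would be compiled to roughly:
    //     OpBodyAlternativeBegin ("^abc", once through) ... OpBodyAlternativeEnd
    //     OpBodyAlternativeBegin ("xyz", repeating)     ... OpBodyAlternativeEnd
    // whereas a fully anchored pattern such as /^abc/ would emit only the 'once
    // through' chain followed by an OpMatchFailed node.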
    void opCompileBody(PatternDisjunction* disjunction)
    {
        Vector_shared<OwnPtr<PatternAlternative> >& alternatives = disjunction->m_alternatives;
        size_t currentAlternativeIndex = 0;

        // Emit the 'once through' alternatives.
        if (alternatives.size() && alternatives[0]->onceThrough()) {
            m_ops.append(YarrOp(OpBodyAlternativeBegin));
            m_ops.last().m_previousOp = notFound;

            do {
                size_t lastOpIndex = m_ops.size() - 1;
                PatternAlternative* alternative = alternatives[currentAlternativeIndex].get();
                opCompileAlternative(alternative);

                size_t thisOpIndex = m_ops.size();
                m_ops.append(YarrOp(OpBodyAlternativeNext));

                YarrOp& lastOp = m_ops[lastOpIndex];
                YarrOp& thisOp = m_ops[thisOpIndex];

                lastOp.m_alternative = alternative;
                lastOp.m_nextOp = thisOpIndex;
                thisOp.m_previousOp = lastOpIndex;

                ++currentAlternativeIndex;
            } while (currentAlternativeIndex < alternatives.size() && alternatives[currentAlternativeIndex]->onceThrough());

            YarrOp& lastOp = m_ops.last();
            ASSERT(lastOp.m_op == OpBodyAlternativeNext);
            lastOp.m_op = OpBodyAlternativeEnd;
            lastOp.m_alternative = 0;
            lastOp.m_nextOp = notFound;
        }

        if (currentAlternativeIndex == alternatives.size()) {
            m_ops.append(YarrOp(OpMatchFailed));
            return;
        }

        // Emit the repeated alternatives.
        size_t repeatLoop = m_ops.size();
        m_ops.append(YarrOp(OpBodyAlternativeBegin));
        m_ops.last().m_previousOp = notFound;

        do {
            size_t lastOpIndex = m_ops.size() - 1;
            PatternAlternative* alternative = alternatives[currentAlternativeIndex].get();
            ASSERT(!alternative->onceThrough());
            opCompileAlternative(alternative);

            size_t thisOpIndex = m_ops.size();
            m_ops.append(YarrOp(OpBodyAlternativeNext));

            YarrOp& lastOp = m_ops[lastOpIndex];
            YarrOp& thisOp = m_ops[thisOpIndex];

            lastOp.m_alternative = alternative;
            lastOp.m_nextOp = thisOpIndex;
            thisOp.m_previousOp = lastOpIndex;

            ++currentAlternativeIndex;
        } while (currentAlternativeIndex < alternatives.size());

        YarrOp& lastOp = m_ops.last();
        ASSERT(lastOp.m_op == OpBodyAlternativeNext);
        lastOp.m_op = OpBodyAlternativeEnd;
        lastOp.m_alternative = 0;
        lastOp.m_nextOp = repeatLoop;
    }
    void generateEnter()
    {
#if CPU(X86_64)
        push(X86Registers::ebp);
        move(stackPointerRegister, X86Registers::ebp);
        push(X86Registers::ebx);
        // The ABI doesn't guarantee the upper bits are zero on unsigned arguments, so clear them ourselves.
        zeroExtend32ToPtr(index, index);
        zeroExtend32ToPtr(length, length);
#if OS(WINDOWS)
        if (compileMode == IncludeSubpatterns)
            loadPtr(Address(X86Registers::ebp, 6 * sizeof(void*)), output);
#endif
#elif CPU(X86)
        push(X86Registers::ebp);
        move(stackPointerRegister, X86Registers::ebp);
        // TODO: do we need spill registers to fill the output pointer if there are no sub captures?
        push(X86Registers::ebx);
        push(X86Registers::edi);
        push(X86Registers::esi);
        // load output into edi (2 = saved ebp + return address).
#if COMPILER(MSVC)
        loadPtr(Address(X86Registers::ebp, 2 * sizeof(void*)), input);
        loadPtr(Address(X86Registers::ebp, 3 * sizeof(void*)), index);
        loadPtr(Address(X86Registers::ebp, 4 * sizeof(void*)), length);
        if (compileMode == IncludeSubpatterns)
            loadPtr(Address(X86Registers::ebp, 5 * sizeof(void*)), output);
#else
        if (compileMode == IncludeSubpatterns)
            loadPtr(Address(X86Registers::ebp, 2 * sizeof(void*)), output);
#endif
#elif CPU(ARM)
        push(ARMRegisters::r4);
        push(ARMRegisters::r5);
        push(ARMRegisters::r6);
#if CPU(ARM_TRADITIONAL)
        push(ARMRegisters::r8); // scratch register
#endif
        if (compileMode == IncludeSubpatterns)
            move(ARMRegisters::r3, output);
#elif CPU(SH4)
        push(SH4Registers::r11);
        push(SH4Registers::r13);
#elif CPU(MIPS)
        // Do nothing.
#endif
    }
    void generateReturn()
    {
#if CPU(X86_64)
#if OS(WINDOWS)
        // Store the return value in the allocated space pointed to by rcx.
        store64(returnRegister, Address(X86Registers::ecx));
        store64(returnRegister2, Address(X86Registers::ecx, sizeof(void*)));
        move(X86Registers::ecx, returnRegister);
#endif
        pop(X86Registers::ebx);
        pop(X86Registers::ebp);
#elif CPU(X86)
        pop(X86Registers::esi);
        pop(X86Registers::edi);
        pop(X86Registers::ebx);
        pop(X86Registers::ebp);
#elif CPU(ARM)
#if CPU(ARM_TRADITIONAL)
        pop(ARMRegisters::r8); // scratch register
#endif
        pop(ARMRegisters::r6);
        pop(ARMRegisters::r5);
        pop(ARMRegisters::r4);
#elif CPU(SH4)
        pop(SH4Registers::r13);
        pop(SH4Registers::r11);
#elif CPU(MIPS)
        // Do nothing.
#endif
        ret();
    }

public:
    YarrGenerator(YarrPattern& pattern, YarrCharSize charSize)
        : m_pattern(pattern)
        , m_charSize(charSize)
        , m_charScale(m_charSize == Char8 ? TimesOne : TimesTwo)
        , m_shouldFallBack(false)
        , m_checked(0)
    {
    }
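
    // Drive the full JIT pipeline: emit the prologue, compile the pattern into
    // the linear YarrOp form, generate the matching and backtracking code, and
    // link the result into the YarrCodeBlock (or flag a fallback to the
    // interpreter for constructs the JIT doesn't handle).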
    void compile(VM* vm, YarrCodeBlock& jitObject)
    {
        generateEnter();
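
        // If the start index is not within the input string there is nothing
        // to match; return WTF::notFound immediately.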
        Jump hasInput = checkInput();
        move(TrustedImmPtr((void*)WTF::notFound), returnRegister);
        move(TrustedImm32(0), returnRegister2);
        generateReturn();
        hasInput.link(this);
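
        // The output vector holds a (start, end) int32 pair per subpattern;
        // writing -1 to each start slot marks every capture as unmatched.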
        if (compileMode == IncludeSubpatterns) {
            for (unsigned i = 0; i < m_pattern.m_numSubpatterns + 1; ++i)
                store32(TrustedImm32(-1), Address(output, (i << 1) * sizeof(int)));
        }

        if (!m_pattern.m_body->m_hasFixedSize)
            setMatchStart(index);

        initCallFrame();

        // Compile the pattern to the internal 'YarrOp' representation.
        opCompileBody(m_pattern.m_body);

        // If we encountered anything we can't handle in the JIT code
        // (e.g. backreferences) then return early.
        if (m_shouldFallBack) {
            jitObject.setFallBack(true);
            return;
        }
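
        // Emit the forward matching path for the compiled ops, then the
        // corresponding backtracking path.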
        generate();
        backtrack();

        // Link & finalize the code.
        LinkBuffer linkBuffer(*vm, this, REGEXP_CODE_ID);
        m_backtrackingState.linkDataLabels(linkBuffer);

        if (compileMode == MatchOnly) {
            if (m_charSize == Char8)
                jitObject.set8BitCodeMatchOnly(FINALIZE_CODE(linkBuffer, ("Match-only 8-bit regular expression")));
            else
                jitObject.set16BitCodeMatchOnly(FINALIZE_CODE(linkBuffer, ("Match-only 16-bit regular expression")));
        } else {
            if (m_charSize == Char8)
                jitObject.set8BitCode(FINALIZE_CODE(linkBuffer, ("8-bit regular expression")));
            else
                jitObject.set16BitCode(FINALIZE_CODE(linkBuffer, ("16-bit regular expression")));
        }
        jitObject.setFallBack(m_shouldFallBack);
    }

private:
    YarrPattern& m_pattern;

    YarrCharSize m_charSize;

    Scale m_charScale;

    // Used to detect regular expression constructs that are not currently
    // supported in the JIT; fall back to the interpreter when this is detected.
    bool m_shouldFallBack;

    // The regular expression expressed as a linear sequence of operations.
    Vector<YarrOp, 128> m_ops;

    // This records the current input offset being applied due to the current
    // set of alternatives we are nested within. E.g. when matching the
    // character 'b' within the regular expression /abc/, we will know that
    // the minimum size for the alternative is 3, checked upon entry to the
    // alternative, and that 'b' is at offset 1 from the start, and as such
    // when matching 'b' we need to apply an offset of -2 to the load
    // (index has already been advanced past all 3 checked characters, while
    // 'b' sits at offset 1, giving 1 - 3 = -2).
    //
    // FIXME: This should go away. Rather than tracking this value throughout
    // code generation, we should gather this information up front & store it
    // on the YarrOp structure.
    int m_checked;

    // This class records state whilst generating the backtracking path of code.
    BacktrackingState m_backtrackingState;
};

void jitCompile(YarrPattern& pattern, YarrCharSize charSize, VM* vm, YarrCodeBlock& jitObject, YarrJITCompileMode mode)
{
    if (mode == MatchOnly)
        YarrGenerator<MatchOnly>(pattern, charSize).compile(vm, jitObject);
    else
        YarrGenerator<IncludeSubpatterns>(pattern, charSize).compile(vm, jitObject);
}
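
// A minimal usage sketch (the surrounding setup is illustrative, not part of
// this file): compile a pattern for 8-bit input, then fall back to the Yarr
// interpreter if the JIT declined the pattern.
//
//     YarrCodeBlock codeBlock;
//     jitCompile(pattern, Char8, vm, codeBlock, IncludeSubpatterns);
//     if (codeBlock.isFallBack()) {
//         // The pattern uses a construct the JIT can't handle
//         // (e.g. backreferences); match with the interpreter instead.
//     }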

}}

#endif // #if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)

#endif