DFGByteCodeParser.cpp

/*
 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

#include "ArrayConstructor.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGArrayMode.h"
#include "DFGCapabilities.h"
#include "GetByIdStatus.h"
#include "Operations.h"
#include "PreciseJumpTargets.h"
#include "PutByIdStatus.h"
#include "ResolveGlobalStatus.h"
#include "StringConstructor.h"
#include <wtf/CommaPrinter.h>
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>

namespace JSC { namespace DFG {
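
// Key used to deduplicate constant buffers: a buffer is identified by the
// CodeBlock that owns it together with its index within that block. Used
// below as the key type of ByteCodeParser::m_constantBufferCache.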
class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }
    
    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }
    
    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }
    
    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }
    
    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }
    
    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }
    
    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }
    
private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }
    
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

namespace WTF {

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_currentProfilingIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_preservedVars(m_codeBlock->m_numVars)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
        
        for (int i = 0; i < m_codeBlock->m_numVars; ++i)
            m_preservedVars.set(i);
    }
    
    // Parse a full CodeBlock of bytecode.
    bool parse();
    
private:
    struct InlineStackEntry;
    
    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();
    
    // Helper for min and max.
    bool handleMinMax(bool usesResult, int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
    
    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(Interpreter*, Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(bool usesResult, Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
    // Handle setting the result of an intrinsic.
    void setIntrinsicResult(bool usesResult, int resultOperand, Node*);
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    bool handleIntrinsic(bool usesResult, int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
    bool handleConstantInternalFunction(bool usesResult, int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
    void handleGetByOffset(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        PropertyOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        const GetByIdStatus&);
    Node* getScope(bool skipTop, unsigned skipCount);
    
    // Convert a set of ResolveOperations into graph nodes
    bool parseResolveOperations(SpeculatedType, unsigned identifierNumber, ResolveOperations*, PutToBaseOperation*, Node** base, Node** value);
    
    // Prepare to parse a block.
    void prepareToParseBlock();
    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BlockIndex>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BlockIndex>& possibleTargets);
    
    VariableAccessData* newVariableAccessData(int operand, bool isCaptured)
    {
        ASSERT(operand < FirstConstantRegisterIndex);
        
        m_graph.m_variableAccessData.append(VariableAccessData(static_cast<VirtualRegister>(operand), isCaptured));
        return &m_graph.m_variableAccessData.last();
    }
    
    // Get/Set the operands/result of a bytecode instruction.
    Node* getDirect(int operand)
    {
        // Is this a constant?
        if (operand >= FirstConstantRegisterIndex) {
            unsigned constant = operand - FirstConstantRegisterIndex;
            ASSERT(constant < m_constants.size());
            return getJSConstant(constant);
        }

        ASSERT(operand != JSStack::Callee);
        
        // Is this an argument?
        if (operandIsArgument(operand))
            return getArgument(operand);
        
        // Must be a local.
        return getLocal((unsigned)operand);
    }
    
    Node* get(int operand)
    {
        if (operand == JSStack::Callee) {
            if (inlineCallFrame() && inlineCallFrame()->callee)
                return cellConstant(inlineCallFrame()->callee.get());
            return getCallee();
        }
        
        return getDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    enum SetMode { NormalSet, SetOnEntry };
    void setDirect(int operand, Node* value, SetMode setMode = NormalSet)
    {
        // Is this an argument?
        if (operandIsArgument(operand)) {
            setArgument(operand, value, setMode);
            return;
        }
        
        // Must be a local.
        setLocal((unsigned)operand, value, setMode);
    }
    
    void set(int operand, Node* value, SetMode setMode = NormalSet)
    {
        setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }
    
    void setPair(int operand1, Node* value1, int operand2, Node* value2)
    {
        // First emit dead SetLocals for the benefit of OSR.
        set(operand1, value1);
        set(operand2, value2);
        
        // Now emit the real SetLocals.
        set(operand1, value1);
        set(operand2, value2);
    }
    
    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
        SpeculatedType prediction = 
            m_inlineStackTop->m_lazyOperands.prediction(
                LazyOperandValueProfileKey(m_currentIndex, node->local()));
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLog("Lazy operand [@", node->index(), ", bc#", m_currentIndex, ", r", node->local(), "] prediction: ", SpeculationDump(prediction), "\n");
#endif
        node->variableAccessData()->predict(prediction);
        return node;
    }
    
    // Used in implementing get/set, above, where the operand is a local variable.
    Node* getLocal(unsigned operand)
    {
        Node* node = m_currentBlock->variablesAtTail.local(operand);
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as a compile-time optimization only.
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            if (!isCaptured) {
                switch (node->op()) {
                case GetLocal:
                    return node;
                case SetLocal:
                    return node->child1().node();
                default:
                    break;
                }
            }
        } else {
            m_preservedVars.set(operand);
            variable = newVariableAccessData(operand, isCaptured);
        }
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.local(operand) = node;
        return node;
    }
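
    // Like getLocal, but for stores. In NormalSet mode, the previous value of a
    // captured local, or of a local that carries an inlined call frame's argument,
    // is flushed first so that it remains observable to activations, the arguments
    // object, and OSR exit.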
    void setLocal(unsigned operand, Node* value, SetMode setMode = NormalSet)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        if (setMode == NormalSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(operand) = node;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    Node* getArgument(unsigned operand)
    {
        unsigned argument = operandToArgument(operand);
        ASSERT(argument < m_numArguments);
        
        Node* node = m_currentBlock->variablesAtTail.argument(argument);
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }
    
    void setArgument(int operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned argument = operandToArgument(operand);
        ASSERT(argument < m_numArguments);
        
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);

        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode == NormalSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);
        
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;
    }
    
    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }
    
    ArgumentPosition* findArgumentPositionForLocal(int operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand >= static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize))
                continue;
            if (operand == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand < static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize - inlineCallFrame->arguments.size()))
                continue;
            int argument = operandToArgument(operand - inlineCallFrame->stackOffset);
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }
    
    ArgumentPosition* findArgumentPosition(int operand)
    {
        if (operandIsArgument(operand))
            return findArgumentPositionForArgument(operandToArgument(operand));
        return findArgumentPositionForLocal(operand);
    }
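
    // The flush operations emit a Flush node for an operand so that the most
    // recent SetLocal to it is kept alive. This keeps the value available in its
    // stack slot for captured variables, the arguments of inlined calls, and OSR exit.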
    void flush(int operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    void flushDirect(int operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }
    
    void flushDirect(int operand, ArgumentPosition* argumentPosition)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        ASSERT(operand < FirstConstantRegisterIndex);
        
        if (!operandIsArgument(operand))
            m_preservedVars.set(operand);
        
        Node* node = m_currentBlock->variablesAtTail.operand(operand);
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
        if (argumentPosition)
            argumentPosition->addVariable(variable);
    }

    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame)
            numArguments = inlineCallFrame->arguments.size();
        else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(argumentToOperand(argument)));
        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
            if (!inlineStackEntry->m_codeBlock->isCaptured(local))
                continue;
            flushDirect(inlineStackEntry->remapOperand(local));
        }
    }

    void flushAllArgumentsAndCapturedVariablesInInlineStack()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }

    void flushArgumentsAndCapturedVariables()
    {
        flush(m_inlineStackTop);
    }

    // Get an operand, and perform a ToInt32/ToNumber conversion on it.
    Node* getToInt32(int operand)
    {
        return toInt32(get(operand));
    }

    // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
    Node* toInt32(Node* node)
    {
        if (node->hasInt32Result())
            return node;

        if (node->op() == UInt32ToNumber)
            return node->child1().node();

        // Check for numeric constants boxed as JSValues.
        if (canFold(node)) {
            JSValue v = valueOfJSConstant(node);
            if (v.isInt32())
                return getJSConstant(node->constantNumber());
            if (v.isNumber())
                return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
        }

        return addToGraph(ValueToInt32, node);
    }

    // NOTE: Only use this to construct constants that arise from non-speculative
    // constant folding. I.e. creating constants using this if we had constant
    // field inference would be a bad idea, since the bytecode parser's folding
    // doesn't handle liveness preservation.
    Node* getJSConstantForValue(JSValue constantValue)
    {
        unsigned constantIndex = m_codeBlock->addOrFindConstant(constantValue);
        if (constantIndex >= m_constants.size())
            m_constants.append(ConstantRecord());
        
        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        
        return getJSConstant(constantIndex);
    }

    Node* getJSConstant(unsigned constant)
    {
        Node* node = m_constants[constant].asJSValue;
        if (node)
            return node;

        Node* result = addToGraph(JSConstant, OpInfo(constant));
        m_constants[constant].asJSValue = result;
        return result;
    }

    Node* getCallee()
    {
        return addToGraph(GetCallee);
    }

    // Helper functions to get/set the this value.
    Node* getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }

    void setThis(Node* value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }

    // Convenience methods for checking nodes for constants.
    bool isJSConstant(Node* node)
    {
        return node->op() == JSConstant;
    }
    bool isInt32Constant(Node* node)
    {
        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
    }
    // Convenience methods for getting constant values.
    JSValue valueOfJSConstant(Node* node)
    {
        ASSERT(isJSConstant(node));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        ASSERT(isInt32Constant(node));
        return valueOfJSConstant(node).asInt32();
    }
    
    // This method returns a JSConstant with the value 'undefined'.
    Node* constantUndefined()
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }

            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsUndefined());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

    // This method returns a JSConstant with the value 'null'.
    Node* constantNull()
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }

            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsNull());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

    // This method returns a DoubleConstant with the value 1.
    Node* one()
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
                    return getJSConstant(m_constant1);
            }

            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsNumber(1));
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
        return getJSConstant(m_constant1);
    }
    
    // This method returns a DoubleConstant with the value NaN.
    Node* constantNaN()
    {
        JSValue nan = jsNaN();
        
        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }

            // Add the value nan to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(nan);
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value nan.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
        return getJSConstant(m_constantNaN);
    }
    
    Node* cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, 0);
        if (result.isNewEntry)
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
        
        return result.iterator->value;
    }
    
    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }
    
    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame(), m_currentProfilingIndex - m_currentIndex);
    }
    
    bool canFold(Node* node)
    {
        return node->isStronglyProvedConstantIn(inlineCallFrame());
    }

    // Our codegen for constant strict equality performs a bitwise comparison,
    // so we can only select values that have a consistent bitwise identity.
    bool isConstantForCompareStrictEq(Node* node)
    {
        if (!node->isConstant())
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isBoolean() || value.isUndefinedOrNull();
    }
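
    // The addToGraph overloads allocate a new node, stamp it with the current
    // code origin, append it to the current basic block, and return it. The
    // VarArg form consumes the children queued up via addVarArgChild().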
    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), child1, child2, child3);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    
    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        Node* result = m_graph.addNode(
            SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        
        m_numPassedVarArgs = 0;
        
        return result;
    }

    void addVarArgChild(Node* child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }
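
    // Build a varargs Call/Construct node from the bytecode's callee, argument
    // count, and register offset operands, and store the result if the next
    // instruction is op_call_put_result.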
    Node* addCall(Interpreter* interpreter, Instruction* currentInstruction, NodeType op)
    {
        Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call);

        SpeculatedType prediction = SpecNone;
        if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
            m_currentProfilingIndex = m_currentIndex + OPCODE_LENGTH(op_call);
            prediction = getPrediction();
        }
        
        addVarArgChild(get(currentInstruction[1].u.operand));
        int argCount = currentInstruction[2].u.operand;
        if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
            m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;

        int registerOffset = currentInstruction[3].u.operand;
        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(registerOffset + argumentToOperand(i)));

        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
        if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result)
            set(putInstruction[1].u.operand, call);

        return call;
    }
    
    Node* addStructureTransitionCheck(JSCell* object, Structure* structure)
    {
        // Add a weak JS constant for the object regardless, since the code should
        // be jettisoned if the object ever dies.
        Node* objectNode = cellConstant(object);
        
        if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
            addToGraph(StructureTransitionWatchpoint, OpInfo(structure), objectNode);
            return objectNode;
        }
        
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
        
        return objectNode;
    }
    
    Node* addStructureTransitionCheck(JSCell* object)
    {
        return addStructureTransitionCheck(object, object->structure());
    }
    
    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
    {
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(bytecodeIndex);
    }

    SpeculatedType getPrediction(unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
        
        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }
        
        return prediction;
    }
    
    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_currentProfilingIndex);
    }
    
    SpeculatedType getPrediction()
    {
        return getPrediction(m_currentProfilingIndex);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
    {
        profile->computeUpdatedPrediction(m_inlineStackTop->m_codeBlock);
        return ArrayMode::fromObserved(profile, action, false);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile)
    {
        return getArrayMode(profile, Array::Read);
    }
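
    // Like getArrayMode, but widens the mode to its out-of-bounds-tolerant form
    // when the slow-case profile fires, and plants a CheckStructure on the base
    // when the profile reports a single definite structure.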
    ArrayMode getArrayModeAndEmitChecks(ArrayProfile* profile, Array::Action action, Node* base)
    {
        profile->computeUpdatedPrediction(m_inlineStackTop->m_codeBlock);
        
#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        if (m_inlineStackTop->m_profiledBlock->numberOfRareCaseProfiles())
            dataLogF("Slow case profile for bc#%u: %u\n", m_currentIndex, m_inlineStackTop->m_profiledBlock->rareCaseProfileForBytecodeOffset(m_currentIndex)->m_counter);
        dataLogF("Array profile for bc#%u: %p%s%s, %u\n", m_currentIndex, profile->expectedStructure(), profile->structureIsPolymorphic() ? " (polymorphic)" : "", profile->mayInterceptIndexedAccesses() ? " (may intercept)" : "", profile->observedArrayModes());
#endif
        
        bool makeSafe =
            m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
            || profile->outOfBounds();
        
        ArrayMode result = ArrayMode::fromObserved(profile, action, makeSafe);
        
        if (profile->hasDefiniteStructure()
            && result.benefitsFromStructureCheck()
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache))
            addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(profile->expectedStructure())), base);
        
        return result;
    }
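
    // makeSafe marks an arithmetic node as needing overflow and/or negative-zero
    // checks when the baseline profile or prior OSR exits say the fast path has
    // already failed for this bytecode.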
    Node* makeSafe(Node* node)
    {
        bool likelyToTakeSlowCase;
        if (!isX86() && node->op() == ArithMod)
            likelyToTakeSlowCase = false;
        else
            likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);
        
        if (!likelyToTakeSlowCase
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return node;
        
        switch (node->op()) {
        case UInt32ToNumber:
        case ArithAdd:
        case ArithSub:
        case ArithNegate:
        case ValueAdd:
        case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw double.
            node->mergeFlags(NodeMayOverflow);
            break;
            
        case ArithMul:
            if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)) {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Making ArithMul @%u take deepest slow case.\n", node->index());
#endif
                node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
            } else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero)) {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Making ArithMul @%u take faster slow case.\n", node->index());
#endif
                node->mergeFlags(NodeMayNegZero);
            }
            break;
            
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        
        return node;
    }
    
    Node* makeDivSafe(Node* node)
    {
        ASSERT(node->op() == ArithDiv);
        
        // The main slow case counter for op_div in the old JIT counts only when
        // the operands are not numbers. We don't care about that since we already
        // have speculations in place that take care of that separately. We only
        // care about when the outcome of the division is not an integer, which
        // is what the special fast case counter tells us.
        
        if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return node;
        
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLogF("Making %s @%u safe at bc#%u because special fast-case counter is at %u and exit profiles say %d, %d\n", Graph::opName(node->op()), node->index(), m_currentIndex, m_inlineStackTop->m_profiledBlock->specialFastCaseProfileForBytecodeOffset(m_currentIndex)->m_counter, m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow), m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero));
#endif
        
        // FIXME: It might be possible to make this more granular. The DFG certainly can
        // distinguish between negative zero and overflow in its exit profiles.
        node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
        
        return node;
    }
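
    // For a non-direct put transition, check that every structure recorded in the
    // prototype chain still matches the current structure of the corresponding
    // prototype object; if any link has changed, the cached chain cannot be trusted.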
    bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
    {
        if (direct)
            return true;
        
        if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
            return false;
        
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
                return false;
        }
        
        return true;
    }
    
    void buildOperandMapsIfNecessary();
    
    VM* m_vm;
    CodeBlock* m_codeBlock;
    CodeBlock* m_profiledBlock;
    Graph& m_graph;

    // The current block being generated.
    BasicBlock* m_currentBlock;
    // The bytecode index of the current instruction being generated.
    unsigned m_currentIndex;
    // The bytecode index of the value profile of the current instruction being generated.
    unsigned m_currentProfilingIndex;

    // We use these values during code generation, and to avoid the need for
    // special handling we make sure they are available as constants in the
    // CodeBlock's constant pool. These variables are initialized to
    // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
    // constant pool, as necessary.
    unsigned m_constantUndefined;
    unsigned m_constantNull;
    unsigned m_constantNaN;
    unsigned m_constant1;
    HashMap<JSCell*, unsigned> m_cellConstants;
    HashMap<JSCell*, Node*> m_cellConstantNodes;

    // A constant in the constant pool may be represented by more than one
    // node in the graph, depending on the context in which it is being used.
    struct ConstantRecord {
        ConstantRecord()
            : asInt32(0)
            , asNumeric(0)
            , asJSValue(0)
        {
        }
        
        Node* asInt32;
        Node* asNumeric;
        Node* asJSValue;
    };
    // Track the index of the node whose result is the current value for every
    // register value in the bytecode - argument, local, and temporary.
    Vector<ConstantRecord, 16> m_constants;

    // The number of arguments passed to the function.
    unsigned m_numArguments;
    // The number of locals (vars + temporaries) used in the function.
    unsigned m_numLocals;
    // The set of registers we need to preserve across BasicBlock boundaries;
    // typically equal to the set of vars, but we expand this to cover all
    // temporaries that persist across blocks (due to ?:, &&, ||, etc).
    BitVector m_preservedVars;
    // The number of slots (in units of sizeof(Register)) that we need to
    // preallocate for calls emanating from this frame. This includes the
    // size of the CallFrame, but only if this is not a leaf function. (I.e.
    // this is 0 if and only if this function is a leaf.)
    unsigned m_parameterSlots;
    // The number of var args passed to the next var arg node.
    unsigned m_numPassedVarArgs;
    
    HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
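
    // One InlineStackEntry is created per code block being parsed: the machine
    // (root) code block sits at the bottom of the stack, each inlined callee
    // pushes a new entry, and m_inlineStackTop always refers to the block whose
    // bytecode is currently being turned into DFG nodes.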
    struct InlineStackEntry {
        ByteCodeParser* m_byteCodeParser;
        
        CodeBlock* m_codeBlock;
        CodeBlock* m_profiledBlock;
        InlineCallFrame* m_inlineCallFrame;
        
        ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
        
        QueryableExitProfile m_exitProfile;
        
        // Remapping of identifier and constant numbers from the code block being
        // inlined (inline callee) to the code block that we're inlining into
        // (the machine code block, which is the transitive, though not necessarily
        // direct, caller).
        Vector<unsigned> m_identifierRemap;
        Vector<unsigned> m_constantRemap;
        Vector<unsigned> m_constantBufferRemap;
        
        // Blocks introduced by this code block, which need successor linking.
        // May include up to one basic block that includes the continuation after
        // the callsite in the caller. These must be appended in the order that they
        // are created, but their bytecodeBegin values need not be in order as they
        // are ignored.
        Vector<UnlinkedBlock> m_unlinkedBlocks;
        
        // Potential block linking targets. Must be sorted by bytecodeBegin, and
        // cannot have two blocks that have the same bytecodeBegin. For this very
        // reason, this is not equivalent to m_unlinkedBlocks.
        Vector<BlockIndex> m_blockLinkingTargets;
        
        // If the callsite's basic block was split into two, then this will be
        // the head of the callsite block. It needs its successors linked to the
        // m_unlinkedBlocks, but not the other way around: there's no way for
        // any blocks in m_unlinkedBlocks to jump back into this block.
        BlockIndex m_callsiteBlockHead;
        
        // Does the callsite block head need linking? This is typically true
        // but will be false for the machine code block's inline stack entry
        // (since that one is not inlined) and for cases where an inline callee
        // did the linking for us.
        bool m_callsiteBlockHeadNeedsLinking;
        
        VirtualRegister m_returnValue;
        
        // Speculations about variable types collected from the profiled code block,
        // which are based on OSR exit profiles that past DFG compilations of this
        // code block had gathered.
        LazyOperandValueProfileParser m_lazyOperands;
        
        // Did we see any returns? We need to handle the (uncommon but necessary)
        // case where a procedure that does not return was inlined.
        bool m_didReturn;
        
        // Did we have any early returns?
        bool m_didEarlyReturn;
        
        // Pointers to the argument position trackers for this slice of code.
        Vector<ArgumentPosition*> m_argumentPositions;
        
        InlineStackEntry* m_caller;
        
        InlineStackEntry(
            ByteCodeParser*,
            CodeBlock*,
            CodeBlock* profiledBlock,
            BlockIndex callsiteBlockHead,
            JSFunction* callee, // Null if this is a closure call.
            VirtualRegister returnValueVR,
            VirtualRegister inlineCallFrameStart,
            int argumentCountIncludingThis,
            CodeSpecializationKind);
        
        ~InlineStackEntry()
        {
            m_byteCodeParser->m_inlineStackTop = m_caller;
        }
        
        int remapOperand(int operand) const
        {
            if (!m_inlineCallFrame)
                return operand;
            
            if (operand >= FirstConstantRegisterIndex) {
                int result = m_constantRemap[operand - FirstConstantRegisterIndex];
                ASSERT(result >= FirstConstantRegisterIndex);
                return result;
            }

            ASSERT(operand != JSStack::Callee);

            return operand + m_inlineCallFrame->stackOffset;
        }
    };
    
    InlineStackEntry* m_inlineStackTop;

    // Have we built operand maps? We initialize them lazily, and only when doing
    // inlining.
    bool m_haveBuiltOperandMaps;
    // Mapping between identifier names and numbers.
    IdentifierMap m_identifierMap;
    // Mapping between values and constant numbers.
    JSValueMap m_jsValueMap;
    // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
    // work-around for the fact that JSValueMap can't handle "empty" values.
    unsigned m_emptyJSValueIndex;
    
    Instruction* m_currentInstruction;
};
  946. #define NEXT_OPCODE(name) \
  947. m_currentIndex += OPCODE_LENGTH(name); \
  948. continue
  949. #define LAST_OPCODE(name) \
  950. m_currentIndex += OPCODE_LENGTH(name); \
  951. return shouldContinueParsing
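// NEXT_OPCODE advances m_currentIndex past the current instruction and continues
// the dispatch loop in parseBlock(); LAST_OPCODE does the same advance but returns
// from parseBlock(), propagating shouldContinueParsing. A typical opcode case in
// the switch below therefore looks like:
//
//     case op_mov: {
//         Node* op = get(currentInstruction[2].u.operand);
//         set(currentInstruction[1].u.operand, op);
//         NEXT_OPCODE(op_mov);
//     }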
  952. void ByteCodeParser::handleCall(Interpreter* interpreter, Instruction* currentInstruction, NodeType op, CodeSpecializationKind kind)
  953. {
  954. ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
  955. Node* callTarget = get(currentInstruction[1].u.operand);
  956. CallLinkStatus callLinkStatus;
  957. if (m_graph.isConstant(callTarget))
  958. callLinkStatus = CallLinkStatus(m_graph.valueOfJSConstant(callTarget)).setIsProved(true);
  959. else {
  960. callLinkStatus = CallLinkStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex);
  961. callLinkStatus.setHasBadFunctionExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction));
  962. callLinkStatus.setHasBadCacheExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
  963. callLinkStatus.setHasBadExecutableExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadExecutable));
  964. }
  965. #if DFG_ENABLE(DEBUG_VERBOSE)
  966. dataLog("For call at bc#", m_currentIndex, ": ", callLinkStatus, "\n");
  967. #endif
  968. if (!callLinkStatus.canOptimize()) {
  969. // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
  970. // that we cannot optimize them.
  971. addCall(interpreter, currentInstruction, op);
  972. return;
  973. }
  974. int argumentCountIncludingThis = currentInstruction[2].u.operand;
  975. int registerOffset = currentInstruction[3].u.operand;
  976. // Do we have a result?
  977. bool usesResult = false;
  978. int resultOperand = 0; // make compiler happy
  979. unsigned nextOffset = m_currentIndex + OPCODE_LENGTH(op_call);
  980. Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call);
  981. SpeculatedType prediction = SpecNone;
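// In this bytecode format the call's result, if it is used, is consumed by a
// separate op_call_put_result instruction immediately after the call. Peek at
// that instruction to learn the result operand and the speculated result type
// (via getPrediction()).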
  982. if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
  983. resultOperand = putInstruction[1].u.operand;
  984. usesResult = true;
  985. m_currentProfilingIndex = nextOffset;
  986. prediction = getPrediction();
  987. nextOffset += OPCODE_LENGTH(op_call_put_result);
  988. }
  989. if (InternalFunction* function = callLinkStatus.internalFunction()) {
  990. if (handleConstantInternalFunction(usesResult, resultOperand, function, registerOffset, argumentCountIncludingThis, prediction, kind)) {
  991. // This phantoming has to be *after* the code for the intrinsic, to signify that
  992. // the inputs must be kept alive whatever exits the intrinsic may do.
  993. addToGraph(Phantom, callTarget);
  994. emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
  995. return;
  996. }
  997. // Can only handle this using the generic call handler.
  998. addCall(interpreter, currentInstruction, op);
  999. return;
  1000. }
  1001. Intrinsic intrinsic = callLinkStatus.intrinsicFor(kind);
  1002. if (intrinsic != NoIntrinsic) {
  1003. emitFunctionChecks(callLinkStatus, callTarget, registerOffset, kind);
  1004. if (handleIntrinsic(usesResult, resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
  1005. // This phantoming has to be *after* the code for the intrinsic, to signify that
  1006. // the inputs must be kept alive whatever exits the intrinsic may do.
  1007. addToGraph(Phantom, callTarget);
  1008. emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
  1009. #if ENABLE(DETACHED_JIT)
  1010. #pragma message "[SECURE JSCORE] profiling not supported"
  1011. #else
  1012. if (m_graph.m_compilation)
  1013. m_graph.m_compilation->noticeInlinedCall();
  1014. #endif
  1015. return;
  1016. }
  1017. } else if (handleInlining(usesResult, callTarget, resultOperand, callLinkStatus, registerOffset, argumentCountIncludingThis, nextOffset, kind)) {
  1018. #if ENABLE(DETACHED_JIT)
  1019. #pragma message "[SECURE JSCORE] profiling not supported"
  1020. #else
  1021. if (m_graph.m_compilation)
  1022. m_graph.m_compilation->noticeInlinedCall();
  1023. #endif
  1024. return;
  1025. }
  1026. addCall(interpreter, currentInstruction, op);
  1027. }
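// Emit the checks that pin down the callee before an inlined or intrinsic call.
// If the CallLinkStatus is already proved, we only keep the callee (and 'this',
// for calls) alive with a Phantom; otherwise we emit CheckFunction for a known
// JSFunction, or CheckStructure plus CheckExecutable for a closure call.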
  1028. void ByteCodeParser::emitFunctionChecks(const CallLinkStatus& callLinkStatus, Node* callTarget, int registerOffset, CodeSpecializationKind kind)
  1029. {
  1030. Node* thisArgument;
  1031. if (kind == CodeForCall)
  1032. thisArgument = get(registerOffset + argumentToOperand(0));
  1033. else
  1034. thisArgument = 0;
  1035. if (callLinkStatus.isProved()) {
  1036. addToGraph(Phantom, callTarget, thisArgument);
  1037. return;
  1038. }
  1039. ASSERT(callLinkStatus.canOptimize());
  1040. if (JSFunction* function = callLinkStatus.function())
  1041. addToGraph(CheckFunction, OpInfo(function), callTarget, thisArgument);
  1042. else {
  1043. ASSERT(callLinkStatus.structure());
  1044. ASSERT(callLinkStatus.executable());
  1045. addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(callLinkStatus.structure())), callTarget);
  1046. addToGraph(CheckExecutable, OpInfo(callLinkStatus.executable()), callTarget, thisArgument);
  1047. }
  1048. }
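// Keep the outgoing arguments alive across an intrinsic or internal-function
// fast path. For construct, argument 0 ('this') is skipped.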
  1049. void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind kind)
  1050. {
  1051. for (int i = kind == CodeForCall ? 0 : 1; i < argumentCountIncludingThis; ++i)
  1052. addToGraph(Phantom, get(registerOffset + argumentToOperand(i)));
  1053. }
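// Attempt to inline a call. Returns true only if inlining succeeded, in which
// case the caller must not emit a generic call node. The steps are: check that
// the callee is a non-host FunctionExecutable with acceptable arity, that the
// inlining depth limit is not exceeded and the call is not recursive, and that
// there is a baseline CodeBlock passing the inlining heuristics; emit the
// function checks; reserve stack space and locals for the inline call frame;
// recursively parse the callee's bytecode; then stitch the callee's basic
// blocks back into the caller, creating a continuation block if the callee had
// early returns.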
  1054. bool ByteCodeParser::handleInlining(bool usesResult, Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
  1055. {
  1056. // First, the really simple checks: do we have an actual JS function?
  1057. if (!callLinkStatus.executable())
  1058. return false;
  1059. if (callLinkStatus.executable()->isHostFunction())
  1060. return false;
  1061. FunctionExecutable* executable = jsCast<FunctionExecutable*>(callLinkStatus.executable());
  1062. // Does the number of arguments we're passing match the arity of the target? We currently
  1063. // inline only if the number of arguments passed is greater than or equal to the number
1064. // of arguments expected.
  1065. if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis)
  1066. return false;
  1067. // Have we exceeded inline stack depth, or are we trying to inline a recursive call?
  1068. // If either of these are detected, then don't inline.
  1069. unsigned depth = 0;
  1070. for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
  1071. ++depth;
  1072. if (depth >= Options::maximumInliningDepth())
  1073. return false; // Depth exceeded.
  1074. if (entry->executable() == executable)
  1075. return false; // Recursion detected.
  1076. }
  1077. // Do we have a code block, and does the code block's size match the heuristics/requirements for
  1078. // being an inline candidate? We might not have a code block if code was thrown away or if we
  1079. // simply hadn't actually made this call yet. We could still theoretically attempt to inline it
  1080. // if we had a static proof of what was being called; this might happen for example if you call a
  1081. // global function, where watchpointing gives us static information. Overall, it's a rare case
  1082. // because we expect that any hot callees would have already been compiled.
  1083. CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
  1084. if (!codeBlock)
  1085. return false;
  1086. if (!canInlineFunctionFor(codeBlock, kind, callLinkStatus.isClosureCall()))
  1087. return false;
  1088. #if DFG_ENABLE(DEBUG_VERBOSE)
  1089. dataLogF("Inlining executable %p.\n", executable);
  1090. #endif
  1091. // Now we know without a doubt that we are committed to inlining. So begin the process
  1092. // by checking the callee (if necessary) and making sure that arguments and the callee
  1093. // are flushed.
  1094. emitFunctionChecks(callLinkStatus, callTargetNode, registerOffset, kind);
  1095. // FIXME: Don't flush constants!
  1096. int inlineCallFrameStart = m_inlineStackTop->remapOperand(registerOffset) - JSStack::CallFrameHeaderSize;
  1097. // Make sure that the area used by the call frame is reserved.
  1098. for (int arg = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numVars; arg-- > inlineCallFrameStart;)
  1099. m_preservedVars.set(arg);
  1100. // Make sure that we have enough locals.
  1101. unsigned newNumLocals = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
  1102. if (newNumLocals > m_numLocals) {
  1103. m_numLocals = newNumLocals;
  1104. for (size_t i = 0; i < m_graph.m_blocks.size(); ++i)
  1105. m_graph.m_blocks[i]->ensureLocals(newNumLocals);
  1106. }
  1107. size_t argumentPositionStart = m_graph.m_argumentPositions.size();
  1108. InlineStackEntry inlineStackEntry(
  1109. this, codeBlock, codeBlock, m_graph.m_blocks.size() - 1,
  1110. callLinkStatus.function(), (VirtualRegister)m_inlineStackTop->remapOperand(
  1111. usesResult ? resultOperand : InvalidVirtualRegister),
  1112. (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
  1113. // This is where the actual inlining really happens.
  1114. unsigned oldIndex = m_currentIndex;
  1115. unsigned oldProfilingIndex = m_currentProfilingIndex;
  1116. m_currentIndex = 0;
  1117. m_currentProfilingIndex = 0;
  1118. addToGraph(InlineStart, OpInfo(argumentPositionStart));
  1119. if (callLinkStatus.isClosureCall()) {
  1120. addToGraph(SetCallee, callTargetNode);
  1121. addToGraph(SetMyScope, addToGraph(GetScope, callTargetNode));
  1122. }
  1123. parseCodeBlock();
  1124. m_currentIndex = oldIndex;
  1125. m_currentProfilingIndex = oldProfilingIndex;
  1126. // If the inlined code created some new basic blocks, then we have linking to do.
  1127. if (inlineStackEntry.m_callsiteBlockHead != m_graph.m_blocks.size() - 1) {
  1128. ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
  1129. if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
  1130. linkBlock(m_graph.m_blocks[inlineStackEntry.m_callsiteBlockHead].get(), inlineStackEntry.m_blockLinkingTargets);
  1131. else
  1132. ASSERT(m_graph.m_blocks[inlineStackEntry.m_callsiteBlockHead]->isLinked);
  1133. // It's possible that the callsite block head is not owned by the caller.
  1134. if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
  1135. // It's definitely owned by the caller, because the caller created new blocks.
  1136. // Assert that this all adds up.
  1137. ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_blockIndex == inlineStackEntry.m_callsiteBlockHead);
  1138. ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
  1139. inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
  1140. } else {
  1141. // It's definitely not owned by the caller. Tell the caller that he does not
  1142. // need to link his callsite block head, because we did it for him.
  1143. ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
  1144. ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
  1145. inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
  1146. }
  1147. linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
  1148. } else
  1149. ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
  1150. BasicBlock* lastBlock = m_graph.m_blocks.last().get();
  1151. // If there was a return, but no early returns, then we're done. We allow parsing of
  1152. // the caller to continue in whatever basic block we're in right now.
  1153. if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
  1154. ASSERT(lastBlock->isEmpty() || !lastBlock->last()->isTerminal());
  1155. // If we created new blocks then the last block needs linking, but in the
  1156. // caller. It doesn't need to be linked to, but it needs outgoing links.
  1157. if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
  1158. #if DFG_ENABLE(DEBUG_VERBOSE)
  1159. dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (inline return case).\n", lastBlock, lastBlock->bytecodeBegin, m_currentIndex);
  1160. #endif
  1161. // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
  1162. // for release builds because this block will never serve as a potential target
  1163. // in the linker's binary search.
  1164. lastBlock->bytecodeBegin = m_currentIndex;
  1165. m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size() - 1));
  1166. }
  1167. m_currentBlock = m_graph.m_blocks.last().get();
  1168. #if DFG_ENABLE(DEBUG_VERBOSE)
  1169. dataLogF("Done inlining executable %p, continuing code generation at epilogue.\n", executable);
  1170. #endif
  1171. return true;
  1172. }
1173. // If we get to this point then every block must end in some sort of terminal.
  1174. ASSERT(lastBlock->last()->isTerminal());
  1175. // Link the early returns to the basic block we're about to create.
  1176. for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
  1177. if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
  1178. continue;
  1179. BasicBlock* block = m_graph.m_blocks[inlineStackEntry.m_unlinkedBlocks[i].m_blockIndex].get();
  1180. ASSERT(!block->isLinked);
  1181. Node* node = block->last();
  1182. ASSERT(node->op() == Jump);
  1183. ASSERT(node->takenBlockIndex() == NoBlock);
  1184. node->setTakenBlockIndex(m_graph.m_blocks.size());
  1185. inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
  1186. #if !ASSERT_DISABLED
  1187. block->isLinked = true;
  1188. #endif
  1189. }
  1190. // Need to create a new basic block for the continuation at the caller.
  1191. OwnPtr<BasicBlock> block = adoptPtr(new BasicBlock(nextOffset, m_numArguments, m_numLocals));
  1192. #if DFG_ENABLE(DEBUG_VERBOSE)
  1193. dataLogF("Creating inline epilogue basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.m_blocks.size(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
  1194. #endif
  1195. m_currentBlock = block.get();
  1196. ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_caller->m_blockLinkingTargets.last()]->bytecodeBegin < nextOffset);
  1197. m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size()));
  1198. m_inlineStackTop->m_caller->m_blockLinkingTargets.append(m_graph.m_blocks.size());
  1199. m_graph.m_blocks.append(block.release());
  1200. prepareToParseBlock();
  1201. // At this point we return and continue to generate code for the caller, but
  1202. // in the new basic block.
  1203. #if DFG_ENABLE(DEBUG_VERBOSE)
  1204. dataLogF("Done inlining executable %p, continuing code generation in new block.\n", executable);
  1205. #endif
  1206. return true;
  1207. }
  1208. void ByteCodeParser::setIntrinsicResult(bool usesResult, int resultOperand, Node* node)
  1209. {
  1210. if (!usesResult)
  1211. return;
  1212. set(resultOperand, node);
  1213. }
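// Math.min/Math.max with zero, one, or two arguments can be lowered directly:
//     Math.min()     -> NaN constant
//     Math.min(x)    -> x, with a Phantom enforcing the number speculation
//     Math.min(x, y) -> ArithMin(x, y)   (ArithMax for Math.max)
// Calls with more arguments fall back to the generic call path.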
  1214. bool ByteCodeParser::handleMinMax(bool usesResult, int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
  1215. {
  1216. if (argumentCountIncludingThis == 1) { // Math.min()
  1217. setIntrinsicResult(usesResult, resultOperand, constantNaN());
  1218. return true;
  1219. }
  1220. if (argumentCountIncludingThis == 2) { // Math.min(x)
  1221. Node* result = get(registerOffset + argumentToOperand(1));
  1222. addToGraph(Phantom, Edge(result, NumberUse));
  1223. setIntrinsicResult(usesResult, resultOperand, result);
  1224. return true;
  1225. }
  1226. if (argumentCountIncludingThis == 3) { // Math.min(x, y)
  1227. setIntrinsicResult(usesResult, resultOperand, addToGraph(op, get(registerOffset + argumentToOperand(1)), get(registerOffset + argumentToOperand(2))));
  1228. return true;
  1229. }
  1230. // Don't handle >=3 arguments for now.
  1231. return false;
  1232. }
  1233. // FIXME: We dead-code-eliminate unused Math intrinsics, but that's invalid because
  1234. // they need to perform the ToNumber conversion, which can have side-effects.
  1235. bool ByteCodeParser::handleIntrinsic(bool usesResult, int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
  1236. {
  1237. switch (intrinsic) {
  1238. case AbsIntrinsic: {
  1239. if (argumentCountIncludingThis == 1) { // Math.abs()
  1240. setIntrinsicResult(usesResult, resultOperand, constantNaN());
  1241. return true;
  1242. }
  1243. if (!MacroAssembler::supportsFloatingPointAbs())
  1244. return false;
  1245. Node* node = addToGraph(ArithAbs, get(registerOffset + argumentToOperand(1)));
  1246. if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
  1247. node->mergeFlags(NodeMayOverflow);
  1248. setIntrinsicResult(usesResult, resultOperand, node);
  1249. return true;
  1250. }
  1251. case MinIntrinsic:
  1252. return handleMinMax(usesResult, resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
  1253. case MaxIntrinsic:
  1254. return handleMinMax(usesResult, resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
  1255. case SqrtIntrinsic: {
  1256. if (argumentCountIncludingThis == 1) { // Math.sqrt()
  1257. setIntrinsicResult(usesResult, resultOperand, constantNaN());
  1258. return true;
  1259. }
  1260. if (!MacroAssembler::supportsFloatingPointSqrt())
  1261. return false;
  1262. setIntrinsicResult(usesResult, resultOperand, addToGraph(ArithSqrt, get(registerOffset + argumentToOperand(1))));
  1263. return true;
  1264. }
  1265. case ArrayPushIntrinsic: {
  1266. if (argumentCountIncludingThis != 2)
  1267. return false;
  1268. ArrayMode arrayMode = getArrayMode(m_currentInstruction[5].u.arrayProfile);
  1269. if (!arrayMode.isJSArray())
  1270. return false;
  1271. switch (arrayMode.type()) {
  1272. case Array::Undecided:
  1273. case Array::Int32:
  1274. case Array::Double:
  1275. case Array::Contiguous:
  1276. case Array::ArrayStorage: {
  1277. Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
  1278. if (usesResult)
  1279. set(resultOperand, arrayPush);
  1280. return true;
  1281. }
  1282. default:
  1283. return false;
  1284. }
  1285. }
  1286. case ArrayPopIntrinsic: {
  1287. if (argumentCountIncludingThis != 1)
  1288. return false;
  1289. ArrayMode arrayMode = getArrayMode(m_currentInstruction[5].u.arrayProfile);
  1290. if (!arrayMode.isJSArray())
  1291. return false;
  1292. switch (arrayMode.type()) {
  1293. case Array::Int32:
  1294. case Array::Double:
  1295. case Array::Contiguous:
  1296. case Array::ArrayStorage: {
  1297. Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)));
  1298. if (usesResult)
  1299. set(resultOperand, arrayPop);
  1300. return true;
  1301. }
  1302. default:
  1303. return false;
  1304. }
  1305. }
  1306. case CharCodeAtIntrinsic: {
  1307. if (argumentCountIncludingThis != 2)
  1308. return false;
  1309. int thisOperand = registerOffset + argumentToOperand(0);
  1310. int indexOperand = registerOffset + argumentToOperand(1);
  1311. Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
  1312. if (usesResult)
  1313. set(resultOperand, charCode);
  1314. return true;
  1315. }
  1316. case CharAtIntrinsic: {
  1317. if (argumentCountIncludingThis != 2)
  1318. return false;
  1319. int thisOperand = registerOffset + argumentToOperand(0);
  1320. int indexOperand = registerOffset + argumentToOperand(1);
  1321. Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
  1322. if (usesResult)
  1323. set(resultOperand, charCode);
  1324. return true;
  1325. }
  1326. case FromCharCodeIntrinsic: {
  1327. if (argumentCountIncludingThis != 2)
  1328. return false;
  1329. int indexOperand = registerOffset + argumentToOperand(1);
  1330. Node* charCode = addToGraph(StringFromCharCode, getToInt32(indexOperand));
  1331. if (usesResult)
  1332. set(resultOperand, charCode);
  1333. return true;
  1334. }
  1335. case RegExpExecIntrinsic: {
  1336. if (argumentCountIncludingThis != 2)
  1337. return false;
  1338. Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
  1339. if (usesResult)
  1340. set(resultOperand, regExpExec);
  1341. return true;
  1342. }
  1343. case RegExpTestIntrinsic: {
  1344. if (argumentCountIncludingThis != 2)
  1345. return false;
  1346. Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
  1347. if (usesResult)
  1348. set(resultOperand, regExpExec);
  1349. return true;
  1350. }
  1351. case IMulIntrinsic: {
  1352. if (argumentCountIncludingThis != 3)
  1353. return false;
  1354. int leftOperand = registerOffset + argumentToOperand(1);
  1355. int rightOperand = registerOffset + argumentToOperand(2);
  1356. Node* left = getToInt32(leftOperand);
  1357. Node* right = getToInt32(rightOperand);
  1358. setIntrinsicResult(usesResult, resultOperand, addToGraph(ArithIMul, left, right));
  1359. return true;
  1360. }
  1361. default:
  1362. return false;
  1363. }
  1364. }
  1365. bool ByteCodeParser::handleConstantInternalFunction(
  1366. bool usesResult, int resultOperand, InternalFunction* function, int registerOffset,
  1367. int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
  1368. {
  1369. // If we ever find that we have a lot of internal functions that we specialize for,
  1370. // then we should probably have some sort of hashtable dispatch, or maybe even
  1371. // dispatch straight through the MethodTable of the InternalFunction. But for now,
  1372. // it seems that this case is hit infrequently enough, and the number of functions
  1373. // we know about is small enough, that having just a linear cascade of if statements
  1374. // is good enough.
  1375. UNUSED_PARAM(prediction); // Remove this once we do more things.
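// Array called with a single argument is the length form, so it becomes
// NewArrayWithSize; with any other argument count the arguments become a
// varargs NewArray. String becomes an empty-string constant or ToString of the
// first argument, wrapped in NewStringObject when invoked as a constructor.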
  1376. if (function->classInfo() == &ArrayConstructor::s_info) {
  1377. if (argumentCountIncludingThis == 2) {
  1378. setIntrinsicResult(
  1379. usesResult, resultOperand,
  1380. addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(registerOffset + argumentToOperand(1))));
  1381. return true;
  1382. }
  1383. for (int i = 1; i < argumentCountIncludingThis; ++i)
  1384. addVarArgChild(get(registerOffset + argumentToOperand(i)));
  1385. setIntrinsicResult(
  1386. usesResult, resultOperand,
  1387. addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
  1388. return true;
  1389. } else if (function->classInfo() == &StringConstructor::s_info) {
  1390. Node* result;
  1391. if (argumentCountIncludingThis <= 1)
  1392. result = cellConstant(m_vm->smallStrings.emptyString());
  1393. else
  1394. result = addToGraph(ToString, get(registerOffset + argumentToOperand(1)));
  1395. if (kind == CodeForConstruct)
  1396. result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
  1397. setIntrinsicResult(usesResult, resultOperand, result);
  1398. return true;
  1399. }
  1400. return false;
  1401. }
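// Emit a load from a known property offset. Inline offsets read straight off
// the object cell, while out-of-line offsets first load the butterfly via
// GetButterfly; the offset and identifier number are recorded in
// m_storageAccessData for later phases.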
  1402. Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, unsigned identifierNumber, PropertyOffset offset)
  1403. {
  1404. Node* propertyStorage;
  1405. if (isInlineOffset(offset))
  1406. propertyStorage = base;
  1407. else
  1408. propertyStorage = addToGraph(GetButterfly, base);
  1409. // FIXME: It would be far more efficient for load elimination (and safer from
  1410. // an OSR standpoint) if GetByOffset also referenced the object we were loading
  1411. // from, and if we could load eliminate a GetByOffset even if the butterfly
  1412. // had changed. That would be a great success.
  1413. Node* getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage);
  1414. StorageAccessData storageAccessData;
  1415. storageAccessData.offset = indexRelativeToBase(offset);
  1416. storageAccessData.identifierNumber = identifierNumber;
  1417. m_graph.m_storageAccessData.append(storageAccessData);
  1418. return getByOffset;
  1419. }
  1420. void ByteCodeParser::handleGetByOffset(
  1421. int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
  1422. PropertyOffset offset)
  1423. {
  1424. set(destinationOperand, handleGetByOffset(prediction, base, identifierNumber, offset));
  1425. }
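// Lower a get_by_id. If profiling says the access is not simple, or we have
// exited here before because of a bad (weak constant) cache, emit a generic
// GetById (GetByIdFlush if the access may make calls). Otherwise emit
// CheckStructure on the base, walk any prototype chain with structure
// transition checks, and finish with either a constant (for a known specific
// value) or a GetByOffset.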
  1426. void ByteCodeParser::handleGetById(
  1427. int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
  1428. const GetByIdStatus& getByIdStatus)
  1429. {
  1430. if (!getByIdStatus.isSimple()
  1431. || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
  1432. || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache)) {
  1433. set(destinationOperand,
  1434. addToGraph(
  1435. getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
  1436. OpInfo(identifierNumber), OpInfo(prediction), base));
  1437. return;
  1438. }
  1439. ASSERT(getByIdStatus.structureSet().size());
  1440. // The implementation of GetByOffset does not know to terminate speculative
  1441. // execution if it doesn't have a prediction, so we do it manually.
  1442. if (prediction == SpecNone)
  1443. addToGraph(ForceOSRExit);
  1444. #if ENABLE(DETACHED_JIT)
  1445. #pragma message "[SECURE JSCORE] profiler is disabled"
  1446. #else
  1447. else if (m_graph.m_compilation)
  1448. m_graph.m_compilation->noticeInlinedGetById();
  1449. #endif
  1450. Node* originalBaseForBaselineJIT = base;
  1451. addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(getByIdStatus.structureSet())), base);
  1452. if (!getByIdStatus.chain().isEmpty()) {
  1453. Structure* currentStructure = getByIdStatus.structureSet().singletonStructure();
  1454. JSObject* currentObject = 0;
  1455. for (unsigned i = 0; i < getByIdStatus.chain().size(); ++i) {
  1456. currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
  1457. currentStructure = getByIdStatus.chain()[i];
  1458. base = addStructureTransitionCheck(currentObject, currentStructure);
  1459. }
  1460. }
  1461. // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
  1462. // ensure that the base of the original get_by_id is kept alive until we're done with
  1463. // all of the speculations. We only insert the Phantom if there had been a CheckStructure
  1464. // on something other than the base following the CheckStructure on base, or if the
  1465. // access was compiled to a WeakJSConstant specific value, in which case we might not
  1466. // have any explicit use of the base at all.
  1467. if (getByIdStatus.specificValue() || originalBaseForBaselineJIT != base)
  1468. addToGraph(Phantom, originalBaseForBaselineJIT);
  1469. if (getByIdStatus.specificValue()) {
  1470. ASSERT(getByIdStatus.specificValue().isCell());
  1471. set(destinationOperand, cellConstant(getByIdStatus.specificValue().asCell()));
  1472. return;
  1473. }
  1474. handleGetByOffset(
  1475. destinationOperand, prediction, base, identifierNumber, getByIdStatus.offset());
  1476. }
  1477. void ByteCodeParser::prepareToParseBlock()
  1478. {
  1479. for (unsigned i = 0; i < m_constants.size(); ++i)
  1480. m_constants[i] = ConstantRecord();
  1481. m_cellConstantNodes.clear();
  1482. }
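// Build the node that materializes the scope used by a resolve: the callee's
// known scope (a cell constant) when we are inlined at a non-closure call
// site, otherwise GetMyScope, with SkipTopScope/SkipScope applied as requested
// by the resolve operations.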
  1483. Node* ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
  1484. {
  1485. Node* localBase;
  1486. if (inlineCallFrame() && !inlineCallFrame()->isClosureCall()) {
  1487. ASSERT(inlineCallFrame()->callee);
  1488. localBase = cellConstant(inlineCallFrame()->callee->scope());
  1489. } else
  1490. localBase = addToGraph(GetMyScope);
  1491. if (skipTop) {
  1492. ASSERT(!inlineCallFrame());
  1493. localBase = addToGraph(SkipTopScope, localBase);
  1494. }
  1495. for (unsigned n = skipCount; n--;)
  1496. localBase = addToGraph(SkipScope, localBase);
  1497. return localBase;
  1498. }
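// Translate a baseline ResolveOperations sequence into DFG nodes. The first
// loop handles the base-resolution prefix (global object, undefined, or a
// scope, possibly after skipping scopes); the switch that follows handles the
// value load: a global property with a ResolveGlobalStatus fast path, a global
// var, a watched global var, or a scoped var. Returns false when the sequence
// is one we cannot handle here.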
  1499. bool ByteCodeParser::parseResolveOperations(SpeculatedType prediction, unsigned identifier, ResolveOperations* resolveOperations, PutToBaseOperation* putToBaseOperation, Node** base, Node** value)
  1500. {
  1501. if (resolveOperations->isEmpty()) {
  1502. addToGraph(ForceOSRExit);
  1503. return false;
  1504. }
  1505. JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
  1506. int skipCount = 0;
  1507. bool skipTop = false;
  1508. bool skippedScopes = false;
  1509. bool setBase = false;
  1510. ResolveOperation* pc = resolveOperations->data();
  1511. Node* localBase = 0;
  1512. bool resolvingBase = true;
  1513. while (resolvingBase) {
  1514. switch (pc->m_operation) {
  1515. case ResolveOperation::ReturnGlobalObjectAsBase:
  1516. *base = cellConstant(globalObject);
  1517. ASSERT(!value);
  1518. return true;
  1519. case ResolveOperation::SetBaseToGlobal:
  1520. *base = cellConstant(globalObject);
  1521. setBase = true;
  1522. resolvingBase = false;
  1523. ++pc;
  1524. break;
  1525. case ResolveOperation::SetBaseToUndefined:
  1526. *base = constantUndefined();
  1527. setBase = true;
  1528. resolvingBase = false;
  1529. ++pc;
  1530. break;
  1531. case ResolveOperation::SetBaseToScope:
  1532. localBase = getScope(skipTop, skipCount);
  1533. *base = localBase;
  1534. setBase = true;
  1535. resolvingBase = false;
  1536. // Reset the scope skipping as we've already loaded it
  1537. skippedScopes = false;
  1538. ++pc;
  1539. break;
  1540. case ResolveOperation::ReturnScopeAsBase:
  1541. *base = getScope(skipTop, skipCount);
  1542. ASSERT(!value);
  1543. return true;
  1544. case ResolveOperation::SkipTopScopeNode:
  1545. ASSERT(!inlineCallFrame());
  1546. skipTop = true;
  1547. skippedScopes = true;
  1548. ++pc;
  1549. break;
  1550. case ResolveOperation::SkipScopes:
  1551. skipCount += pc->m_scopesToSkip;
  1552. skippedScopes = true;
  1553. ++pc;
  1554. break;
  1555. case ResolveOperation::CheckForDynamicEntriesBeforeGlobalScope:
  1556. return false;
  1557. case ResolveOperation::Fail:
  1558. return false;
  1559. default:
  1560. resolvingBase = false;
  1561. }
  1562. }
  1563. if (skippedScopes)
  1564. localBase = getScope(skipTop, skipCount);
  1565. if (base && !setBase)
  1566. *base = localBase;
  1567. ASSERT(value);
  1568. ResolveOperation* resolveValueOperation = pc;
  1569. switch (resolveValueOperation->m_operation) {
  1570. case ResolveOperation::GetAndReturnGlobalProperty: {
  1571. ResolveGlobalStatus status = ResolveGlobalStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex, resolveValueOperation, m_codeBlock->identifier(identifier));
  1572. if (status.isSimple()) {
  1573. ASSERT(status.structure());
  1574. Node* globalObjectNode = addStructureTransitionCheck(globalObject, status.structure());
  1575. if (status.specificValue()) {
  1576. ASSERT(status.specificValue().isCell());
  1577. *value = cellConstant(status.specificValue().asCell());
  1578. } else
  1579. *value = handleGetByOffset(prediction, globalObjectNode, identifier, status.offset());
  1580. return true;
  1581. }
  1582. Node* resolve = addToGraph(ResolveGlobal, OpInfo(m_graph.m_resolveGlobalData.size()), OpInfo(prediction));
  1583. m_graph.m_resolveGlobalData.append(ResolveGlobalData());
  1584. ResolveGlobalData& data = m_graph.m_resolveGlobalData.last();
  1585. data.identifierNumber = identifier;
  1586. data.resolveOperations = resolveOperations;
  1587. data.putToBaseOperation = putToBaseOperation;
  1588. data.resolvePropertyIndex = resolveValueOperation - resolveOperations->data();
  1589. *value = resolve;
  1590. return true;
  1591. }
  1592. case ResolveOperation::GetAndReturnGlobalVar: {
  1593. *value = addToGraph(
  1594. GetGlobalVar,
  1595. OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)),
  1596. OpInfo(prediction));
  1597. return true;
  1598. }
  1599. case ResolveOperation::GetAndReturnGlobalVarWatchable: {
  1600. SpeculatedType prediction = getPrediction();
  1601. JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
  1602. Identifier ident = m_codeBlock->identifier(identifier);
  1603. SymbolTableEntry entry = globalObject->symbolTable()->get(ident.impl());
  1604. if (!entry.couldBeWatched()) {
  1605. *value = addToGraph(GetGlobalVar, OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)), OpInfo(prediction));
  1606. return true;
  1607. }
  1608. // The watchpoint is still intact! This means that we will get notified if the
  1609. // current value in the global variable changes. So, we can inline that value.
  1610. // Moreover, currently we can assume that this value is a JSFunction*, which
  1611. // implies that it's a cell. This simplifies things, since in general we'd have
  1612. // to use a JSConstant for non-cells and a WeakJSConstant for cells. So instead
  1613. // of having both cases we just assert that the value is a cell.
  1614. // NB. If it wasn't for CSE, GlobalVarWatchpoint would have no need for the
  1615. // register pointer. But CSE tracks effects on global variables by comparing
  1616. // register pointers. Because CSE executes multiple times while the backend
  1617. // executes once, we use the following performance trade-off:
  1618. // - The node refers directly to the register pointer to make CSE super cheap.
  1619. // - To perform backend code generation, the node only contains the identifier
  1620. // number, from which it is possible to get (via a few average-time O(1)
  1621. // lookups) to the WatchpointSet.
  1622. addToGraph(GlobalVarWatchpoint, OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)), OpInfo(identifier));
  1623. JSValue specificValue = globalObject->registerAt(entry.getIndex()).get();
  1624. ASSERT(specificValue.isCell());
  1625. *value = cellConstant(specificValue.asCell());
  1626. return true;
  1627. }
  1628. case ResolveOperation::GetAndReturnScopedVar: {
  1629. Node* getScopeRegisters = addToGraph(GetScopeRegisters, localBase);
  1630. *value = addToGraph(GetScopedVar, OpInfo(resolveValueOperation->m_offset), OpInfo(prediction), getScopeRegisters);
  1631. return true;
  1632. }
  1633. default:
  1634. CRASH();
  1635. return false;
  1636. }
  1637. }
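// Parse bytecode into m_currentBlock, one instruction per loop iteration,
// stopping when m_currentIndex reaches 'limit' (a jump destination); at that
// point a Jump to the limit is planted unless the block is still empty. For
// the machine code block's first basic block this also plants SetArgument
// markers for every argument.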
  1638. bool ByteCodeParser::parseBlock(unsigned limit)
  1639. {
  1640. bool shouldContinueParsing = true;
  1641. Interpreter* interpreter = m_vm->interpreter;
  1642. Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
  1643. unsigned blockBegin = m_currentIndex;
  1644. // If we are the first basic block, introduce markers for arguments. This allows
  1645. // us to track if a use of an argument may use the actual argument passed, as
  1646. // opposed to using a value we set explicitly.
  1647. if (m_currentBlock == m_graph.m_blocks[0].get() && !inlineCallFrame()) {
  1648. m_graph.m_arguments.resize(m_numArguments);
  1649. for (unsigned argument = 0; argument < m_numArguments; ++argument) {
  1650. VariableAccessData* variable = newVariableAccessData(
  1651. argumentToOperand(argument), m_codeBlock->isCaptured(argumentToOperand(argument)));
  1652. variable->mergeStructureCheckHoistingFailed(
  1653. m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
  1654. Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
  1655. m_graph.m_arguments[argument] = setArgument;
  1656. m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
  1657. }
  1658. }
  1659. while (true) {
  1660. m_currentProfilingIndex = m_currentIndex;
  1661. // Don't extend over jump destinations.
  1662. if (m_currentIndex == limit) {
  1663. // Ordinarily we want to plant a jump. But refuse to do this if the block is
  1664. // empty. This is a special case for inlining, which might otherwise create
  1665. // some empty blocks in some cases. When parseBlock() returns with an empty
  1666. // block, it will get repurposed instead of creating a new one. Note that this
  1667. // logic relies on every bytecode resulting in one or more nodes, which would
  1668. // be true anyway except for op_loop_hint, which emits a Phantom to force this
  1669. // to be true.
  1670. if (!m_currentBlock->isEmpty())
  1671. addToGraph(Jump, OpInfo(m_currentIndex));
  1672. else {
  1673. #if DFG_ENABLE(DEBUG_VERBOSE)
  1674. dataLogF("Refusing to plant jump at limit %u because block %p is empty.\n", limit, m_currentBlock);
  1675. #endif
  1676. }
  1677. return shouldContinueParsing;
  1678. }
  1679. // Switch on the current bytecode opcode.
  1680. Instruction* currentInstruction = instructionsBegin + m_currentIndex;
  1681. m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
  1682. OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
  1683. #if ENABLE(DETACHED_JIT)
  1684. #pragma message "[SECURE JSCORE] profiler not supported"
  1685. #else
  1686. if (m_graph.m_compilation && opcodeID != op_call_put_result) {
  1687. addToGraph(CountExecution, OpInfo(m_graph.m_compilation->executionCounterFor(
  1688. Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
  1689. }
  1690. #endif
  1691. switch (opcodeID) {
  1692. // === Function entry opcodes ===
  1693. case op_enter:
  1694. // Initialize all locals to undefined.
  1695. for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
  1696. set(i, constantUndefined(), SetOnEntry);
  1697. NEXT_OPCODE(op_enter);
  1698. case op_convert_this: {
  1699. Node* op1 = getThis();
  1700. if (op1->op() != ConvertThis) {
  1701. ValueProfile* profile =
  1702. m_inlineStackTop->m_profiledBlock->valueProfileForBytecodeOffset(m_currentProfilingIndex);
  1703. profile->computeUpdatedPrediction();
  1704. #if DFG_ENABLE(DEBUG_VERBOSE)
  1705. dataLogF("[bc#%u]: profile %p: ", m_currentProfilingIndex, profile);
  1706. profile->dump(WTF::dataFile());
  1707. dataLogF("\n");
  1708. #endif
  1709. if (profile->m_singletonValueIsTop
  1710. || !profile->m_singletonValue
  1711. || !profile->m_singletonValue.isCell()
  1712. || profile->m_singletonValue.asCell()->classInfo() != &Structure::s_info)
  1713. setThis(addToGraph(ConvertThis, op1));
  1714. else {
  1715. addToGraph(
  1716. CheckStructure,
  1717. OpInfo(m_graph.addStructureSet(jsCast<Structure*>(profile->m_singletonValue.asCell()))),
  1718. op1);
  1719. }
  1720. }
  1721. NEXT_OPCODE(op_convert_this);
  1722. }
  1723. case op_create_this: {
  1724. int calleeOperand = currentInstruction[2].u.operand;
  1725. Node* callee = get(calleeOperand);
  1726. bool alreadyEmitted = false;
  1727. if (callee->op() == WeakJSConstant) {
  1728. JSCell* cell = callee->weakConstant();
  1729. ASSERT(cell->inherits(&JSFunction::s_info));
  1730. JSFunction* function = jsCast<JSFunction*>(cell);
  1731. ObjectAllocationProfile* allocationProfile = function->tryGetAllocationProfile();
  1732. if (allocationProfile) {
  1733. addToGraph(AllocationProfileWatchpoint, OpInfo(function));
  1734. // The callee is still live up to this point.
  1735. addToGraph(Phantom, callee);
  1736. set(currentInstruction[1].u.operand,
  1737. addToGraph(NewObject, OpInfo(allocationProfile->structure())));
  1738. alreadyEmitted = true;
  1739. }
  1740. }
  1741. if (!alreadyEmitted)
  1742. set(currentInstruction[1].u.operand,
  1743. addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
  1744. NEXT_OPCODE(op_create_this);
  1745. }
  1746. case op_new_object: {
  1747. set(currentInstruction[1].u.operand,
  1748. addToGraph(NewObject,
  1749. OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
  1750. NEXT_OPCODE(op_new_object);
  1751. }
  1752. case op_new_array: {
  1753. int startOperand = currentInstruction[2].u.operand;
  1754. int numOperands = currentInstruction[3].u.operand;
  1755. ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
  1756. for (int operandIdx = startOperand; operandIdx < startOperand + numOperands; ++operandIdx)
  1757. addVarArgChild(get(operandIdx));
  1758. set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
  1759. NEXT_OPCODE(op_new_array);
  1760. }
  1761. case op_new_array_with_size: {
  1762. int lengthOperand = currentInstruction[2].u.operand;
  1763. ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
  1764. set(currentInstruction[1].u.operand, addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(lengthOperand)));
  1765. NEXT_OPCODE(op_new_array_with_size);
  1766. }
  1767. case op_new_array_buffer: {
  1768. int startConstant = currentInstruction[2].u.operand;
  1769. int numConstants = currentInstruction[3].u.operand;
  1770. ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
  1771. NewArrayBufferData data;
  1772. data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
  1773. data.numConstants = numConstants;
  1774. data.indexingType = profile->selectIndexingType();
  1775. // If this statement has never executed, we'll have the wrong indexing type in the profile.
  1776. for (int i = 0; i < numConstants; ++i) {
  1777. data.indexingType =
  1778. leastUpperBoundOfIndexingTypeAndValue(
  1779. data.indexingType,
  1780. m_codeBlock->constantBuffer(data.startConstant)[i]);
  1781. }
  1782. m_graph.m_newArrayBufferData.append(data);
  1783. set(currentInstruction[1].u.operand, addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
  1784. NEXT_OPCODE(op_new_array_buffer);
  1785. }
  1786. case op_new_regexp: {
  1787. set(currentInstruction[1].u.operand, addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
  1788. NEXT_OPCODE(op_new_regexp);
  1789. }
  1790. case op_get_callee: {
  1791. ValueProfile* profile = currentInstruction[2].u.profile;
  1792. profile->computeUpdatedPrediction();
  1793. if (profile->m_singletonValueIsTop
  1794. || !profile->m_singletonValue
  1795. || !profile->m_singletonValue.isCell())
  1796. set(currentInstruction[1].u.operand, get(JSStack::Callee));
  1797. else {
  1798. ASSERT(profile->m_singletonValue.asCell()->inherits(&JSFunction::s_info));
  1799. Node* actualCallee = get(JSStack::Callee);
  1800. addToGraph(CheckFunction, OpInfo(profile->m_singletonValue.asCell()), actualCallee);
  1801. set(currentInstruction[1].u.operand, addToGraph(WeakJSConstant, OpInfo(profile->m_singletonValue.asCell())));
  1802. }
  1803. NEXT_OPCODE(op_get_callee);
  1804. }
  1805. // === Bitwise operations ===
  1806. case op_bitand: {
  1807. Node* op1 = getToInt32(currentInstruction[2].u.operand);
  1808. Node* op2 = getToInt32(currentInstruction[3].u.operand);
  1809. set(currentInstruction[1].u.operand, addToGraph(BitAnd, op1, op2));
  1810. NEXT_OPCODE(op_bitand);
  1811. }
  1812. case op_bitor: {
  1813. Node* op1 = getToInt32(currentInstruction[2].u.operand);
  1814. Node* op2 = getToInt32(currentInstruction[3].u.operand);
  1815. set(currentInstruction[1].u.operand, addToGraph(BitOr, op1, op2));
  1816. NEXT_OPCODE(op_bitor);
  1817. }
  1818. case op_bitxor: {
  1819. Node* op1 = getToInt32(currentInstruction[2].u.operand);
  1820. Node* op2 = getToInt32(currentInstruction[3].u.operand);
  1821. set(currentInstruction[1].u.operand, addToGraph(BitXor, op1, op2));
  1822. NEXT_OPCODE(op_bitxor);
  1823. }
  1824. case op_rshift: {
  1825. Node* op1 = getToInt32(currentInstruction[2].u.operand);
  1826. Node* op2 = getToInt32(currentInstruction[3].u.operand);
  1827. Node* result;
  1828. // Optimize out shifts by zero.
  1829. if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
  1830. result = op1;
  1831. else
  1832. result = addToGraph(BitRShift, op1, op2);
  1833. set(currentInstruction[1].u.operand, result);
  1834. NEXT_OPCODE(op_rshift);
  1835. }
  1836. case op_lshift: {
  1837. Node* op1 = getToInt32(currentInstruction[2].u.operand);
  1838. Node* op2 = getToInt32(currentInstruction[3].u.operand);
  1839. Node* result;
  1840. // Optimize out shifts by zero.
  1841. if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
  1842. result = op1;
  1843. else
  1844. result = addToGraph(BitLShift, op1, op2);
  1845. set(currentInstruction[1].u.operand, result);
  1846. NEXT_OPCODE(op_lshift);
  1847. }
  1848. case op_urshift: {
  1849. Node* op1 = getToInt32(currentInstruction[2].u.operand);
  1850. Node* op2 = getToInt32(currentInstruction[3].u.operand);
  1851. Node* result;
  1852. // The result of a zero-extending right shift is treated as an unsigned value.
  1853. // This means that if the top bit is set, the result is not in the int32 range,
  1854. // and as such must be stored as a double. If the shift amount is a constant,
  1855. // we may be able to optimize.
  1856. if (isInt32Constant(op2)) {
  1857. // If we know we are shifting by a non-zero amount, then since the operation
  1858. // zero fills we know the top bit of the result must be zero, and as such the
  1859. // result must be within the int32 range. Conversely, if this is a shift by
  1860. // zero, then the result may be changed by the conversion to unsigned, but it
  1861. // is not necessary to perform the shift!
  1862. if (valueOfInt32Constant(op2) & 0x1f)
  1863. result = addToGraph(BitURShift, op1, op2);
  1864. else
  1865. result = makeSafe(addToGraph(UInt32ToNumber, op1));
  1866. } else {
  1867. // Cannot optimize at this stage; shift & potentially rebox as a double.
  1868. result = addToGraph(BitURShift, op1, op2);
  1869. result = makeSafe(addToGraph(UInt32ToNumber, result));
  1870. }
  1871. set(currentInstruction[1].u.operand, result);
  1872. NEXT_OPCODE(op_urshift);
  1873. }
  1874. // === Increment/Decrement opcodes ===
  1875. case op_inc: {
  1876. unsigned srcDst = currentInstruction[1].u.operand;
  1877. Node* op = get(srcDst);
  1878. set(srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
  1879. NEXT_OPCODE(op_inc);
  1880. }
  1881. case op_dec: {
  1882. unsigned srcDst = currentInstruction[1].u.operand;
  1883. Node* op = get(srcDst);
  1884. set(srcDst, makeSafe(addToGraph(ArithSub, op, one())));
  1885. NEXT_OPCODE(op_dec);
  1886. }
  1887. // === Arithmetic operations ===
  1888. case op_add: {
  1889. Node* op1 = get(currentInstruction[2].u.operand);
  1890. Node* op2 = get(currentInstruction[3].u.operand);
  1891. if (op1->hasNumberResult() && op2->hasNumberResult())
  1892. set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithAdd, op1, op2)));
  1893. else
  1894. set(currentInstruction[1].u.operand, makeSafe(addToGraph(ValueAdd, op1, op2)));
  1895. NEXT_OPCODE(op_add);
  1896. }
  1897. case op_sub: {
  1898. Node* op1 = get(currentInstruction[2].u.operand);
  1899. Node* op2 = get(currentInstruction[3].u.operand);
  1900. set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithSub, op1, op2)));
  1901. NEXT_OPCODE(op_sub);
  1902. }
  1903. case op_negate: {
  1904. Node* op1 = get(currentInstruction[2].u.operand);
  1905. set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithNegate, op1)));
  1906. NEXT_OPCODE(op_negate);
  1907. }
  1908. case op_mul: {
  1909. // Multiply requires that the inputs are not truncated, unfortunately.
  1910. Node* op1 = get(currentInstruction[2].u.operand);
  1911. Node* op2 = get(currentInstruction[3].u.operand);
  1912. set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMul, op1, op2)));
  1913. NEXT_OPCODE(op_mul);
  1914. }
  1915. case op_mod: {
  1916. Node* op1 = get(currentInstruction[2].u.operand);
  1917. Node* op2 = get(currentInstruction[3].u.operand);
  1918. set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMod, op1, op2)));
  1919. NEXT_OPCODE(op_mod);
  1920. }
  1921. case op_div: {
  1922. Node* op1 = get(currentInstruction[2].u.operand);
  1923. Node* op2 = get(currentInstruction[3].u.operand);
  1924. set(currentInstruction[1].u.operand, makeDivSafe(addToGraph(ArithDiv, op1, op2)));
  1925. NEXT_OPCODE(op_div);
  1926. }
  1927. // === Misc operations ===
  1928. #if ENABLE(DEBUG_WITH_BREAKPOINT)
  1929. case op_debug:
  1930. addToGraph(Breakpoint);
  1931. NEXT_OPCODE(op_debug);
  1932. #endif
  1933. case op_mov: {
  1934. Node* op = get(currentInstruction[2].u.operand);
  1935. set(currentInstruction[1].u.operand, op);
  1936. NEXT_OPCODE(op_mov);
  1937. }
  1938. case op_check_has_instance:
  1939. addToGraph(CheckHasInstance, get(currentInstruction[3].u.operand));
  1940. NEXT_OPCODE(op_check_has_instance);
  1941. case op_instanceof: {
  1942. Node* value = get(currentInstruction[2].u.operand);
  1943. Node* prototype = get(currentInstruction[3].u.operand);
  1944. set(currentInstruction[1].u.operand, addToGraph(InstanceOf, value, prototype));
  1945. NEXT_OPCODE(op_instanceof);
  1946. }
  1947. case op_is_undefined: {
  1948. Node* value = get(currentInstruction[2].u.operand);
  1949. set(currentInstruction[1].u.operand, addToGraph(IsUndefined, value));
  1950. NEXT_OPCODE(op_is_undefined);
  1951. }
  1952. case op_is_boolean: {
  1953. Node* value = get(currentInstruction[2].u.operand);
  1954. set(currentInstruction[1].u.operand, addToGraph(IsBoolean, value));
  1955. NEXT_OPCODE(op_is_boolean);
  1956. }
  1957. case op_is_number: {
  1958. Node* value = get(currentInstruction[2].u.operand);
  1959. set(currentInstruction[1].u.operand, addToGraph(IsNumber, value));
  1960. NEXT_OPCODE(op_is_number);
  1961. }
  1962. case op_is_string: {
  1963. Node* value = get(currentInstruction[2].u.operand);
  1964. set(currentInstruction[1].u.operand, addToGraph(IsString, value));
  1965. NEXT_OPCODE(op_is_string);
  1966. }
  1967. case op_is_object: {
  1968. Node* value = get(currentInstruction[2].u.operand);
  1969. set(currentInstruction[1].u.operand, addToGraph(IsObject, value));
  1970. NEXT_OPCODE(op_is_object);
  1971. }
  1972. case op_is_function: {
  1973. Node* value = get(currentInstruction[2].u.operand);
  1974. set(currentInstruction[1].u.operand, addToGraph(IsFunction, value));
  1975. NEXT_OPCODE(op_is_function);
  1976. }
  1977. case op_not: {
  1978. Node* value = get(currentInstruction[2].u.operand);
  1979. set(currentInstruction[1].u.operand, addToGraph(LogicalNot, value));
  1980. NEXT_OPCODE(op_not);
  1981. }
  1982. case op_to_primitive: {
  1983. Node* value = get(currentInstruction[2].u.operand);
  1984. set(currentInstruction[1].u.operand, addToGraph(ToPrimitive, value));
  1985. NEXT_OPCODE(op_to_primitive);
  1986. }
  1987. case op_strcat: {
  1988. int startOperand = currentInstruction[2].u.operand;
  1989. int numOperands = currentInstruction[3].u.operand;
  1990. #if CPU(X86)
  1991. // X86 doesn't have enough registers to compile MakeRope with three arguments.
  1992. // Rather than try to be clever, we just make MakeRope dumber on this processor.
  1993. const unsigned maxRopeArguments = 2;
  1994. #else
  1995. const unsigned maxRopeArguments = 3;
  1996. #endif
  1997. OwnArrayPtr<Node*> toStringNodes = adoptArrayPtr(new Node*[numOperands]);
  1998. for (int i = 0; i < numOperands; i++)
  1999. toStringNodes[i] = addToGraph(ToString, get(startOperand + i));
  2000. for (int i = 0; i < numOperands; i++)
  2001. addToGraph(Phantom, toStringNodes[i]);
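// Fold the operands into a chain of MakeRope nodes: fill the small operand
// buffer, and whenever it holds maxRopeArguments strings collapse them into a
// single MakeRope that becomes the first operand of the next round, so an
// arbitrarily long strcat turns into a left-leaning chain of ropes.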
  2002. Node* operands[AdjacencyList::Size];
  2003. unsigned indexInOperands = 0;
  2004. for (unsigned i = 0; i < AdjacencyList::Size; ++i)
  2005. operands[i] = 0;
  2006. for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
  2007. if (indexInOperands == maxRopeArguments) {
  2008. operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
  2009. for (unsigned i = 1; i < AdjacencyList::Size; ++i)
  2010. operands[i] = 0;
  2011. indexInOperands = 1;
  2012. }
  2013. ASSERT(indexInOperands < AdjacencyList::Size);
  2014. ASSERT(indexInOperands < maxRopeArguments);
  2015. operands[indexInOperands++] = toStringNodes[operandIdx];
  2016. }
  2017. set(currentInstruction[1].u.operand,
  2018. addToGraph(MakeRope, operands[0], operands[1], operands[2]));
  2019. NEXT_OPCODE(op_strcat);
  2020. }
  2021. case op_less: {
  2022. Node* op1 = get(currentInstruction[2].u.operand);
  2023. Node* op2 = get(currentInstruction[3].u.operand);
  2024. if (canFold(op1) && canFold(op2)) {
  2025. JSValue a = valueOfJSConstant(op1);
  2026. JSValue b = valueOfJSConstant(op2);
  2027. if (a.isNumber() && b.isNumber()) {
  2028. set(currentInstruction[1].u.operand,
  2029. getJSConstantForValue(jsBoolean(a.asNumber() < b.asNumber())));
  2030. NEXT_OPCODE(op_less);
  2031. }
  2032. }
  2033. set(currentInstruction[1].u.operand, addToGraph(CompareLess, op1, op2));
  2034. NEXT_OPCODE(op_less);
  2035. }
  2036. case op_lesseq: {
  2037. Node* op1 = get(currentInstruction[2].u.operand);
  2038. Node* op2 = get(currentInstruction[3].u.operand);
  2039. if (canFold(op1) && canFold(op2)) {
  2040. JSValue a = valueOfJSConstant(op1);
  2041. JSValue b = valueOfJSConstant(op2);
  2042. if (a.isNumber() && b.isNumber()) {
  2043. set(currentInstruction[1].u.operand,
  2044. getJSConstantForValue(jsBoolean(a.asNumber() <= b.asNumber())));
  2045. NEXT_OPCODE(op_lesseq);
  2046. }
  2047. }
  2048. set(currentInstruction[1].u.operand, addToGraph(CompareLessEq, op1, op2));
  2049. NEXT_OPCODE(op_lesseq);
  2050. }
  2051. case op_greater: {
  2052. Node* op1 = get(currentInstruction[2].u.operand);
  2053. Node* op2 = get(currentInstruction[3].u.operand);
  2054. if (canFold(op1) && canFold(op2)) {
  2055. JSValue a = valueOfJSConstant(op1);
  2056. JSValue b = valueOfJSConstant(op2);
  2057. if (a.isNumber() && b.isNumber()) {
  2058. set(currentInstruction[1].u.operand,
  2059. getJSConstantForValue(jsBoolean(a.asNumber() > b.asNumber())));
  2060. NEXT_OPCODE(op_greater);
  2061. }
  2062. }
  2063. set(currentInstruction[1].u.operand, addToGraph(CompareGreater, op1, op2));
  2064. NEXT_OPCODE(op_greater);
  2065. }
  2066. case op_greatereq: {
  2067. Node* op1 = get(currentInstruction[2].u.operand);
  2068. Node* op2 = get(currentInstruction[3].u.operand);
  2069. if (canFold(op1) && canFold(op2)) {
  2070. JSValue a = valueOfJSConstant(op1);
  2071. JSValue b = valueOfJSConstant(op2);
  2072. if (a.isNumber() && b.isNumber()) {
  2073. set(currentInstruction[1].u.operand,
  2074. getJSConstantForValue(jsBoolean(a.asNumber() >= b.asNumber())));
  2075. NEXT_OPCODE(op_greatereq);
  2076. }
  2077. }
  2078. set(currentInstruction[1].u.operand, addToGraph(CompareGreaterEq, op1, op2));
  2079. NEXT_OPCODE(op_greatereq);
  2080. }
  2081. case op_eq: {
  2082. Node* op1 = get(currentInstruction[2].u.operand);
  2083. Node* op2 = get(currentInstruction[3].u.operand);
  2084. if (canFold(op1) && canFold(op2)) {
  2085. JSValue a = valueOfJSConstant(op1);
  2086. JSValue b = valueOfJSConstant(op2);
  2087. set(currentInstruction[1].u.operand,
  2088. getJSConstantForValue(jsBoolean(JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
  2089. NEXT_OPCODE(op_eq);
  2090. }
  2091. set(currentInstruction[1].u.operand, addToGraph(CompareEq, op1, op2));
  2092. NEXT_OPCODE(op_eq);
  2093. }
  2094. case op_eq_null: {
  2095. Node* value = get(currentInstruction[2].u.operand);
  2096. set(currentInstruction[1].u.operand, addToGraph(CompareEqConstant, value, constantNull()));
  2097. NEXT_OPCODE(op_eq_null);
  2098. }
  2099. case op_stricteq: {
  2100. Node* op1 = get(currentInstruction[2].u.operand);
  2101. Node* op2 = get(currentInstruction[3].u.operand);
  2102. if (canFold(op1) && canFold(op2)) {
  2103. JSValue a = valueOfJSConstant(op1);
  2104. JSValue b = valueOfJSConstant(op2);
  2105. set(currentInstruction[1].u.operand,
  2106. getJSConstantForValue(jsBoolean(JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
  2107. NEXT_OPCODE(op_stricteq);
  2108. }
  2109. if (isConstantForCompareStrictEq(op1))
  2110. set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op2, op1));
  2111. else if (isConstantForCompareStrictEq(op2))
  2112. set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op1, op2));
  2113. else
  2114. set(currentInstruction[1].u.operand, addToGraph(CompareStrictEq, op1, op2));
  2115. NEXT_OPCODE(op_stricteq);
  2116. }
  2117. case op_neq: {
  2118. Node* op1 = get(currentInstruction[2].u.operand);
  2119. Node* op2 = get(currentInstruction[3].u.operand);
  2120. if (canFold(op1) && canFold(op2)) {
  2121. JSValue a = valueOfJSConstant(op1);
  2122. JSValue b = valueOfJSConstant(op2);
  2123. set(currentInstruction[1].u.operand,
  2124. getJSConstantForValue(jsBoolean(!JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
  2125. NEXT_OPCODE(op_neq);
  2126. }
  2127. set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
  2128. NEXT_OPCODE(op_neq);
  2129. }
  2130. case op_neq_null: {
  2131. Node* value = get(currentInstruction[2].u.operand);
  2132. set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, constantNull())));
  2133. NEXT_OPCODE(op_neq_null);
  2134. }
  2135. case op_nstricteq: {
  2136. Node* op1 = get(currentInstruction[2].u.operand);
  2137. Node* op2 = get(currentInstruction[3].u.operand);
  2138. if (canFold(op1) && canFold(op2)) {
  2139. JSValue a = valueOfJSConstant(op1);
  2140. JSValue b = valueOfJSConstant(op2);
  2141. set(currentInstruction[1].u.operand,
  2142. getJSConstantForValue(jsBoolean(!JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
  2143. NEXT_OPCODE(op_nstricteq);
  2144. }
  2145. Node* invertedResult;
  2146. if (isConstantForCompareStrictEq(op1))
  2147. invertedResult = addToGraph(CompareStrictEqConstant, op2, op1);
  2148. else if (isConstantForCompareStrictEq(op2))
  2149. invertedResult = addToGraph(CompareStrictEqConstant, op1, op2);
  2150. else
  2151. invertedResult = addToGraph(CompareStrictEq, op1, op2);
  2152. set(currentInstruction[1].u.operand, addToGraph(LogicalNot, invertedResult));
  2153. NEXT_OPCODE(op_nstricteq);
  2154. }
  2155. // === Property access operations ===
  2156. case op_get_by_val: {
  2157. SpeculatedType prediction = getPrediction();
  2158. Node* base = get(currentInstruction[2].u.operand);
  2159. ArrayMode arrayMode = getArrayModeAndEmitChecks(currentInstruction[4].u.arrayProfile, Array::Read, base);
  2160. Node* property = get(currentInstruction[3].u.operand);
  2161. Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
  2162. set(currentInstruction[1].u.operand, getByVal);
  2163. NEXT_OPCODE(op_get_by_val);
  2164. }
  2165. case op_put_by_val: {
  2166. Node* base = get(currentInstruction[1].u.operand);
  2167. ArrayMode arrayMode = getArrayModeAndEmitChecks(currentInstruction[4].u.arrayProfile, Array::Write, base);
  2168. Node* property = get(currentInstruction[2].u.operand);
  2169. Node* value = get(currentInstruction[3].u.operand);
  2170. addVarArgChild(base);
  2171. addVarArgChild(property);
  2172. addVarArgChild(value);
  2173. addVarArgChild(0); // Leave room for property storage.
  2174. addToGraph(Node::VarArg, PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
  2175. NEXT_OPCODE(op_put_by_val);
  2176. }
  2177. case op_get_by_id:
  2178. case op_get_by_id_out_of_line:
  2179. case op_get_array_length: {
  2180. SpeculatedType prediction = getPrediction();
  2181. Node* base = get(currentInstruction[2].u.operand);
  2182. unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
  2183. Identifier identifier = m_codeBlock->identifier(identifierNumber);
  2184. GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
  2185. m_inlineStackTop->m_profiledBlock, m_currentIndex, identifier);
  2186. handleGetById(
  2187. currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
  2188. NEXT_OPCODE(op_get_by_id);
  2189. }
  2190. case op_put_by_id:
  2191. case op_put_by_id_out_of_line:
  2192. case op_put_by_id_transition_direct:
  2193. case op_put_by_id_transition_normal:
  2194. case op_put_by_id_transition_direct_out_of_line:
  2195. case op_put_by_id_transition_normal_out_of_line: {
  2196. Node* value = get(currentInstruction[3].u.operand);
  2197. Node* base = get(currentInstruction[1].u.operand);
  2198. unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
  2199. bool direct = currentInstruction[8].u.operand;
  2200. PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
  2201. m_inlineStackTop->m_profiledBlock,
  2202. m_currentIndex,
  2203. m_codeBlock->identifier(identifierNumber));
  2204. bool canCountAsInlined = true;
  2205. if (!putByIdStatus.isSet()) {
  2206. addToGraph(ForceOSRExit);
  2207. canCountAsInlined = false;
  2208. }
  2209. bool hasExitSite =
  2210. m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
  2211. || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache);
  2212. if (!hasExitSite && putByIdStatus.isSimpleReplace()) {
  2213. addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
  2214. Node* propertyStorage;
  2215. if (isInlineOffset(putByIdStatus.offset()))
  2216. propertyStorage = base;
  2217. else
  2218. propertyStorage = addToGraph(GetButterfly, base);
  2219. addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
  2220. StorageAccessData storageAccessData;
  2221. storageAccessData.offset = indexRelativeToBase(putByIdStatus.offset());
  2222. storageAccessData.identifierNumber = identifierNumber;
  2223. m_graph.m_storageAccessData.append(storageAccessData);
  2224. } else if (!hasExitSite
  2225. && putByIdStatus.isSimpleTransition()
  2226. && structureChainIsStillValid(
  2227. direct,
  2228. putByIdStatus.oldStructure(),
  2229. putByIdStatus.structureChain())) {
  2230. addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
  2231. if (!direct) {
  2232. if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
  2233. addStructureTransitionCheck(
  2234. putByIdStatus.oldStructure()->storedPrototype().asCell());
  2235. }
  2236. for (WriteBarrier<Structure>* it = putByIdStatus.structureChain()->head(); *it; ++it) {
  2237. JSValue prototype = (*it)->storedPrototype();
  2238. if (prototype.isNull())
  2239. continue;
  2240. ASSERT(prototype.isCell());
  2241. addStructureTransitionCheck(prototype.asCell());
  2242. }
  2243. }
  2244. ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
  2245. Node* propertyStorage;
  2246. StructureTransitionData* transitionData =
  2247. m_graph.addStructureTransitionData(
  2248. StructureTransitionData(
  2249. putByIdStatus.oldStructure(),
  2250. putByIdStatus.newStructure()));
  2251. if (putByIdStatus.oldStructure()->outOfLineCapacity()
  2252. != putByIdStatus.newStructure()->outOfLineCapacity()) {
  2253. // If we're growing the property storage then it must be because we're
  2254. // storing into the out-of-line storage.
  2255. ASSERT(!isInlineOffset(putByIdStatus.offset()));
  2256. if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
  2257. propertyStorage = addToGraph(
  2258. AllocatePropertyStorage, OpInfo(transitionData), base);
  2259. } else {
  2260. propertyStorage = addToGraph(
  2261. ReallocatePropertyStorage, OpInfo(transitionData),
  2262. base, addToGraph(GetButterfly, base));
  2263. }
  2264. } else {
  2265. if (isInlineOffset(putByIdStatus.offset()))
  2266. propertyStorage = base;
  2267. else
  2268. propertyStorage = addToGraph(GetButterfly, base);
  2269. }
  2270. addToGraph(PutStructure, OpInfo(transitionData), base);
  2271. addToGraph(
  2272. PutByOffset,
  2273. OpInfo(m_graph.m_storageAccessData.size()),
  2274. propertyStorage,
  2275. base,
  2276. value);
  2277. StorageAccessData storageAccessData;
  2278. storageAccessData.offset = indexRelativeToBase(putByIdStatus.offset());
  2279. storageAccessData.identifierNumber = identifierNumber;
  2280. m_graph.m_storageAccessData.append(storageAccessData);
  2281. } else {
  2282. if (direct)
  2283. addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
  2284. else
  2285. addToGraph(PutById, OpInfo(identifierNumber), base, value);
  2286. canCountAsInlined = false;
  2287. }
  2288. #if ENABLE(DETACHED_JIT)
  2289. #pragma message "[SECURE JSCORE] profiling disabled"
  2290. #else
  2291. if (canCountAsInlined && m_graph.m_compilation)
  2292. m_graph.m_compilation->noticeInlinedPutById();
  2293. #endif
  2294. NEXT_OPCODE(op_put_by_id);
  2295. }
  2296. case op_init_global_const_nop: {
  2297. NEXT_OPCODE(op_init_global_const_nop);
  2298. }
  2299. case op_init_global_const: {
  2300. Node* value = get(currentInstruction[2].u.operand);
  2301. addToGraph(
  2302. PutGlobalVar,
  2303. OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
  2304. value);
  2305. NEXT_OPCODE(op_init_global_const);
  2306. }
  2307. case op_init_global_const_check: {
  2308. Node* value = get(currentInstruction[2].u.operand);
  2309. CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
  2310. JSGlobalObject* globalObject = codeBlock->globalObject();
  2311. unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[4].u.operand];
  2312. Identifier identifier = m_codeBlock->identifier(identifierNumber);
  2313. SymbolTableEntry entry = globalObject->symbolTable()->get(identifier.impl());
  2314. if (!entry.couldBeWatched()) {
  2315. addToGraph(
  2316. PutGlobalVar,
  2317. OpInfo(globalObject->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
  2318. value);
  2319. NEXT_OPCODE(op_init_global_const_check);
  2320. }
  2321. addToGraph(
  2322. PutGlobalVarCheck,
  2323. OpInfo(codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
  2324. OpInfo(identifierNumber),
  2325. value);
  2326. NEXT_OPCODE(op_init_global_const_check);
  2327. }
  2328. // === Block terminators. ===
  2329. case op_jmp: {
  2330. unsigned relativeOffset = currentInstruction[1].u.operand;
  2331. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2332. LAST_OPCODE(op_jmp);
  2333. }
  2334. case op_jtrue: {
  2335. unsigned relativeOffset = currentInstruction[2].u.operand;
  2336. Node* condition = get(currentInstruction[1].u.operand);
  2337. if (canFold(condition)) {
  2338. TriState state = valueOfJSConstant(condition).pureToBoolean();
  2339. if (state == TrueTriState) {
  2340. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2341. LAST_OPCODE(op_jtrue);
  2342. } else if (state == FalseTriState) {
  2343. // Emit a placeholder for this bytecode operation but otherwise
  2344. // just fall through.
  2345. addToGraph(Phantom);
  2346. NEXT_OPCODE(op_jtrue);
  2347. }
  2348. }
  2349. addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
  2350. LAST_OPCODE(op_jtrue);
  2351. }
  2352. case op_jfalse: {
  2353. unsigned relativeOffset = currentInstruction[2].u.operand;
  2354. Node* condition = get(currentInstruction[1].u.operand);
  2355. if (canFold(condition)) {
  2356. TriState state = valueOfJSConstant(condition).pureToBoolean();
  2357. if (state == FalseTriState) {
  2358. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2359. LAST_OPCODE(op_jfalse);
  2360. } else if (state == TrueTriState) {
  2361. // Emit a placeholder for this bytecode operation but otherwise
  2362. // just fall through.
  2363. addToGraph(Phantom);
  2364. NEXT_OPCODE(op_jfalse);
  2365. }
  2366. }
  2367. addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
  2368. LAST_OPCODE(op_jfalse);
  2369. }
  2370. case op_jeq_null: {
  2371. unsigned relativeOffset = currentInstruction[2].u.operand;
  2372. Node* value = get(currentInstruction[1].u.operand);
  2373. Node* condition = addToGraph(CompareEqConstant, value, constantNull());
  2374. addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
  2375. LAST_OPCODE(op_jeq_null);
  2376. }
  2377. case op_jneq_null: {
  2378. unsigned relativeOffset = currentInstruction[2].u.operand;
  2379. Node* value = get(currentInstruction[1].u.operand);
  2380. Node* condition = addToGraph(CompareEqConstant, value, constantNull());
  2381. addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
  2382. LAST_OPCODE(op_jneq_null);
  2383. }
  2384. case op_jless: {
  2385. unsigned relativeOffset = currentInstruction[3].u.operand;
  2386. Node* op1 = get(currentInstruction[1].u.operand);
  2387. Node* op2 = get(currentInstruction[2].u.operand);
  2388. if (canFold(op1) && canFold(op2)) {
  2389. JSValue aValue = valueOfJSConstant(op1);
  2390. JSValue bValue = valueOfJSConstant(op2);
  2391. if (aValue.isNumber() && bValue.isNumber()) {
  2392. double a = aValue.asNumber();
  2393. double b = bValue.asNumber();
  2394. if (a < b) {
  2395. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2396. LAST_OPCODE(op_jless);
  2397. } else {
  2398. // Emit a placeholder for this bytecode operation but otherwise
  2399. // just fall through.
  2400. addToGraph(Phantom);
  2401. NEXT_OPCODE(op_jless);
  2402. }
  2403. }
  2404. }
  2405. Node* condition = addToGraph(CompareLess, op1, op2);
  2406. addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
  2407. LAST_OPCODE(op_jless);
  2408. }
  2409. case op_jlesseq: {
  2410. unsigned relativeOffset = currentInstruction[3].u.operand;
  2411. Node* op1 = get(currentInstruction[1].u.operand);
  2412. Node* op2 = get(currentInstruction[2].u.operand);
  2413. if (canFold(op1) && canFold(op2)) {
  2414. JSValue aValue = valueOfJSConstant(op1);
  2415. JSValue bValue = valueOfJSConstant(op2);
  2416. if (aValue.isNumber() && bValue.isNumber()) {
  2417. double a = aValue.asNumber();
  2418. double b = bValue.asNumber();
  2419. if (a <= b) {
  2420. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2421. LAST_OPCODE(op_jlesseq);
  2422. } else {
  2423. // Emit a placeholder for this bytecode operation but otherwise
  2424. // just fall through.
  2425. addToGraph(Phantom);
  2426. NEXT_OPCODE(op_jlesseq);
  2427. }
  2428. }
  2429. }
  2430. Node* condition = addToGraph(CompareLessEq, op1, op2);
  2431. addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
  2432. LAST_OPCODE(op_jlesseq);
  2433. }
  2434. case op_jgreater: {
  2435. unsigned relativeOffset = currentInstruction[3].u.operand;
  2436. Node* op1 = get(currentInstruction[1].u.operand);
  2437. Node* op2 = get(currentInstruction[2].u.operand);
  2438. if (canFold(op1) && canFold(op2)) {
  2439. JSValue aValue = valueOfJSConstant(op1);
  2440. JSValue bValue = valueOfJSConstant(op2);
  2441. if (aValue.isNumber() && bValue.isNumber()) {
  2442. double a = aValue.asNumber();
  2443. double b = bValue.asNumber();
  2444. if (a > b) {
  2445. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2446. LAST_OPCODE(op_jgreater);
  2447. } else {
  2448. // Emit a placeholder for this bytecode operation but otherwise
  2449. // just fall through.
  2450. addToGraph(Phantom);
  2451. NEXT_OPCODE(op_jgreater);
  2452. }
  2453. }
  2454. }
  2455. Node* condition = addToGraph(CompareGreater, op1, op2);
  2456. addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
  2457. LAST_OPCODE(op_jgreater);
  2458. }
  2459. case op_jgreatereq: {
  2460. unsigned relativeOffset = currentInstruction[3].u.operand;
  2461. Node* op1 = get(currentInstruction[1].u.operand);
  2462. Node* op2 = get(currentInstruction[2].u.operand);
  2463. if (canFold(op1) && canFold(op2)) {
  2464. JSValue aValue = valueOfJSConstant(op1);
  2465. JSValue bValue = valueOfJSConstant(op2);
  2466. if (aValue.isNumber() && bValue.isNumber()) {
  2467. double a = aValue.asNumber();
  2468. double b = bValue.asNumber();
  2469. if (a >= b) {
  2470. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2471. LAST_OPCODE(op_jgreatereq);
  2472. } else {
  2473. // Emit a placeholder for this bytecode operation but otherwise
  2474. // just fall through.
  2475. addToGraph(Phantom);
  2476. NEXT_OPCODE(op_jgreatereq);
  2477. }
  2478. }
  2479. }
  2480. Node* condition = addToGraph(CompareGreaterEq, op1, op2);
  2481. addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
  2482. LAST_OPCODE(op_jgreatereq);
  2483. }
  2484. case op_jnless: {
  2485. unsigned relativeOffset = currentInstruction[3].u.operand;
  2486. Node* op1 = get(currentInstruction[1].u.operand);
  2487. Node* op2 = get(currentInstruction[2].u.operand);
  2488. if (canFold(op1) && canFold(op2)) {
  2489. JSValue aValue = valueOfJSConstant(op1);
  2490. JSValue bValue = valueOfJSConstant(op2);
  2491. if (aValue.isNumber() && bValue.isNumber()) {
  2492. double a = aValue.asNumber();
  2493. double b = bValue.asNumber();
  2494. if (a < b) {
  2495. // Emit a placeholder for this bytecode operation but otherwise
  2496. // just fall through.
  2497. addToGraph(Phantom);
  2498. NEXT_OPCODE(op_jnless);
  2499. } else {
  2500. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2501. LAST_OPCODE(op_jnless);
  2502. }
  2503. }
  2504. }
  2505. Node* condition = addToGraph(CompareLess, op1, op2);
  2506. addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
  2507. LAST_OPCODE(op_jnless);
  2508. }
  2509. case op_jnlesseq: {
  2510. unsigned relativeOffset = currentInstruction[3].u.operand;
  2511. Node* op1 = get(currentInstruction[1].u.operand);
  2512. Node* op2 = get(currentInstruction[2].u.operand);
  2513. if (canFold(op1) && canFold(op2)) {
  2514. JSValue aValue = valueOfJSConstant(op1);
  2515. JSValue bValue = valueOfJSConstant(op2);
  2516. if (aValue.isNumber() && bValue.isNumber()) {
  2517. double a = aValue.asNumber();
  2518. double b = bValue.asNumber();
  2519. if (a <= b) {
  2520. // Emit a placeholder for this bytecode operation but otherwise
  2521. // just fall through.
  2522. addToGraph(Phantom);
  2523. NEXT_OPCODE(op_jnlesseq);
  2524. } else {
  2525. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2526. LAST_OPCODE(op_jnlesseq);
  2527. }
  2528. }
  2529. }
  2530. Node* condition = addToGraph(CompareLessEq, op1, op2);
  2531. addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
  2532. LAST_OPCODE(op_jnlesseq);
  2533. }
  2534. case op_jngreater: {
  2535. unsigned relativeOffset = currentInstruction[3].u.operand;
  2536. Node* op1 = get(currentInstruction[1].u.operand);
  2537. Node* op2 = get(currentInstruction[2].u.operand);
  2538. if (canFold(op1) && canFold(op2)) {
  2539. JSValue aValue = valueOfJSConstant(op1);
  2540. JSValue bValue = valueOfJSConstant(op2);
  2541. if (aValue.isNumber() && bValue.isNumber()) {
  2542. double a = aValue.asNumber();
  2543. double b = bValue.asNumber();
  2544. if (a > b) {
  2545. // Emit a placeholder for this bytecode operation but otherwise
  2546. // just fall through.
  2547. addToGraph(Phantom);
  2548. NEXT_OPCODE(op_jngreater);
  2549. } else {
  2550. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2551. LAST_OPCODE(op_jngreater);
  2552. }
  2553. }
  2554. }
  2555. Node* condition = addToGraph(CompareGreater, op1, op2);
  2556. addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
  2557. LAST_OPCODE(op_jngreater);
  2558. }
  2559. case op_jngreatereq: {
  2560. unsigned relativeOffset = currentInstruction[3].u.operand;
  2561. Node* op1 = get(currentInstruction[1].u.operand);
  2562. Node* op2 = get(currentInstruction[2].u.operand);
  2563. if (canFold(op1) && canFold(op2)) {
  2564. JSValue aValue = valueOfJSConstant(op1);
  2565. JSValue bValue = valueOfJSConstant(op2);
  2566. if (aValue.isNumber() && bValue.isNumber()) {
  2567. double a = aValue.asNumber();
  2568. double b = bValue.asNumber();
  2569. if (a >= b) {
  2570. // Emit a placeholder for this bytecode operation but otherwise
  2571. // just fall through.
  2572. addToGraph(Phantom);
  2573. NEXT_OPCODE(op_jngreatereq);
  2574. } else {
  2575. addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
  2576. LAST_OPCODE(op_jngreatereq);
  2577. }
  2578. }
  2579. }
  2580. Node* condition = addToGraph(CompareGreaterEq, op1, op2);
  2581. addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
  2582. LAST_OPCODE(op_jngreatereq);
  2583. }
  2584. case op_ret:
  2585. flushArgumentsAndCapturedVariables();
  2586. if (inlineCallFrame()) {
  2587. if (m_inlineStackTop->m_returnValue != InvalidVirtualRegister)
  2588. setDirect(m_inlineStackTop->m_returnValue, get(currentInstruction[1].u.operand));
  2589. m_inlineStackTop->m_didReturn = true;
  2590. if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
  2591. // If we're returning from the first block, then we're done parsing.
  2592. ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.m_blocks.size() - 1);
  2593. shouldContinueParsing = false;
  2594. LAST_OPCODE(op_ret);
  2595. } else {
  2596. // If inlining created blocks, and we're doing a return, then we need some
  2597. // special linking.
  2598. ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_blockIndex == m_graph.m_blocks.size() - 1);
  2599. m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
  2600. }
  2601. if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
  2602. ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
  2603. addToGraph(Jump, OpInfo(NoBlock));
  2604. m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
  2605. m_inlineStackTop->m_didEarlyReturn = true;
  2606. }
  2607. LAST_OPCODE(op_ret);
  2608. }
  2609. addToGraph(Return, get(currentInstruction[1].u.operand));
  2610. LAST_OPCODE(op_ret);
  2611. case op_end:
  2612. flushArgumentsAndCapturedVariables();
  2613. ASSERT(!inlineCallFrame());
  2614. addToGraph(Return, get(currentInstruction[1].u.operand));
  2615. LAST_OPCODE(op_end);
  2616. case op_throw:
  2617. flushAllArgumentsAndCapturedVariablesInInlineStack();
  2618. addToGraph(Throw, get(currentInstruction[1].u.operand));
  2619. LAST_OPCODE(op_throw);
  2620. case op_throw_static_error:
  2621. flushAllArgumentsAndCapturedVariablesInInlineStack();
  2622. addToGraph(ThrowReferenceError);
  2623. LAST_OPCODE(op_throw_static_error);
  2624. case op_call:
  2625. handleCall(interpreter, currentInstruction, Call, CodeForCall);
  2626. NEXT_OPCODE(op_call);
  2627. case op_construct:
  2628. handleCall(interpreter, currentInstruction, Construct, CodeForConstruct);
  2629. NEXT_OPCODE(op_construct);
  2630. case op_call_varargs: {
  2631. ASSERT(inlineCallFrame());
  2632. ASSERT(currentInstruction[3].u.operand == m_inlineStackTop->m_codeBlock->argumentsRegister());
  2633. ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
  2634. // It would be cool to funnel this into handleCall() so that it can handle
  2635. // inlining. But currently that won't be profitable anyway, since none of the
  2636. // uses of call_varargs will be inlineable. So we set this up manually and
  2637. // without inline/intrinsic detection.
  2638. Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call_varargs);
  2639. SpeculatedType prediction = SpecNone;
  2640. if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
  2641. m_currentProfilingIndex = m_currentIndex + OPCODE_LENGTH(op_call_varargs);
  2642. prediction = getPrediction();
  2643. }
  2644. addToGraph(CheckArgumentsNotCreated);
  2645. unsigned argCount = inlineCallFrame()->arguments.size();
  2646. if (JSStack::CallFrameHeaderSize + argCount > m_parameterSlots)
  2647. m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
  2648. addVarArgChild(get(currentInstruction[1].u.operand)); // callee
  2649. addVarArgChild(get(currentInstruction[2].u.operand)); // this
  2650. for (unsigned argument = 1; argument < argCount; ++argument)
  2651. addVarArgChild(get(argumentToOperand(argument)));
  2652. Node* call = addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction));
  2653. if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result)
  2654. set(putInstruction[1].u.operand, call);
  2655. NEXT_OPCODE(op_call_varargs);
  2656. }
  2657. case op_call_put_result:
  2658. NEXT_OPCODE(op_call_put_result);
  2659. case op_jneq_ptr:
  2660. // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
  2661. // support simmer for a while before making it more general, since it's
  2662. // already gnarly enough as it is.
  2663. ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
  2664. addToGraph(
  2665. CheckFunction,
  2666. OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
  2667. get(currentInstruction[1].u.operand));
  2668. addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
  2669. LAST_OPCODE(op_jneq_ptr);
  2670. case op_get_scoped_var: {
  2671. SpeculatedType prediction = getPrediction();
  2672. int dst = currentInstruction[1].u.operand;
  2673. int slot = currentInstruction[2].u.operand;
  2674. int depth = currentInstruction[3].u.operand;
  2675. bool hasTopScope = m_codeBlock->codeType() == FunctionCode && m_inlineStackTop->m_codeBlock->needsFullScopeChain();
  2676. ASSERT(!hasTopScope || depth >= 1);
  2677. Node* scope = getScope(hasTopScope, depth - hasTopScope);
  2678. Node* getScopeRegisters = addToGraph(GetScopeRegisters, scope);
  2679. Node* getScopedVar = addToGraph(GetScopedVar, OpInfo(slot), OpInfo(prediction), getScopeRegisters);
  2680. set(dst, getScopedVar);
  2681. NEXT_OPCODE(op_get_scoped_var);
  2682. }
  2683. case op_put_scoped_var: {
  2684. int slot = currentInstruction[1].u.operand;
  2685. int depth = currentInstruction[2].u.operand;
  2686. int source = currentInstruction[3].u.operand;
  2687. bool hasTopScope = m_codeBlock->codeType() == FunctionCode && m_inlineStackTop->m_codeBlock->needsFullScopeChain();
  2688. ASSERT(!hasTopScope || depth >= 1);
  2689. Node* scope = getScope(hasTopScope, depth - hasTopScope);
  2690. Node* scopeRegisters = addToGraph(GetScopeRegisters, scope);
  2691. addToGraph(PutScopedVar, OpInfo(slot), scope, scopeRegisters, get(source));
  2692. NEXT_OPCODE(op_put_scoped_var);
  2693. }
  2694. case op_resolve:
  2695. case op_resolve_global_property:
  2696. case op_resolve_global_var:
  2697. case op_resolve_scoped_var:
  2698. case op_resolve_scoped_var_on_top_scope:
  2699. case op_resolve_scoped_var_with_top_scope_check: {
  2700. SpeculatedType prediction = getPrediction();
  2701. unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
  2702. ResolveOperations* operations = currentInstruction[3].u.resolveOperations;
  2703. Node* value = 0;
  2704. if (parseResolveOperations(prediction, identifier, operations, 0, 0, &value)) {
  2705. set(currentInstruction[1].u.operand, value);
  2706. NEXT_OPCODE(op_resolve);
  2707. }
  2708. Node* resolve = addToGraph(Resolve, OpInfo(m_graph.m_resolveOperationsData.size()), OpInfo(prediction));
  2709. m_graph.m_resolveOperationsData.append(ResolveOperationData());
  2710. ResolveOperationData& data = m_graph.m_resolveOperationsData.last();
  2711. data.identifierNumber = identifier;
  2712. data.resolveOperations = operations;
  2713. set(currentInstruction[1].u.operand, resolve);
  2714. NEXT_OPCODE(op_resolve);
  2715. }
  2716. case op_put_to_base_variable:
  2717. case op_put_to_base: {
  2718. unsigned base = currentInstruction[1].u.operand;
  2719. unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
  2720. unsigned value = currentInstruction[3].u.operand;
  2721. PutToBaseOperation* putToBase = currentInstruction[4].u.putToBaseOperation;
  2722. if (putToBase->m_isDynamic) {
  2723. addToGraph(PutById, OpInfo(identifier), get(base), get(value));
  2724. NEXT_OPCODE(op_put_to_base);
  2725. }
  2726. switch (putToBase->m_kind) {
  2727. case PutToBaseOperation::Uninitialised:
  2728. addToGraph(ForceOSRExit);
  2729. addToGraph(Phantom, get(base));
  2730. addToGraph(Phantom, get(value));
  2731. break;
  2732. case PutToBaseOperation::GlobalVariablePutChecked: {
  2733. CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
  2734. JSGlobalObject* globalObject = codeBlock->globalObject();
  2735. SymbolTableEntry entry = globalObject->symbolTable()->get(m_codeBlock->identifier(identifier).impl());
  2736. if (entry.couldBeWatched()) {
  2737. addToGraph(PutGlobalVarCheck,
  2738. OpInfo(codeBlock->globalObject()->assertRegisterIsInThisObject(putToBase->m_registerAddress)),
  2739. OpInfo(identifier),
  2740. get(value));
  2741. break;
  2742. }
  2743. }
  2744. case PutToBaseOperation::GlobalVariablePut:
  2745. addToGraph(PutGlobalVar,
  2746. OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(putToBase->m_registerAddress)),
  2747. get(value));
  2748. break;
  2749. case PutToBaseOperation::VariablePut: {
  2750. Node* scope = get(base);
  2751. Node* scopeRegisters = addToGraph(GetScopeRegisters, scope);
  2752. addToGraph(PutScopedVar, OpInfo(putToBase->m_offset), scope, scopeRegisters, get(value));
  2753. break;
  2754. }
  2755. case PutToBaseOperation::GlobalPropertyPut: {
  2756. if (!putToBase->m_structure) {
  2757. addToGraph(ForceOSRExit);
  2758. addToGraph(Phantom, get(base));
  2759. addToGraph(Phantom, get(value));
  2760. NEXT_OPCODE(op_put_to_base);
  2761. }
  2762. Node* baseNode = get(base);
  2763. addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putToBase->m_structure.get())), baseNode);
  2764. Node* propertyStorage;
  2765. if (isInlineOffset(putToBase->m_offset))
  2766. propertyStorage = baseNode;
  2767. else
  2768. propertyStorage = addToGraph(GetButterfly, baseNode);
  2769. addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, baseNode, get(value));
  2770. StorageAccessData storageAccessData;
  2771. storageAccessData.offset = indexRelativeToBase(putToBase->m_offset);
  2772. storageAccessData.identifierNumber = identifier;
  2773. m_graph.m_storageAccessData.append(storageAccessData);
  2774. break;
  2775. }
  2776. case PutToBaseOperation::Readonly:
  2777. case PutToBaseOperation::Generic:
  2778. addToGraph(PutById, OpInfo(identifier), get(base), get(value));
  2779. }
  2780. NEXT_OPCODE(op_put_to_base);
  2781. }
  2782. case op_resolve_base_to_global:
  2783. case op_resolve_base_to_global_dynamic:
  2784. case op_resolve_base_to_scope:
  2785. case op_resolve_base_to_scope_with_top_scope_check:
  2786. case op_resolve_base: {
  2787. SpeculatedType prediction = getPrediction();
  2788. unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
  2789. ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
  2790. PutToBaseOperation* putToBaseOperation = currentInstruction[5].u.putToBaseOperation;
  2791. Node* base = 0;
  2792. if (parseResolveOperations(prediction, identifier, operations, 0, &base, 0)) {
  2793. set(currentInstruction[1].u.operand, base);
  2794. NEXT_OPCODE(op_resolve_base);
  2795. }
  2796. Node* resolve = addToGraph(currentInstruction[3].u.operand ? ResolveBaseStrictPut : ResolveBase, OpInfo(m_graph.m_resolveOperationsData.size()), OpInfo(prediction));
  2797. m_graph.m_resolveOperationsData.append(ResolveOperationData());
  2798. ResolveOperationData& data = m_graph.m_resolveOperationsData.last();
  2799. data.identifierNumber = identifier;
  2800. data.resolveOperations = operations;
  2801. data.putToBaseOperation = putToBaseOperation;
  2802. set(currentInstruction[1].u.operand, resolve);
  2803. NEXT_OPCODE(op_resolve_base);
  2804. }
  2805. case op_resolve_with_base: {
  2806. SpeculatedType prediction = getPrediction();
  2807. unsigned baseDst = currentInstruction[1].u.operand;
  2808. unsigned valueDst = currentInstruction[2].u.operand;
  2809. unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
  2810. ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
  2811. PutToBaseOperation* putToBaseOperation = currentInstruction[5].u.putToBaseOperation;
  2812. Node* base = 0;
  2813. Node* value = 0;
  2814. if (parseResolveOperations(prediction, identifier, operations, putToBaseOperation, &base, &value))
  2815. setPair(baseDst, base, valueDst, value);
  2816. else {
  2817. addToGraph(ForceOSRExit);
  2818. setPair(baseDst, addToGraph(GarbageValue), valueDst, addToGraph(GarbageValue));
  2819. }
  2820. NEXT_OPCODE(op_resolve_with_base);
  2821. }
  2822. case op_resolve_with_this: {
  2823. SpeculatedType prediction = getPrediction();
  2824. unsigned baseDst = currentInstruction[1].u.operand;
  2825. unsigned valueDst = currentInstruction[2].u.operand;
  2826. unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
  2827. ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
  2828. Node* base = 0;
  2829. Node* value = 0;
  2830. if (parseResolveOperations(prediction, identifier, operations, 0, &base, &value))
  2831. setPair(baseDst, base, valueDst, value);
  2832. else {
  2833. addToGraph(ForceOSRExit);
  2834. setPair(baseDst, addToGraph(GarbageValue), valueDst, addToGraph(GarbageValue));
  2835. }
  2836. NEXT_OPCODE(op_resolve_with_this);
  2837. }
  2838. case op_loop_hint: {
  2839. // Baseline->DFG OSR jumps between loop hints. The DFG assumes that Baseline->DFG
  2840. // OSR can only happen at basic block boundaries. Assert that these two statements
  2841. // are compatible.
  2842. RELEASE_ASSERT(m_currentIndex == blockBegin);
  2843. // We never do OSR into an inlined code block. That could not happen, since OSR
  2844. // looks up the code block that is the replacement for the baseline JIT code
  2845. // block. Hence, machine code block = true code block = not inline code block.
  2846. if (!m_inlineStackTop->m_caller)
  2847. m_currentBlock->isOSRTarget = true;
  2848. if (m_vm->watchdog.isEnabled())
  2849. addToGraph(CheckWatchdogTimer);
  2850. else {
  2851. // Emit a phantom node to ensure that there is a placeholder
  2852. // node for this bytecode op.
  2853. addToGraph(Phantom);
  2854. }
  2855. NEXT_OPCODE(op_loop_hint);
  2856. }
  2857. case op_init_lazy_reg: {
  2858. set(currentInstruction[1].u.operand, getJSConstantForValue(JSValue()));
  2859. NEXT_OPCODE(op_init_lazy_reg);
  2860. }
  2861. case op_create_activation: {
  2862. set(currentInstruction[1].u.operand, addToGraph(CreateActivation, get(currentInstruction[1].u.operand)));
  2863. NEXT_OPCODE(op_create_activation);
  2864. }
  2865. case op_create_arguments: {
  2866. m_graph.m_hasArguments = true;
  2867. Node* createArguments = addToGraph(CreateArguments, get(currentInstruction[1].u.operand));
  2868. set(currentInstruction[1].u.operand, createArguments);
  2869. set(unmodifiedArgumentsRegister(currentInstruction[1].u.operand), createArguments);
  2870. NEXT_OPCODE(op_create_arguments);
  2871. }
  2872. case op_tear_off_activation: {
  2873. addToGraph(TearOffActivation, get(currentInstruction[1].u.operand));
  2874. NEXT_OPCODE(op_tear_off_activation);
  2875. }
  2876. case op_tear_off_arguments: {
  2877. m_graph.m_hasArguments = true;
  2878. addToGraph(TearOffArguments, get(unmodifiedArgumentsRegister(currentInstruction[1].u.operand)), get(currentInstruction[2].u.operand));
  2879. NEXT_OPCODE(op_tear_off_arguments);
  2880. }
  2881. case op_get_arguments_length: {
  2882. m_graph.m_hasArguments = true;
  2883. set(currentInstruction[1].u.operand, addToGraph(GetMyArgumentsLengthSafe));
  2884. NEXT_OPCODE(op_get_arguments_length);
  2885. }
  2886. case op_get_argument_by_val: {
  2887. m_graph.m_hasArguments = true;
  2888. set(currentInstruction[1].u.operand,
  2889. addToGraph(
  2890. GetMyArgumentByValSafe, OpInfo(0), OpInfo(getPrediction()),
  2891. get(currentInstruction[3].u.operand)));
  2892. NEXT_OPCODE(op_get_argument_by_val);
  2893. }
  2894. case op_new_func: {
  2895. if (!currentInstruction[3].u.operand) {
  2896. set(currentInstruction[1].u.operand,
  2897. addToGraph(NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand)));
  2898. } else {
  2899. set(currentInstruction[1].u.operand,
  2900. addToGraph(
  2901. NewFunction,
  2902. OpInfo(currentInstruction[2].u.operand),
  2903. get(currentInstruction[1].u.operand)));
  2904. }
  2905. NEXT_OPCODE(op_new_func);
  2906. }
  2907. case op_new_func_exp: {
  2908. set(currentInstruction[1].u.operand,
  2909. addToGraph(NewFunctionExpression, OpInfo(currentInstruction[2].u.operand)));
  2910. NEXT_OPCODE(op_new_func_exp);
  2911. }
  2912. case op_typeof: {
  2913. set(currentInstruction[1].u.operand,
  2914. addToGraph(TypeOf, get(currentInstruction[2].u.operand)));
  2915. NEXT_OPCODE(op_typeof);
  2916. }
  2917. case op_to_number: {
  2918. set(currentInstruction[1].u.operand,
  2919. addToGraph(Identity, Edge(get(currentInstruction[2].u.operand), NumberUse)));
  2920. NEXT_OPCODE(op_to_number);
  2921. }
  2922. default:
  2923. // Parse failed! This should not happen because the capabilities checker
  2924. // should have caught it.
  2925. RELEASE_ASSERT_NOT_REACHED();
  2926. return false;
  2927. }
  2928. }
  2929. }
  2930. void ByteCodeParser::linkBlock(BasicBlock* block, Vector<BlockIndex>& possibleTargets)
  2931. {
  2932. ASSERT(!block->isLinked);
  2933. ASSERT(!block->isEmpty());
  2934. Node* node = block->last();
  2935. ASSERT(node->isTerminal());
  2936. switch (node->op()) {
  2937. case Jump:
  2938. node->setTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
  2939. #if DFG_ENABLE(DEBUG_VERBOSE)
  2940. dataLogF("Linked basic block %p to %p, #%u.\n", block, m_graph.m_blocks[node->takenBlockIndex()].get(), node->takenBlockIndex());
  2941. #endif
  2942. break;
  2943. case Branch:
  2944. node->setTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
  2945. node->setNotTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node->notTakenBytecodeOffsetDuringParsing()));
  2946. #if DFG_ENABLE(DEBUG_VERBOSE)
  2947. dataLogF("Linked basic block %p to %p, #%u and %p, #%u.\n", block, m_graph.m_blocks[node->takenBlockIndex()].get(), node->takenBlockIndex(), m_graph.m_blocks[node->notTakenBlockIndex()].get(), node->notTakenBlockIndex());
  2948. #endif
  2949. break;
  2950. default:
  2951. #if DFG_ENABLE(DEBUG_VERBOSE)
  2952. dataLogF("Marking basic block %p as linked.\n", block);
  2953. #endif
  2954. break;
  2955. }
  2956. #if !ASSERT_DISABLED
  2957. block->isLinked = true;
  2958. #endif
  2959. }
  2960. void ByteCodeParser::linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BlockIndex>& possibleTargets)
  2961. {
  2962. for (size_t i = 0; i < unlinkedBlocks.size(); ++i) {
  2963. if (unlinkedBlocks[i].m_needsNormalLinking) {
  2964. linkBlock(m_graph.m_blocks[unlinkedBlocks[i].m_blockIndex].get(), possibleTargets);
  2965. unlinkedBlocks[i].m_needsNormalLinking = false;
  2966. }
  2967. }
  2968. }
  2969. void ByteCodeParser::buildOperandMapsIfNecessary()
  2970. {
  2971. if (m_haveBuiltOperandMaps)
  2972. return;
  2973. for (size_t i = 0; i < m_codeBlock->numberOfIdentifiers(); ++i)
  2974. m_identifierMap.add(m_codeBlock->identifier(i).impl(), i);
  2975. for (size_t i = 0; i < m_codeBlock->numberOfConstantRegisters(); ++i) {
  2976. JSValue value = m_codeBlock->getConstant(i + FirstConstantRegisterIndex);
  2977. if (!value)
  2978. m_emptyJSValueIndex = i + FirstConstantRegisterIndex;
  2979. else
  2980. m_jsValueMap.add(JSValue::encode(value), i + FirstConstantRegisterIndex);
  2981. }
  2982. m_haveBuiltOperandMaps = true;
  2983. }
  2984. ByteCodeParser::InlineStackEntry::InlineStackEntry(
  2985. ByteCodeParser* byteCodeParser,
  2986. CodeBlock* codeBlock,
  2987. CodeBlock* profiledBlock,
  2988. BlockIndex callsiteBlockHead,
  2989. JSFunction* callee, // Null if this is a closure call.
  2990. VirtualRegister returnValueVR,
  2991. VirtualRegister inlineCallFrameStart,
  2992. int argumentCountIncludingThis,
  2993. CodeSpecializationKind kind)
  2994. : m_byteCodeParser(byteCodeParser)
  2995. , m_codeBlock(codeBlock)
  2996. , m_profiledBlock(profiledBlock)
  2997. , m_exitProfile(profiledBlock->exitProfile())
  2998. , m_callsiteBlockHead(callsiteBlockHead)
  2999. , m_returnValue(returnValueVR)
  3000. , m_lazyOperands(profiledBlock->lazyOperandValueProfiles())
  3001. , m_didReturn(false)
  3002. , m_didEarlyReturn(false)
  3003. , m_caller(byteCodeParser->m_inlineStackTop)
  3004. {
  3005. m_argumentPositions.resize(argumentCountIncludingThis);
  3006. for (int i = 0; i < argumentCountIncludingThis; ++i) {
  3007. byteCodeParser->m_graph.m_argumentPositions.append(ArgumentPosition());
  3008. ArgumentPosition* argumentPosition = &byteCodeParser->m_graph.m_argumentPositions.last();
  3009. m_argumentPositions[i] = argumentPosition;
  3010. }
  3011. // Track the code-block-global exit sites.
  3012. if (m_exitProfile.hasExitSite(ArgumentsEscaped)) {
  3013. byteCodeParser->m_graph.m_executablesWhoseArgumentsEscaped.add(
  3014. codeBlock->ownerExecutable());
  3015. }
  3016. if (m_caller) {
  3017. // Inline case.
  3018. ASSERT(codeBlock != byteCodeParser->m_codeBlock);
  3019. ASSERT(inlineCallFrameStart != InvalidVirtualRegister);
  3020. ASSERT(callsiteBlockHead != NoBlock);
  3021. InlineCallFrame inlineCallFrame;
  3022. inlineCallFrame.executable.set(*byteCodeParser->m_vm, byteCodeParser->m_codeBlock->ownerExecutable(), codeBlock->ownerExecutable());
  3023. inlineCallFrame.stackOffset = inlineCallFrameStart + JSStack::CallFrameHeaderSize;
  3024. if (callee)
  3025. inlineCallFrame.callee.set(*byteCodeParser->m_vm, byteCodeParser->m_codeBlock->ownerExecutable(), callee);
  3026. inlineCallFrame.caller = byteCodeParser->currentCodeOrigin();
  3027. inlineCallFrame.arguments.resize(argumentCountIncludingThis); // Set the number of arguments including this, but don't configure the value recoveries, yet.
  3028. inlineCallFrame.isCall = isCall(kind);
  3029. if (inlineCallFrame.caller.inlineCallFrame)
  3030. inlineCallFrame.capturedVars = inlineCallFrame.caller.inlineCallFrame->capturedVars;
  3031. else {
  3032. for (int i = byteCodeParser->m_codeBlock->m_numVars; i--;) {
  3033. if (byteCodeParser->m_codeBlock->isCaptured(i))
  3034. inlineCallFrame.capturedVars.set(i);
  3035. }
  3036. }
  3037. for (int i = argumentCountIncludingThis; i--;) {
  3038. if (codeBlock->isCaptured(argumentToOperand(i)))
  3039. inlineCallFrame.capturedVars.set(argumentToOperand(i) + inlineCallFrame.stackOffset);
  3040. }
  3041. for (size_t i = codeBlock->m_numVars; i--;) {
  3042. if (codeBlock->isCaptured(i))
  3043. inlineCallFrame.capturedVars.set(i + inlineCallFrame.stackOffset);
  3044. }
  3045. #if DFG_ENABLE(DEBUG_VERBOSE)
  3046. dataLogF("Current captured variables: ");
  3047. inlineCallFrame.capturedVars.dump(WTF::dataFile());
  3048. dataLogF("\n");
  3049. #endif
  3050. byteCodeParser->m_codeBlock->inlineCallFrames().append(inlineCallFrame);
  3051. m_inlineCallFrame = &byteCodeParser->m_codeBlock->inlineCallFrames().last();
  3052. byteCodeParser->buildOperandMapsIfNecessary();
  3053. m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
  3054. m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
  3055. m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
  3056. for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i) {
  3057. StringImpl* rep = codeBlock->identifier(i).impl();
  3058. IdentifierMap::AddResult result = byteCodeParser->m_identifierMap.add(rep, byteCodeParser->m_codeBlock->numberOfIdentifiers());
  3059. if (result.isNewEntry)
  3060. byteCodeParser->m_codeBlock->addIdentifier(Identifier(byteCodeParser->m_vm, rep));
  3061. m_identifierRemap[i] = result.iterator->value;
  3062. }
  3063. for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i) {
  3064. JSValue value = codeBlock->getConstant(i + FirstConstantRegisterIndex);
  3065. if (!value) {
  3066. if (byteCodeParser->m_emptyJSValueIndex == UINT_MAX) {
  3067. byteCodeParser->m_emptyJSValueIndex = byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex;
  3068. byteCodeParser->m_codeBlock->addConstant(JSValue());
  3069. byteCodeParser->m_constants.append(ConstantRecord());
  3070. }
  3071. m_constantRemap[i] = byteCodeParser->m_emptyJSValueIndex;
  3072. continue;
  3073. }
  3074. JSValueMap::AddResult result = byteCodeParser->m_jsValueMap.add(JSValue::encode(value), byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex);
  3075. if (result.isNewEntry) {
  3076. byteCodeParser->m_codeBlock->addConstant(value);
  3077. byteCodeParser->m_constants.append(ConstantRecord());
  3078. }
  3079. m_constantRemap[i] = result.iterator->value;
  3080. }
  3081. for (unsigned i = 0; i < codeBlock->numberOfConstantBuffers(); ++i) {
  3082. // If we inline the same code block multiple times, we don't want to needlessly
  3083. // duplicate its constant buffers.
  3084. HashMap<ConstantBufferKey, unsigned>::iterator iter =
  3085. byteCodeParser->m_constantBufferCache.find(ConstantBufferKey(codeBlock, i));
  3086. if (iter != byteCodeParser->m_constantBufferCache.end()) {
  3087. m_constantBufferRemap[i] = iter->value;
  3088. continue;
  3089. }
  3090. Vector_shared<JSValue>& buffer = codeBlock->constantBufferAsVector(i);
  3091. unsigned newIndex = byteCodeParser->m_codeBlock->addConstantBuffer(buffer);
  3092. m_constantBufferRemap[i] = newIndex;
  3093. byteCodeParser->m_constantBufferCache.add(ConstantBufferKey(codeBlock, i), newIndex);
  3094. }
  3095. m_callsiteBlockHeadNeedsLinking = true;
  3096. } else {
  3097. // Machine code block case.
  3098. ASSERT(codeBlock == byteCodeParser->m_codeBlock);
  3099. ASSERT(!callee);
  3100. ASSERT(returnValueVR == InvalidVirtualRegister);
  3101. ASSERT(inlineCallFrameStart == InvalidVirtualRegister);
  3102. ASSERT(callsiteBlockHead == NoBlock);
  3103. m_inlineCallFrame = 0;
  3104. m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
  3105. m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
  3106. m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
  3107. for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i)
  3108. m_identifierRemap[i] = i;
  3109. for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i)
  3110. m_constantRemap[i] = i + FirstConstantRegisterIndex;
  3111. for (size_t i = 0; i < codeBlock->numberOfConstantBuffers(); ++i)
  3112. m_constantBufferRemap[i] = i;
  3113. m_callsiteBlockHeadNeedsLinking = false;
  3114. }
  3115. for (size_t i = 0; i < m_constantRemap.size(); ++i)
  3116. ASSERT(m_constantRemap[i] >= static_cast<unsigned>(FirstConstantRegisterIndex));
  3117. byteCodeParser->m_inlineStackTop = this;
  3118. }
  3119. void ByteCodeParser::parseCodeBlock()
  3120. {
  3121. CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
  3122. #if ENABLE(DETACHED_JIT)
  3123. #pragma message "[SECURE JSCORE] profiling disabled"
  3124. #else
  3125. if (m_graph.m_compilation) {
  3126. m_graph.m_compilation->addProfiledBytecodes(
  3127. *m_vm->m_perBytecodeProfiler, m_inlineStackTop->m_profiledBlock);
  3128. }
  3129. #endif
  3130. bool shouldDumpBytecode = Options::dumpBytecodeAtDFGTime();
  3131. #if DFG_ENABLE(DEBUG_VERBOSE)
  3132. shouldDumpBytecode |= true;
  3133. #endif
  3134. if (shouldDumpBytecode) {
  3135. dataLog("Parsing ", *codeBlock);
  3136. if (inlineCallFrame()) {
  3137. dataLog(
  3138. " for inlining at ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT),
  3139. " ", inlineCallFrame()->caller);
  3140. }
  3141. dataLog(
  3142. ": captureCount = ", codeBlock->symbolTable() ? codeBlock->symbolTable()->captureCount() : 0,
  3143. ", needsFullScopeChain = ", codeBlock->needsFullScopeChain(),
  3144. ", needsActivation = ", codeBlock->ownerExecutable()->needsActivation(),
  3145. ", isStrictMode = ", codeBlock->ownerExecutable()->isStrictMode(), "\n");
  3146. codeBlock->baselineVersion()->dumpBytecode();
  3147. }
  3148. Vector<unsigned, 32> jumpTargets;
  3149. computePreciseJumpTargets(codeBlock, jumpTargets);
  3150. if (Options::dumpBytecodeAtDFGTime()) {
  3151. dataLog("Jump targets: ");
  3152. CommaPrinter comma;
  3153. for (unsigned i = 0; i < jumpTargets.size(); ++i)
  3154. dataLog(comma, jumpTargets[i]);
  3155. dataLog("\n");
  3156. }
  3157. for (unsigned jumpTargetIndex = 0; jumpTargetIndex <= jumpTargets.size(); ++jumpTargetIndex) {
  3158. // The maximum bytecode offset to go into the current basicblock is either the next jump target, or the end of the instructions.
  3159. unsigned limit = jumpTargetIndex < jumpTargets.size() ? jumpTargets[jumpTargetIndex] : codeBlock->instructions().size();
  3160. #if DFG_ENABLE(DEBUG_VERBOSE)
  3161. dataLog(
  3162. "Parsing bytecode with limit ", pointerDump(inlineCallFrame()),
  3163. " bc#", limit, " at inline depth ",
  3164. CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()), ".\n");
  3165. #endif
  3166. ASSERT(m_currentIndex < limit);
  3167. // Loop until we reach the current limit (i.e. next jump target).
  3168. do {
  3169. if (!m_currentBlock) {
  3170. // Check if we can use the last block.
  3171. if (!m_graph.m_blocks.isEmpty() && m_graph.m_blocks.last()->isEmpty()) {
  3172. // This must be a block belonging to us.
  3173. ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_blockIndex == m_graph.m_blocks.size() - 1);
  3174. // Either the block is linkable or it isn't. If it's linkable then it's the last
  3175. // block in the blockLinkingTargets list. If it's not then the last block will
  3176. // have a lower bytecode index that the one we're about to give to this block.
  3177. if (m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_blockLinkingTargets.last()]->bytecodeBegin != m_currentIndex) {
  3178. // Make the block linkable.
  3179. ASSERT(m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_blockLinkingTargets.last()]->bytecodeBegin < m_currentIndex);
  3180. m_inlineStackTop->m_blockLinkingTargets.append(m_graph.m_blocks.size() - 1);
  3181. }
  3182. // Change its bytecode begin and continue.
  3183. m_currentBlock = m_graph.m_blocks.last().get();
  3184. #if DFG_ENABLE(DEBUG_VERBOSE)
  3185. dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (peephole case).\n", m_currentBlock, m_currentBlock->bytecodeBegin, m_currentIndex);
  3186. #endif
  3187. m_currentBlock->bytecodeBegin = m_currentIndex;
  3188. } else {
  3189. OwnPtr<BasicBlock> block = adoptPtr(new BasicBlock(m_currentIndex, m_numArguments, m_numLocals));
  3190. #if DFG_ENABLE(DEBUG_VERBOSE)
  3191. dataLogF("Creating basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.m_blocks.size(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
  3192. #endif
  3193. m_currentBlock = block.get();
  3194. // This assertion checks two things:
  3195. // 1) If the bytecodeBegin is greater than currentIndex, then something has gone
  3196. // horribly wrong. So, we're probably generating incorrect code.
  3197. // 2) If the bytecodeBegin is equal to the currentIndex, then we failed to do
  3198. // a peephole coalescing of this block in the if statement above. So, we're
  3199. // generating suboptimal code and leaving more work for the CFG simplifier.
  3200. ASSERT(m_inlineStackTop->m_unlinkedBlocks.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_unlinkedBlocks.last().m_blockIndex]->bytecodeBegin < m_currentIndex);
  3201. m_inlineStackTop->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size()));
  3202. m_inlineStackTop->m_blockLinkingTargets.append(m_graph.m_blocks.size());
  3203. // The first block is definitely an OSR target.
  3204. if (!m_graph.m_blocks.size())
  3205. block->isOSRTarget = true;
  3206. m_graph.m_blocks.append(block.release());
  3207. prepareToParseBlock();
  3208. }
  3209. }
  3210. bool shouldContinueParsing = parseBlock(limit);
  3211. // We should not have gone beyond the limit.
  3212. ASSERT(m_currentIndex <= limit);
  3213. // We should have planted a terminal, or we just gave up because
  3214. // we realized that the jump target information is imprecise, or we
  3215. // are at the end of an inline function, or we realized that we
  3216. // should stop parsing because there was a return in the first
  3217. // basic block.
  3218. ASSERT(m_currentBlock->isEmpty() || m_currentBlock->last()->isTerminal() || (m_currentIndex == codeBlock->instructions().size() && inlineCallFrame()) || !shouldContinueParsing);
  3219. if (!shouldContinueParsing)
  3220. return;
  3221. m_currentBlock = 0;
  3222. } while (m_currentIndex < limit);
  3223. }
  3224. // Should have reached the end of the instructions.
  3225. ASSERT(m_currentIndex == codeBlock->instructions().size());
  3226. }
  3227. bool ByteCodeParser::parse()
  3228. {
  3229. // Set during construction.
  3230. ASSERT(!m_currentIndex);
  3231. #if DFG_ENABLE(ALL_VARIABLES_CAPTURED)
  3232. // We should be pretending that the code has an activation.
  3233. ASSERT(m_graph.needsActivation());
  3234. #endif
  3235. InlineStackEntry inlineStackEntry(
  3236. this, m_codeBlock, m_profiledBlock, NoBlock, 0, InvalidVirtualRegister, InvalidVirtualRegister,
  3237. m_codeBlock->numParameters(), CodeForCall);
  3238. parseCodeBlock();
  3239. linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
  3240. m_graph.determineReachability();
  3241. ASSERT(m_preservedVars.size());
  3242. size_t numberOfLocals = 0;
  3243. for (size_t i = m_preservedVars.size(); i--;) {
  3244. if (m_preservedVars.quickGet(i)) {
  3245. numberOfLocals = i + 1;
  3246. break;
  3247. }
  3248. }
  3249. for (BlockIndex blockIndex = 0; blockIndex < m_graph.m_blocks.size(); ++blockIndex) {
  3250. BasicBlock* block = m_graph.m_blocks[blockIndex].get();
  3251. ASSERT(block);
  3252. if (!block->isReachable) {
  3253. m_graph.m_blocks[blockIndex].clear();
  3254. continue;
  3255. }
  3256. block->variablesAtHead.ensureLocals(numberOfLocals);
  3257. block->variablesAtTail.ensureLocals(numberOfLocals);
  3258. }
  3259. m_graph.m_preservedVars = m_preservedVars;
  3260. m_graph.m_localVars = m_numLocals;
  3261. m_graph.m_parameterSlots = m_parameterSlots;
  3262. return true;
  3263. }
  3264. bool parse(ExecState*, Graph& graph)
  3265. {
  3266. SamplingRegion samplingRegion("DFG Parsing");
  3267. #if DFG_DEBUG_LOCAL_DISBALE
  3268. UNUSED_PARAM(exec);
  3269. UNUSED_PARAM(graph);
  3270. return false;
  3271. #else
  3272. #if OS(ORBIS)
  3273. static int enable_dfg(-1);
  3274. if (enable_dfg == -1) {
  3275. char * enable_dfg_setting(getenv("JSCORE_ENABLE_DFG"));
  3276. enable_dfg = !enable_dfg_setting ? 0 : // if not set, disable
  3277. atoi(enable_dfg_setting) <= 0 ? 0 : // if value <= 0, disable
  3278. 1;
  3279. fprintf(stderr, "[JavaScriptCore] DFG_JIT %s\n", enable_dfg ? "enabled" : "disabled");
  3280. }
  3281. if (!enable_dfg) {
  3282. return false;
  3283. }
  3284. #endif
  3285. return ByteCodeParser(graph).parse();
  3286. #endif
  3287. }
  3288. } } // namespace JSC::DFG
  3289. #endif