/*
 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef DFGGraph_h
#define DFGGraph_h

#include <wtf/Platform.h>

#if ENABLE(DFG_JIT)

#include "CodeBlock.h"
#include "DFGArgumentPosition.h"
#include "DFGAssemblyHelpers.h"
#include "DFGBasicBlock.h"
#include "DFGDominators.h"
#include "DFGLongLivedState.h"
#include "DFGNode.h"
#include "DFGNodeAllocator.h"
#include "DFGVariadicFunction.h"
#include "JSStack.h"
#include "MethodOfGettingAValueProfile.h"
#include <wtf/BitVector.h>
#include <wtf/HashMap.h>
#include <wtf/Vector.h>
#include <wtf/StdLibExtras.h>

namespace JSC {

class CodeBlock;
class ExecState;

namespace DFG {

struct StorageAccessData {
    size_t offset;
    unsigned identifierNumber;
};

struct ResolveGlobalData {
    unsigned identifierNumber;
    ResolveOperations* resolveOperations;
    PutToBaseOperation* putToBaseOperation;
    unsigned resolvePropertyIndex;
};

struct ResolveOperationData {
    unsigned identifierNumber;
    ResolveOperations* resolveOperations;
    PutToBaseOperation* putToBaseOperation;
};

struct PutToBaseOperationData {
    PutToBaseOperation* putToBaseOperation;
};

enum AddSpeculationMode {
    DontSpeculateInteger,
    SpeculateIntegerAndTruncateConstants,
    SpeculateInteger
};
//
// === Graph ===
//
// The dataflow graph is an ordered vector of nodes.
// The order may be significant for nodes with side-effects (property accesses, value conversions).
// Nodes that are 'dead' remain in the vector with refCount 0.

class Graph {
#if ENABLE(DETACHED_JIT)
    DETACHED_JIT_MAKE_SHARED_DATA_ALLOCATED;
#endif
public:
    Graph(VM&, CodeBlock*, unsigned osrEntryBytecodeIndex, const Operands<JSValue>& mustHandleValues);
    ~Graph();

    void changeChild(Edge& edge, Node* newNode)
    {
        edge.setNode(newNode);
    }

    void changeEdge(Edge& edge, Edge newEdge)
    {
        edge = newEdge;
    }

    void compareAndSwap(Edge& edge, Node* oldNode, Node* newNode)
    {
        if (edge.node() != oldNode)
            return;
        changeChild(edge, newNode);
    }

    void compareAndSwap(Edge& edge, Edge oldEdge, Edge newEdge)
    {
        if (edge != oldEdge)
            return;
        changeEdge(edge, newEdge);
    }

    void clearAndDerefChild(Node* node, unsigned index)
    {
        if (!node->children.child(index))
            return;
        node->children.setChild(index, Edge());
    }
    void clearAndDerefChild1(Node* node) { clearAndDerefChild(node, 0); }
    void clearAndDerefChild2(Node* node) { clearAndDerefChild(node, 1); }
    void clearAndDerefChild3(Node* node) { clearAndDerefChild(node, 2); }

    void performSubstitution(Node* node)
    {
        if (node->flags() & NodeHasVarArgs) {
            for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++)
                performSubstitutionForEdge(m_varArgChildren[childIdx]);
        } else {
            performSubstitutionForEdge(node->child1());
            performSubstitutionForEdge(node->child2());
            performSubstitutionForEdge(node->child3());
        }
    }

    void performSubstitutionForEdge(Edge& child)
    {
        // Check if this operand is actually unused.
        if (!child)
            return;

        // Check if there is any replacement.
        Node* replacement = child->replacement;
        if (!replacement)
            return;

        child.setNode(replacement);

        // There is definitely a replacement. Assert that the replacement does not
        // have a replacement.
        ASSERT(!child->replacement);
    }
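
    // DFG_VARIADIC_TEMPLATE_FUNCTION expands the macro below once for each supported number of
    // node-constructor arguments, producing a family of addNode() overloads. Each overload
    // allocates a Node from m_allocator, seeds its speculated type via predict(), and returns it.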
#define DFG_DEFINE_ADD_NODE(templatePre, templatePost, typeParams, valueParamsComma, valueParams, valueArgs) \
    templatePre typeParams templatePost Node* addNode(SpeculatedType type valueParamsComma valueParams) \
    { \
        Node* node = new (m_allocator) Node(valueArgs); \
        node->predict(type); \
        return node; \
    }
    DFG_VARIADIC_TEMPLATE_FUNCTION(DFG_DEFINE_ADD_NODE)
#undef DFG_DEFINE_ADD_NODE

    void dethread();

    void convertToConstant(Node* node, unsigned constantNumber)
    {
        if (node->op() == GetLocal)
            dethread();
        else
            ASSERT(!node->hasVariableAccessData());
        node->convertToConstant(constantNumber);
    }

    void convertToConstant(Node* node, JSValue value)
    {
        convertToConstant(node, m_codeBlock->addOrFindConstant(value));
    }

    // CodeBlock is optional, but may allow additional information to be dumped (e.g. Identifier names).
    void dump(PrintStream& = WTF::dataFile());
    enum PhiNodeDumpMode { DumpLivePhisOnly, DumpAllPhis };
    void dumpBlockHeader(PrintStream&, const char* prefix, BlockIndex, PhiNodeDumpMode);
    void dump(PrintStream&, Edge);
    void dump(PrintStream&, const char* prefix, Node*);
    static int amountOfNodeWhiteSpace(Node*);
    static void printNodeWhiteSpace(PrintStream&, Node*);

    // Dump the code origin of the given node as a diff from the code origin of the
    // preceding node. Returns true if anything was printed.
    bool dumpCodeOrigin(PrintStream&, const char* prefix, Node* previousNode, Node* currentNode);

    BlockIndex blockIndexForBytecodeOffset(Vector<BlockIndex>& blocks, unsigned bytecodeBegin);

    SpeculatedType getJSConstantSpeculation(Node* node)
    {
        return speculationFromValue(node->valueOfJSConstant(m_codeBlock));
    }

    AddSpeculationMode addSpeculationMode(Node* add, bool leftShouldSpeculateInteger, bool rightShouldSpeculateInteger)
    {
        ASSERT(add->op() == ValueAdd || add->op() == ArithAdd || add->op() == ArithSub);

        Node* left = add->child1().node();
        Node* right = add->child2().node();

        if (left->hasConstant())
            return addImmediateShouldSpeculateInteger(add, rightShouldSpeculateInteger, left);
        if (right->hasConstant())
            return addImmediateShouldSpeculateInteger(add, leftShouldSpeculateInteger, right);

        return (leftShouldSpeculateInteger && rightShouldSpeculateInteger && add->canSpeculateInteger()) ? SpeculateInteger : DontSpeculateInteger;
    }

    AddSpeculationMode valueAddSpeculationMode(Node* add)
    {
        return addSpeculationMode(add, add->child1()->shouldSpeculateIntegerExpectingDefined(), add->child2()->shouldSpeculateIntegerExpectingDefined());
    }

    AddSpeculationMode arithAddSpeculationMode(Node* add)
    {
        return addSpeculationMode(add, add->child1()->shouldSpeculateIntegerForArithmetic(), add->child2()->shouldSpeculateIntegerForArithmetic());
    }

    AddSpeculationMode addSpeculationMode(Node* add)
    {
        if (add->op() == ValueAdd)
            return valueAddSpeculationMode(add);
        return arithAddSpeculationMode(add);
    }

    bool addShouldSpeculateInteger(Node* add)
    {
        return addSpeculationMode(add) != DontSpeculateInteger;
    }

    bool mulShouldSpeculateInteger(Node* mul)
    {
        ASSERT(mul->op() == ArithMul);

        Node* left = mul->child1().node();
        Node* right = mul->child2().node();

        return Node::shouldSpeculateIntegerForArithmetic(left, right) && mul->canSpeculateInteger();
    }

    bool negateShouldSpeculateInteger(Node* negate)
    {
        ASSERT(negate->op() == ArithNegate);
        return negate->child1()->shouldSpeculateIntegerForArithmetic() && negate->canSpeculateInteger();
    }

    // Helper methods to check nodes for constants.
    bool isConstant(Node* node)
    {
        return node->hasConstant();
    }
    bool isJSConstant(Node* node)
    {
        return node->hasConstant();
    }
    bool isInt32Constant(Node* node)
    {
        return node->isInt32Constant(m_codeBlock);
    }
    bool isDoubleConstant(Node* node)
    {
        return node->isDoubleConstant(m_codeBlock);
    }
    bool isNumberConstant(Node* node)
    {
        return node->isNumberConstant(m_codeBlock);
    }
    bool isBooleanConstant(Node* node)
    {
        return node->isBooleanConstant(m_codeBlock);
    }
    bool isCellConstant(Node* node)
    {
        if (!isJSConstant(node))
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isCell() && !!value;
    }
    bool isFunctionConstant(Node* node)
    {
        if (!isJSConstant(node))
            return false;
        if (!getJSFunction(valueOfJSConstant(node)))
            return false;
        return true;
    }
    bool isInternalFunctionConstant(Node* node)
    {
        if (!isJSConstant(node))
            return false;
        JSValue value = valueOfJSConstant(node);
        if (!value.isCell() || !value)
            return false;
        JSCell* cell = value.asCell();
        if (!cell->inherits(&InternalFunction::s_info))
            return false;
        return true;
    }

    // Helper methods get constant values from nodes.
    JSValue valueOfJSConstant(Node* node)
    {
        return node->valueOfJSConstant(m_codeBlock);
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        return valueOfJSConstant(node).asInt32();
    }
    double valueOfNumberConstant(Node* node)
    {
        return valueOfJSConstant(node).asNumber();
    }
    bool valueOfBooleanConstant(Node* node)
    {
        return valueOfJSConstant(node).asBoolean();
    }
    JSFunction* valueOfFunctionConstant(Node* node)
    {
        JSCell* function = getJSFunction(valueOfJSConstant(node));
        ASSERT(function);
        return jsCast<JSFunction*>(function);
    }

    static const char* opName(NodeType);

    StructureSet* addStructureSet(const StructureSet& structureSet)
    {
        ASSERT(structureSet.size());
        m_structureSet.append(structureSet);
        return &m_structureSet.last();
    }

    StructureTransitionData* addStructureTransitionData(const StructureTransitionData& structureTransitionData)
    {
        m_structureTransitionData.append(structureTransitionData);
        return &m_structureTransitionData.last();
    }

    JSGlobalObject* globalObjectFor(CodeOrigin codeOrigin)
    {
        return m_codeBlock->globalObjectFor(codeOrigin);
    }

    JSObject* globalThisObjectFor(CodeOrigin codeOrigin)
    {
        JSGlobalObject* object = globalObjectFor(codeOrigin);
        return object->methodTable()->toThisObject(object, 0);
    }

    ExecutableBase* executableFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return m_codeBlock->ownerExecutable();
        return inlineCallFrame->executable.get();
    }

    ExecutableBase* executableFor(const CodeOrigin& codeOrigin)
    {
        return executableFor(codeOrigin.inlineCallFrame);
    }

    CodeBlock* baselineCodeBlockFor(const CodeOrigin& codeOrigin)
    {
        return baselineCodeBlockForOriginAndBaselineCodeBlock(codeOrigin, m_profiledBlock);
    }

    bool hasGlobalExitSite(const CodeOrigin& codeOrigin, ExitKind exitKind)
    {
        return baselineCodeBlockFor(codeOrigin)->hasExitSite(FrequentExitSite(exitKind));
    }

    bool hasExitSite(const CodeOrigin& codeOrigin, ExitKind exitKind)
    {
        return baselineCodeBlockFor(codeOrigin)->hasExitSite(FrequentExitSite(codeOrigin.bytecodeIndex, exitKind));
    }

    int argumentsRegisterFor(const CodeOrigin& codeOrigin)
    {
        if (!codeOrigin.inlineCallFrame)
            return m_codeBlock->argumentsRegister();

        return baselineCodeBlockForInlineCallFrame(
            codeOrigin.inlineCallFrame)->argumentsRegister() +
            codeOrigin.inlineCallFrame->stackOffset;
    }

    int uncheckedArgumentsRegisterFor(const CodeOrigin& codeOrigin)
    {
        if (!codeOrigin.inlineCallFrame)
            return m_codeBlock->uncheckedArgumentsRegister();

        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(
            codeOrigin.inlineCallFrame);
        if (!codeBlock->usesArguments())
            return InvalidVirtualRegister;

        return codeBlock->argumentsRegister() +
            codeOrigin.inlineCallFrame->stackOffset;
    }

    int uncheckedActivationRegisterFor(const CodeOrigin&)
    {
        // This will ignore CodeOrigin because we don't inline code that uses activations.
        // Hence for inlined call frames it will return the outermost code block's
        // activation register. This method is only used to compare the result to a local
        // to see if we're mucking with the activation register. Hence if we return the
        // "wrong" activation register for the frame then it will compare false, which is
        // what we wanted.
        return m_codeBlock->uncheckedActivationRegister();
    }

    ValueProfile* valueProfileFor(Node* node)
    {
        if (!node)
            return 0;

        CodeBlock* profiledBlock = baselineCodeBlockFor(node->codeOrigin);

        if (node->hasLocal()) {
            if (!operandIsArgument(node->local()))
                return 0;
            int argument = operandToArgument(node->local());
            if (node->variableAccessData() != m_arguments[argument]->variableAccessData())
                return 0;
            return profiledBlock->valueProfileForArgument(argument);
        }

        if (node->hasHeapPrediction())
            return profiledBlock->valueProfileForBytecodeOffset(node->codeOrigin.bytecodeIndexForValueProfile());

        return 0;
    }

    MethodOfGettingAValueProfile methodOfGettingAValueProfileFor(Node* node)
    {
        if (!node)
            return MethodOfGettingAValueProfile();

        CodeBlock* profiledBlock = baselineCodeBlockFor(node->codeOrigin);

        if (node->op() == GetLocal) {
            return MethodOfGettingAValueProfile::fromLazyOperand(
                profiledBlock,
                LazyOperandValueProfileKey(
                    node->codeOrigin.bytecodeIndex, node->local()));
        }

        return MethodOfGettingAValueProfile(valueProfileFor(node));
    }

    bool needsActivation() const
    {
        return m_codeBlock->needsFullScopeChain() && m_codeBlock->codeType() != GlobalCode;
    }

    bool usesArguments() const
    {
        return m_codeBlock->usesArguments();
    }

    unsigned numSuccessors(BasicBlock* block)
    {
        return block->last()->numSuccessors();
    }
    BlockIndex successor(BasicBlock* block, unsigned index)
    {
        return block->last()->successor(index);
    }
    BlockIndex successorForCondition(BasicBlock* block, bool condition)
    {
        return block->last()->successorForCondition(condition);
    }

    bool isPredictedNumerical(Node* node)
    {
        return isNumerical(node->child1().useKind()) && isNumerical(node->child2().useKind());
    }

    // Note that a 'true' return does not actually mean that the ByVal access clobbers nothing.
    // It really means that it will not clobber the entire world. It's still up to you to
    // carefully consider things like:
    // - PutByVal definitely changes the array it stores to, and may even change its length.
    // - PutByOffset definitely changes the object it stores to.
    // - and so on.
    bool byValIsPure(Node* node)
    {
        switch (node->arrayMode().type()) {
        case Array::Generic:
            return false;
        case Array::Int32:
        case Array::Double:
        case Array::Contiguous:
        case Array::ArrayStorage:
            return !node->arrayMode().isOutOfBounds();
        case Array::SlowPutArrayStorage:
            return !node->arrayMode().mayStoreToHole();
        case Array::String:
            return node->op() == GetByVal;
#if USE(JSVALUE32_64)
        case Array::Arguments:
            if (node->op() == GetByVal)
                return true;
            return false;
#endif // USE(JSVALUE32_64)
        default:
            return true;
        }
    }

    bool clobbersWorld(Node* node)
    {
        if (node->flags() & NodeClobbersWorld)
            return true;
        if (!(node->flags() & NodeMightClobber))
            return false;
        switch (node->op()) {
        case ValueAdd:
        case CompareLess:
        case CompareLessEq:
        case CompareGreater:
        case CompareGreaterEq:
        case CompareEq:
            return !isPredictedNumerical(node);
        case GetByVal:
        case PutByVal:
        case PutByValAlias:
            return !byValIsPure(node);
        case ToString:
            switch (node->child1().useKind()) {
            case StringObjectUse:
            case StringOrStringObjectUse:
                return false;
            case CellUse:
            case UntypedUse:
                return true;
            default:
                RELEASE_ASSERT_NOT_REACHED();
                return true;
            }
        default:
            RELEASE_ASSERT_NOT_REACHED();
            return true; // If by some oddity we hit this case in release build it's safer to have CSE assume the worst.
        }
    }

    void determineReachability();
    void resetReachability();

    void resetExitStates();

    unsigned varArgNumChildren(Node* node)
    {
        ASSERT(node->flags() & NodeHasVarArgs);
        return node->numChildren();
    }

    unsigned numChildren(Node* node)
    {
        if (node->flags() & NodeHasVarArgs)
            return varArgNumChildren(node);
        return AdjacencyList::Size;
    }

    Edge& varArgChild(Node* node, unsigned index)
    {
        ASSERT(node->flags() & NodeHasVarArgs);
        return m_varArgChildren[node->firstChild() + index];
    }

    Edge& child(Node* node, unsigned index)
    {
        if (node->flags() & NodeHasVarArgs)
            return varArgChild(node, index);
        return node->children.child(index);
    }

    void voteNode(Node* node, unsigned ballot)
    {
        switch (node->op()) {
        case ValueToInt32:
        case UInt32ToNumber:
            node = node->child1().node();
            break;
        default:
            break;
        }

        if (node->op() == GetLocal)
            node->variableAccessData()->vote(ballot);
    }

    void voteNode(Edge edge, unsigned ballot)
    {
        voteNode(edge.node(), ballot);
    }

    void voteChildren(Node* node, unsigned ballot)
    {
        if (node->flags() & NodeHasVarArgs) {
            for (unsigned childIdx = node->firstChild();
                childIdx < node->firstChild() + node->numChildren();
                childIdx++) {
                if (!!m_varArgChildren[childIdx])
                    voteNode(m_varArgChildren[childIdx], ballot);
            }
            return;
        }

        if (!node->child1())
            return;
        voteNode(node->child1(), ballot);
        if (!node->child2())
            return;
        voteNode(node->child2(), ballot);
        if (!node->child3())
            return;
        voteNode(node->child3(), ballot);
    }

    template<typename T> // T = Node* or Edge
    void substitute(BasicBlock& block, unsigned startIndexInBlock, T oldThing, T newThing)
    {
        for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
            Node* node = block[indexInBlock];
            if (node->flags() & NodeHasVarArgs) {
                for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); ++childIdx) {
                    if (!!m_varArgChildren[childIdx])
                        compareAndSwap(m_varArgChildren[childIdx], oldThing, newThing);
                }
                continue;
            }
            if (!node->child1())
                continue;
            compareAndSwap(node->children.child1(), oldThing, newThing);
            if (!node->child2())
                continue;
            compareAndSwap(node->children.child2(), oldThing, newThing);
            if (!node->child3())
                continue;
            compareAndSwap(node->children.child3(), oldThing, newThing);
        }
    }
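
    // For example, substitute(block, 0, oldNode, newNode) repoints every use of oldNode within
    // 'block' (from its first node onwards) at newNode.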

    // Use this if you introduce a new GetLocal and you know that you introduced it *before*
    // any GetLocals in the basic block.
    // FIXME: it may be appropriate, in the future, to generalize this to handle GetLocals
    // introduced anywhere in the basic block.
    void substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
    {
        if (variableAccessData->isCaptured()) {
            // Let CSE worry about this one.
            return;
        }
        for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
            Node* node = block[indexInBlock];
            bool shouldContinue = true;
            switch (node->op()) {
            case SetLocal: {
                if (node->local() == variableAccessData->local())
                    shouldContinue = false;
                break;
            }

            case GetLocal: {
                if (node->variableAccessData() != variableAccessData)
                    continue;
                substitute(block, indexInBlock, node, newGetLocal);
                Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
                if (oldTailNode == node)
                    block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
                shouldContinue = false;
                break;
            }

            default:
                break;
            }
            if (!shouldContinue)
                break;
        }
    }

    VM& m_vm;
    CodeBlock* m_codeBlock;
#if !ENABLE(DETACHED_JIT)
    RefPtr<Profiler::Compilation> m_compilation;
#endif
    CodeBlock* m_profiledBlock;

    NodeAllocator& m_allocator;

    Vector_shared<OwnPtr<BasicBlock>, 8> m_blocks;
    Vector_shared<Edge, 16> m_varArgChildren;
    Vector_shared<StorageAccessData> m_storageAccessData;
    Vector_shared<ResolveGlobalData> m_resolveGlobalData;
    Vector_shared<ResolveOperationData> m_resolveOperationsData;
    Vector_shared<PutToBaseOperationData> m_putToBaseOperationData;
    Vector_shared<Node*, 8> m_arguments;
    SegmentedVector_shared<VariableAccessData, 16> m_variableAccessData;
    SegmentedVector_shared<ArgumentPosition, 8> m_argumentPositions;
    SegmentedVector_shared<StructureSet, 16> m_structureSet;
    SegmentedVector_shared<StructureTransitionData, 8> m_structureTransitionData;
    SegmentedVector_shared<NewArrayBufferData, 4> m_newArrayBufferData;
    bool m_hasArguments;
    HashSet<ExecutableBase*> m_executablesWhoseArgumentsEscaped;
    BitVector_shared m_preservedVars;
    Dominators m_dominators;
    unsigned m_localVars;
    unsigned m_parameterSlots;
    unsigned m_osrEntryBytecodeIndex;
    Operands<JSValue> m_mustHandleValues;

    OptimizationFixpointState m_fixpointState;
    GraphForm m_form;
    UnificationState m_unificationState;
    RefCountState m_refCountState;

private:
    void handleSuccessor(Vector<BlockIndex, 16>& worklist, BlockIndex blockIndex, BlockIndex successorIndex);

    AddSpeculationMode addImmediateShouldSpeculateInteger(Node* add, bool variableShouldSpeculateInteger, Node* immediate)
    {
        ASSERT(immediate->hasConstant());

        JSValue immediateValue = immediate->valueOfJSConstant(m_codeBlock);
        if (!immediateValue.isNumber())
            return DontSpeculateInteger;

        if (!variableShouldSpeculateInteger)
            return DontSpeculateInteger;

        if (immediateValue.isInt32())
            return add->canSpeculateInteger() ? SpeculateInteger : DontSpeculateInteger;

        double doubleImmediate = immediateValue.asDouble();
        const double twoToThe48 = 281474976710656.0;
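        // 281474976710656 is 2^48; the check below refuses to speculate once the constant's
        // magnitude exceeds that bound.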
        if (doubleImmediate < -twoToThe48 || doubleImmediate > twoToThe48)
            return DontSpeculateInteger;

        return nodeCanTruncateInteger(add->arithNodeFlags()) ? SpeculateIntegerAndTruncateConstants : DontSpeculateInteger;
    }

    bool mulImmediateShouldSpeculateInteger(Node* mul, Node* variable, Node* immediate)
    {
        ASSERT(immediate->hasConstant());

        JSValue immediateValue = immediate->valueOfJSConstant(m_codeBlock);
        if (!immediateValue.isInt32())
            return false;

        if (!variable->shouldSpeculateIntegerForArithmetic())
            return false;

        int32_t intImmediate = immediateValue.asInt32();
        // Doubles have a 53 bit mantissa so we expect a multiplication of 2^31 (the highest
        // magnitude possible int32 value) and any value less than 2^22 to not result in any
        // rounding in a double multiplication - hence it will be equivalent to an integer
        // multiplication, if we are doing int32 truncation afterwards (which is what
        // canSpeculateInteger() implies).
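        // Concretely: |int32| <= 2^31 and |immediate| < 2^22, so the product stays below 2^53 and
        // is represented exactly by a double's 53-bit mantissa; no rounding can occur.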
        const int32_t twoToThe22 = 1 << 22;
        if (intImmediate <= -twoToThe22 || intImmediate >= twoToThe22)
            return mul->canSpeculateInteger() && !nodeMayOverflow(mul->arithNodeFlags());

        return mul->canSpeculateInteger();
    }
};

class GetBytecodeBeginForBlock {
public:
    GetBytecodeBeginForBlock(Graph& graph)
        : m_graph(graph)
    {
    }

    unsigned operator()(BlockIndex* blockIndex) const
    {
        return m_graph.m_blocks[*blockIndex]->bytecodeBegin;
    }

private:
    Graph& m_graph;
};

inline BlockIndex Graph::blockIndexForBytecodeOffset(Vector<BlockIndex>& linkingTargets, unsigned bytecodeBegin)
{
    return *binarySearch<BlockIndex, unsigned>(linkingTargets, linkingTargets.size(), bytecodeBegin, GetBytecodeBeginForBlock(*this));
}

#define DFG_NODE_DO_TO_CHILDREN(graph, node, thingToDo) do { \
        Node* _node = (node); \
        if (_node->flags() & NodeHasVarArgs) { \
            for (unsigned _childIdx = _node->firstChild(); \
                _childIdx < _node->firstChild() + _node->numChildren(); \
                _childIdx++) { \
                if (!!(graph).m_varArgChildren[_childIdx]) \
                    thingToDo(_node, (graph).m_varArgChildren[_childIdx]); \
            } \
        } else { \
            if (!_node->child1()) { \
                ASSERT( \
                    !_node->child2() \
                    && !_node->child3()); \
                break; \
            } \
            thingToDo(_node, _node->child1()); \
            \
            if (!_node->child2()) { \
                ASSERT(!_node->child3()); \
                break; \
            } \
            thingToDo(_node, _node->child2()); \
            \
            if (!_node->child3()) \
                break; \
            thingToDo(_node, _node->child3()); \
        } \
    } while (false)
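
// A minimal usage sketch (hypothetical: 'handleEdge' stands in for any macro or function taking a
// Node* and an Edge; it is not defined in this header):
//
//     DFG_NODE_DO_TO_CHILDREN(m_graph, node, handleEdge);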

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)
#endif // DFGGraph_h