// DFGJITCompiler.h
/*
 * Copyright (C) 2011, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
  25. #ifndef DFGJITCompiler_h
  26. #define DFGJITCompiler_h
  27. #if ENABLE(DFG_JIT)
  28. #include "CodeBlock.h"
  29. #include "DFGCCallHelpers.h"
  30. #include "DFGDisassembler.h"
  31. #include "DFGFPRInfo.h"
  32. #include "DFGGPRInfo.h"
  33. #include "DFGGraph.h"
  34. #include "DFGOSRExitCompilationInfo.h"
  35. #include "DFGRegisterBank.h"
  36. #include "DFGRegisterSet.h"
  37. #include "JITCode.h"
  38. #include "LinkBuffer.h"
  39. #include "MacroAssembler.h"
  40. namespace JSC {
  41. class AbstractSamplingCounter;
  42. class CodeBlock;
  43. class VM;
  44. namespace DFG {
  45. class JITCodeGenerator;
  46. class NodeToRegisterMap;
  47. class OSRExitJumpPlaceholder;
  48. class SlowPathGenerator;
  49. class SpeculativeJIT;
  50. class SpeculationRecovery;
  51. struct EntryLocation;
  52. struct OSRExit;
// === CallLinkRecord ===
//
// A record of a call out from JIT code that needs linking to a helper function.
// Every CallLinkRecord contains a reference to the call instruction & the function
// that it needs to be linked to.
struct CallLinkRecord {
    CallLinkRecord(MacroAssembler::Call call, FunctionPtr function)
        : m_call(call)
        , m_function(function)
    {
    }

    MacroAssembler::Call m_call; // The emitted call awaiting linking.
    FunctionPtr m_function;      // The helper function to link it to.
};
// CallBeginToken ties a beginCall() to its subsequent exception-check
// registration. In debug builds (!ASSERT_DISABLED) it remembers the
// CodeOrigin and exception-check index given at set() time and asserts that
// every later registration matches; in release builds it carries no state
// and all of its methods compile to no-ops.
class CallBeginToken {
public:
    CallBeginToken()
#if !ASSERT_DISABLED
        : m_registered(false)
        , m_exceptionCheckIndex(std::numeric_limits<unsigned>::max())
#endif
    {
    }

    ~CallBeginToken()
    {
        // A token whose origin was set must have been registered with an
        // exception check before it is destroyed.
        ASSERT(m_registered || !m_codeOrigin.isSet());
        // Origin and check index are set together, or not at all.
        ASSERT(m_codeOrigin.isSet() == (m_exceptionCheckIndex != std::numeric_limits<unsigned>::max()));
    }

    // Record the CodeOrigin and exception-check index for an upcoming call.
    // Any previous use of this token must have completed its registration.
    void set(CodeOrigin codeOrigin, unsigned index)
    {
#if !ASSERT_DISABLED
        ASSERT(m_registered || !m_codeOrigin.isSet());
        ASSERT(m_codeOrigin.isSet() == (m_exceptionCheckIndex != std::numeric_limits<unsigned>::max()));
        m_codeOrigin = codeOrigin;
        m_registered = false;
        m_exceptionCheckIndex = index;
#else
        UNUSED_PARAM(codeOrigin);
        UNUSED_PARAM(index);
#endif
    }

    // Mark the token as consumed by an exception check. Idempotent: only the
    // first registration validates the index; later calls return early.
    void registerWithExceptionCheck(CodeOrigin codeOrigin, unsigned index)
    {
#if !ASSERT_DISABLED
        ASSERT(m_codeOrigin == codeOrigin);
        if (m_registered)
            return;
        ASSERT(m_exceptionCheckIndex == index);
        m_registered = true;
#else
        UNUSED_PARAM(codeOrigin);
        UNUSED_PARAM(index);
#endif
    }

#if !ASSERT_DISABLED
    const CodeOrigin& codeOrigin() const
    {
        return m_codeOrigin;
    }
#endif

private:
#if !ASSERT_DISABLED
    CodeOrigin m_codeOrigin;        // Origin recorded by set().
    bool m_registered;              // Has an exception check consumed this token?
    unsigned m_exceptionCheckIndex; // Index the registering check must match.
#endif
};
// === CallExceptionRecord ===
//
// A record of a call out from JIT code that might throw an exception.
// Calls that might throw an exception also record the Jump taken on exception
// (unset if not present) and code origin used to recover handler/source info.
struct CallExceptionRecord {
    // Record a call with no inline exception-check jump; m_exceptionCheck
    // is left default-constructed (unset).
    CallExceptionRecord(MacroAssembler::Call call, CodeOrigin codeOrigin)
        : m_call(call)
        , m_codeOrigin(codeOrigin)
    {
    }

    // Record a call together with the jump taken when it throws.
    CallExceptionRecord(MacroAssembler::Call call, MacroAssembler::Jump exceptionCheck, CodeOrigin codeOrigin)
        : m_call(call)
        , m_exceptionCheck(exceptionCheck)
        , m_codeOrigin(codeOrigin)
    {
    }

    MacroAssembler::Call m_call;
    MacroAssembler::Jump m_exceptionCheck; // Unset for the two-argument constructor.
    CodeOrigin m_codeOrigin;
};
// PropertyAccessRecord captures the labels and jumps emitted for a property
// access so that later patching can locate the structure immediate, the
// structure check, the property-storage load, and the load/store itself.
// On JSVALUE64 the value lives in one GPR; on JSVALUE32_64 it is a
// tag/payload pair, hence the extra label and register in that build.
struct PropertyAccessRecord {
    // NOTE(review): names suggest RegistersFlushed means registers were
    // spilled before the access and RegistersInUse means m_usedRegisters is
    // live across it — confirm against the repatching code.
    enum RegisterMode { RegistersFlushed, RegistersInUse };

#if USE(JSVALUE64)
    PropertyAccessRecord(
        CodeOrigin codeOrigin,
        MacroAssembler::DataLabelPtr structureImm,
        MacroAssembler::PatchableJump structureCheck,
        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
        MacroAssembler::DataLabelCompact loadOrStore,
        SlowPathGenerator* slowPathGenerator,
        MacroAssembler::Label done,
        int8_t baseGPR,
        int8_t valueGPR,
        const RegisterSet& usedRegisters,
        RegisterMode registerMode = RegistersInUse)
#elif USE(JSVALUE32_64)
    PropertyAccessRecord(
        CodeOrigin codeOrigin,
        MacroAssembler::DataLabelPtr structureImm,
        MacroAssembler::PatchableJump structureCheck,
        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
        MacroAssembler::DataLabelCompact tagLoadOrStore,
        MacroAssembler::DataLabelCompact payloadLoadOrStore,
        SlowPathGenerator* slowPathGenerator,
        MacroAssembler::Label done,
        int8_t baseGPR,
        int8_t valueTagGPR,
        int8_t valueGPR,
        const RegisterSet& usedRegisters,
        RegisterMode registerMode = RegistersInUse)
#endif
        : m_codeOrigin(codeOrigin)
        , m_structureImm(structureImm)
        , m_structureCheck(structureCheck)
        , m_propertyStorageLoad(propertyStorageLoad)
#if USE(JSVALUE64)
        , m_loadOrStore(loadOrStore)
#elif USE(JSVALUE32_64)
        , m_tagLoadOrStore(tagLoadOrStore)
        , m_payloadLoadOrStore(payloadLoadOrStore)
#endif
        , m_slowPathGenerator(slowPathGenerator)
        , m_done(done)
        , m_baseGPR(baseGPR)
#if USE(JSVALUE32_64)
        , m_valueTagGPR(valueTagGPR)
#endif
        , m_valueGPR(valueGPR)
        , m_usedRegisters(usedRegisters)
        , m_registerMode(registerMode)
    {
    }

    CodeOrigin m_codeOrigin;
    MacroAssembler::DataLabelPtr m_structureImm;   // Patchable structure constant.
    MacroAssembler::PatchableJump m_structureCheck; // Jump emitted for the structure test.
    MacroAssembler::ConvertibleLoadLabel m_propertyStorageLoad;
#if USE(JSVALUE64)
    MacroAssembler::DataLabelCompact m_loadOrStore;
#elif USE(JSVALUE32_64)
    MacroAssembler::DataLabelCompact m_tagLoadOrStore;
    MacroAssembler::DataLabelCompact m_payloadLoadOrStore;
#endif
    SlowPathGenerator* m_slowPathGenerator;
    MacroAssembler::Label m_done; // Label just past the fast path.
    int8_t m_baseGPR;             // GPRs stored narrow to keep the record small.
#if USE(JSVALUE32_64)
    int8_t m_valueTagGPR;
#endif
    int8_t m_valueGPR;
    RegisterSet m_usedRegisters;
    RegisterMode m_registerMode;
};
// === JITCompiler ===
//
// DFG::JITCompiler is responsible for generating JIT code from the dataflow graph.
// It does so by delegating to the speculative & non-speculative JITs, which
// generate to a MacroAssembler (which the JITCompiler owns through an inheritance
// relationship). The JITCompiler holds references to information required during
// compilation, and also records information used in linking (e.g. a list of all
// call to be linked).
class JITCompiler : public CCallHelpers {
public:
    JITCompiler(Graph& dfg);

    // Compile the graph; results are returned through the out-parameters.
    bool compile(JITCode& entry);
    bool compileFunction(JITCode& entry, MacroAssemblerCodePtr& entryWithArityCheck);

    // Accessors for properties.
    Graph& graph() { return m_graph; }

    // Methods to set labels for the disassembler. They are no-ops unless a
    // disassembler exists for this compile (m_disassembler is usually null,
    // hence the LIKELY hints), and compile away entirely under DETACHED_JIT.
    void setStartOfCode()
    {
#if !ENABLE(DETACHED_JIT)
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setStartOfCode(labelIgnoringWatchpoints());
#endif
    }

    void setForBlock(BlockIndex blockIndex)
    {
#if !ENABLE(DETACHED_JIT)
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setForBlock(blockIndex, labelIgnoringWatchpoints());
#endif
    }

    void setForNode(Node* node)
    {
#if !ENABLE(DETACHED_JIT)
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setForNode(node, labelIgnoringWatchpoints());
#endif
    }

    void setEndOfMainPath()
    {
#if !ENABLE(DETACHED_JIT)
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setEndOfMainPath(labelIgnoringWatchpoints());
#endif
    }

    void setEndOfCode()
    {
#if !ENABLE(DETACHED_JIT)
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setEndOfCode(labelIgnoringWatchpoints());
#endif
    }

    unsigned currentCodeOriginIndex() const
    {
        return m_currentCodeOriginIndex;
    }

    // Get a token for beginning a call, and set the current code origin index in
    // the call frame. For each beginCall() there must be at least one exception
    // check, and all of the exception checks must have the same CodeOrigin as the
    // beginCall().
    void beginCall(CodeOrigin codeOrigin, CallBeginToken& token)
    {
        // The index of the next exception check identifies this call site; it
        // is stored into the ArgumentCount tag slot before the call is made.
        unsigned index = m_exceptionChecks.size();
        store32(TrustedImm32(index), tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
        token.set(codeOrigin, index);
    }

    // Notify the JIT of a call that does not require linking.
    void notifyCall(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
    {
        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
        m_exceptionChecks.append(CallExceptionRecord(functionCall, codeOrigin));
    }

    // Add a call out from JIT code, without an exception check.
    Call appendCall(const FunctionPtr& function)
    {
        Call functionCall = call();
        m_calls.append(CallLinkRecord(functionCall, function));
        return functionCall;
    }

    // Stash the current exception-check count where the exception handler
    // machinery expects it (nonPreservedNonReturnGPR) before checking.
    void prepareForExceptionCheck()
    {
        move(TrustedImm32(m_exceptionChecks.size()), GPRInfo::nonPreservedNonReturnGPR);
    }

    // Add a call out from JIT code, with an exception check.
    void addExceptionCheck(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
    {
        prepareForExceptionCheck();
        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
        m_exceptionChecks.append(CallExceptionRecord(functionCall, emitExceptionCheck(), codeOrigin));
    }

    // Add a call out from JIT code, with a fast exception check that tests if the return value is zero.
    void addFastExceptionCheck(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
    {
        prepareForExceptionCheck();
        Jump exceptionCheck = branchTestPtr(Zero, GPRInfo::returnValueGPR);
        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
        m_exceptionChecks.append(CallExceptionRecord(functionCall, exceptionCheck, codeOrigin));
    }

    // Record a new OSR exit site; jumpsToFail are the jumps that trigger it.
    void appendExitInfo(MacroAssembler::JumpList jumpsToFail = MacroAssembler::JumpList())
    {
        OSRExitCompilationInfo info;
        info.m_failureJumps = jumpsToFail;
        m_exitCompilationInfo.append(info);
    }

#if USE(JSVALUE32_64)
    // Address of a boxed double constant in the code block's constant pool,
    // so 32-bit code can load it in halves.
    void* addressOfDoubleConstant(Node* node)
    {
        ASSERT(m_graph.isNumberConstant(node));
        unsigned constantIndex = node->constantNumber();
        return &(codeBlock()->constantRegister(FirstConstantRegisterIndex + constantIndex));
    }
#endif

    void addPropertyAccess(const PropertyAccessRecord& record)
    {
        m_propertyAccesses.append(record);
    }

    void addJSCall(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo::CallType callType, GPRReg callee, CodeOrigin codeOrigin)
    {
        m_jsCalls.append(JSCallRecord(fastCall, slowCall, targetToCheck, callType, callee, codeOrigin));
    }

    // Weak references let the GC know about cells the generated code points at.
    void addWeakReference(JSCell* target)
    {
        m_codeBlock->appendWeakReference(target);
    }

    void addWeakReferences(const StructureSet& structureSet)
    {
        for (unsigned i = structureSet.size(); i--;)
            addWeakReference(structureSet[i]);
    }

    void addWeakReferenceTransition(JSCell* codeOrigin, JSCell* from, JSCell* to)
    {
        m_codeBlock->appendWeakReferenceTransition(codeOrigin, from, to);
    }

    // Compare against a cell pointer and register it as a weak reference in
    // one step, so the comparison stays valid under GC.
    template<typename T>
    Jump branchWeakPtr(RelationalCondition cond, T left, JSCell* weakPtr)
    {
        Jump result = branchPtr(cond, left, TrustedImmPtr(weakPtr));
        addWeakReference(weakPtr);
        return result;
    }

    // Record an OSR entry point for basicBlock at blockHead, with the value
    // expectations the entering state must satisfy.
    void noticeOSREntry(BasicBlock& basicBlock, JITCompiler::Label blockHead, LinkBuffer& linkBuffer)
    {
#if DFG_ENABLE(OSR_ENTRY)
        // OSR entry is not allowed into blocks deemed unreachable by control flow analysis.
        if (!basicBlock.cfaHasVisited)
            return;

        OSREntryData* entry = codeBlock()->appendDFGOSREntryData(basicBlock.bytecodeBegin, linkBuffer.offsetOf(blockHead));

        entry->m_expectedValues = basicBlock.valuesAtHead;

        // Fix the expected values: in our protocol, a dead variable will have an expected
        // value of (None, []). But the old JIT may stash some values there. So we really
        // need (Top, TOP).
        for (size_t argument = 0; argument < basicBlock.variablesAtHead.numberOfArguments(); ++argument) {
            Node* node = basicBlock.variablesAtHead.argument(argument);
            if (!node || !node->shouldGenerate())
                entry->m_expectedValues.argument(argument).makeTop();
        }
        for (size_t local = 0; local < basicBlock.variablesAtHead.numberOfLocals(); ++local) {
            Node* node = basicBlock.variablesAtHead.local(local);
            if (!node || !node->shouldGenerate())
                entry->m_expectedValues.local(local).makeTop();
            else if (node->variableAccessData()->shouldUseDoubleFormat())
                entry->m_localsForcedDouble.set(local);
        }
#else
        UNUSED_PARAM(basicBlock);
        UNUSED_PARAM(blockHead);
        UNUSED_PARAM(linkBuffer);
#endif
    }

private:
    friend class OSRExitJumpPlaceholder;

    // Internal implementation to compile.
    void compileEntry();
    void compileBody(SpeculativeJIT&);
    void link(LinkBuffer&);

    void exitSpeculativeWithOSR(const OSRExit&, SpeculationRecovery*);
    void compileExceptionHandlers();
    void linkOSRExits();

    // The dataflow graph currently being generated.
    Graph& m_graph;

#if !ENABLE(DETACHED_JIT)
    OwnPtr<Disassembler> m_disassembler;
#endif

    // Vector of calls out from JIT code, including exception handler information.
    // Count of the number of CallRecords with exception handlers.
    Vector<CallLinkRecord> m_calls;
    Vector<CallExceptionRecord> m_exceptionChecks;

    // Everything needed to link one polymorphic JS call site.
    struct JSCallRecord {
        JSCallRecord(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo::CallType callType, GPRReg callee, CodeOrigin codeOrigin)
            : m_fastCall(fastCall)
            , m_slowCall(slowCall)
            , m_targetToCheck(targetToCheck)
            , m_callType(callType)
            , m_callee(callee)
            , m_codeOrigin(codeOrigin)
        {
        }

        Call m_fastCall;
        Call m_slowCall;
        DataLabelPtr m_targetToCheck;
        CallLinkInfo::CallType m_callType;
        GPRReg m_callee;
        CodeOrigin m_codeOrigin;
    };

    Vector<PropertyAccessRecord, 4> m_propertyAccesses;
    Vector<JSCallRecord, 4> m_jsCalls;
    Vector<OSRExitCompilationInfo> m_exitCompilationInfo;
    Vector<Vector<Label> > m_exitSiteLabels;
    unsigned m_currentCodeOriginIndex;
};
} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)

#endif // DFGJITCompiler_h