DFGThunks.cpp

/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGThunks.h"

#if ENABLE(DFG_JIT)

#include "DFGCCallHelpers.h"
#include "DFGFPRInfo.h"
#include "DFGGPRInfo.h"
#include "DFGOSRExitCompiler.h"
#include "MacroAssembler.h"

namespace JSC { namespace DFG {

#if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)
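
// Generates the thunk that every DFG OSR exit jumps through: it spills all
// GPRs and FPRs to a VM-owned scratch buffer, calls compileOSRExit() to
// lazily compile the exit-specific code, restores every register, and then
// jumps to the freshly compiled exit via vm->osrExitJumpDestination.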
MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM* vm)
{
    MacroAssembler jit;

    size_t scratchSize = sizeof(EncodedJSValue) * (GPRInfo::numberOfRegisters + FPRInfo::numberOfRegisters);
    ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(scratchSize);
    EncodedJSValue* buffer = static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer());
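
    // Scratch buffer layout, as used by the loops below: one EncodedJSValue
    // slot per register, GPRs first (buffer[0 .. GPRInfo::numberOfRegisters - 1]),
    // then FPRs (buffer[GPRInfo::numberOfRegisters ..]).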
    for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
        jit.store64(GPRInfo::toRegister(i), buffer + i);
#else
        jit.store32(GPRInfo::toRegister(i), buffer + i);
#endif
    }
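
    // storeDouble takes its destination address from a register, so stage each
    // slot's address in regT0. Clobbering regT0 here is safe: its original
    // value was already saved by the GPR loop above.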
    for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
        jit.move(MacroAssembler::TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
        jit.storeDouble(FPRInfo::toRegister(i), GPRInfo::regT0);
    }

    // Tell the GC mark phase how much of the scratch buffer is active during the call.
    jit.move(MacroAssembler::TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::regT0);
    jit.storePtr(MacroAssembler::TrustedImmPtr(scratchSize), GPRInfo::regT0);

    // Set up one argument.
#if CPU(X86)
    jit.poke(GPRInfo::callFrameRegister, 0);
#else
    jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
#endif

    MacroAssembler::Call functionCall = jit.call();

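    // The call target is bound below, when the LinkBuffer links functionCall to
    // compileOSRExit (or, in the detached-JIT build, to its VM-process stub).
    // Once it returns, mark the scratch buffer inactive again and restore every
    // register before jumping to the newly generated OSR exit code.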
    jit.move(MacroAssembler::TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::regT0);
    jit.storePtr(MacroAssembler::TrustedImmPtr(0), GPRInfo::regT0);

    for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
        jit.move(MacroAssembler::TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
        jit.loadDouble(GPRInfo::regT0, FPRInfo::toRegister(i));
    }
    for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
        jit.load64(buffer + i, GPRInfo::toRegister(i));
#else
        jit.load32(buffer + i, GPRInfo::toRegister(i));
#endif
    }

    jit.jump(MacroAssembler::AbsoluteAddress(&vm->osrExitJumpDestination));

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
#if ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT
    // In the detached-JIT model, link to the stub in the VM process rather than
    // the one in the compiler process.
    patchBuffer.link(functionCall, compileOSRExit_vmstub);
#else
    patchBuffer.link(functionCall, compileOSRExit);
#endif
    return FINALIZE_CODE(patchBuffer, ("DFG OSR exit generation thunk"));
}
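
// Debug-only sanity check: if pointerGPR is null, hit a breakpoint; otherwise
// perform a dummy one-byte load through it so an unmapped pointer faults here,
// close to the bug, rather than at some later, harder-to-diagnose point.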
inline void emitPointerValidation(CCallHelpers& jit, GPRReg pointerGPR)
{
#if !ASSERT_DISABLED
    CCallHelpers::Jump isNonZero = jit.branchTestPtr(CCallHelpers::NonZero, pointerGPR);
    jit.breakpoint();
    isNonZero.link(&jit);

    jit.push(pointerGPR);
    jit.load8(pointerGPR, pointerGPR);
    jit.pop(pointerGPR);
#else
    UNUSED_PARAM(jit);
    UNUSED_PARAM(pointerGPR);
#endif
}

MacroAssemblerCodeRef throwExceptionFromCallSlowPathGenerator(VM* vm)
{
    CCallHelpers jit(vm);

    // We will jump to here if the JIT code thinks it's making a call, but the
    // linking helper (C++ code) decided to throw an exception instead. We will
    // have saved the callReturnIndex in the first argument of JITStackFrame.
    // Note that the return address will be on the stack at this point, so we
    // need to remove it and drop it on the floor, since we don't care about it.
    // Finally, note that the call frame register points at the callee frame, so
    // we need to pop it.
    jit.preserveReturnAddressAfterCall(GPRInfo::nonPreservedNonReturnGPR);
    jit.loadPtr(
        CCallHelpers::Address(
            GPRInfo::callFrameRegister,
            static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::CallerFrame),
        GPRInfo::callFrameRegister);
#if USE(JSVALUE64)
    jit.peek64(GPRInfo::nonPreservedNonReturnGPR, JITSTACKFRAME_ARGS_INDEX);
#else
    jit.peek(GPRInfo::nonPreservedNonReturnGPR, JITSTACKFRAME_ARGS_INDEX);
#endif
    jit.setupArgumentsWithExecState(GPRInfo::nonPreservedNonReturnGPR);
    jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(lookupExceptionHandler)), GPRInfo::nonArgGPR0);
    emitPointerValidation(jit, GPRInfo::nonArgGPR0);
    jit.call(GPRInfo::nonArgGPR0);
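
    // The handler lookup leaves the catch routine's address in returnValueGPR2;
    // validate it and tail-jump there.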
    emitPointerValidation(jit, GPRInfo::returnValueGPR2);
    jit.jump(GPRInfo::returnValueGPR2);

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("DFG throw exception from call slow path thunk"));
}
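
// Shared tail for the link and virtual thunks: stash the return address in the
// callee frame's ReturnPC slot, call the given slow-path operation, then jump
// to whatever machine-code address that operation returned.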
static void slowPathFor(
    CCallHelpers& jit, VM* vm, P_DFGOperation_E slowPathFunction)
{
    jit.preserveReturnAddressAfterCall(GPRInfo::nonArgGPR2);
    emitPointerValidation(jit, GPRInfo::nonArgGPR2);
    jit.storePtr(
        GPRInfo::nonArgGPR2,
        CCallHelpers::Address(
            GPRInfo::callFrameRegister,
            static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ReturnPC));
    jit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
#if USE(JSVALUE64)
    jit.poke64(GPRInfo::nonPreservedNonReturnGPR, JITSTACKFRAME_ARGS_INDEX);
#else
    jit.poke(GPRInfo::nonPreservedNonReturnGPR, JITSTACKFRAME_ARGS_INDEX);
#endif
    jit.setupArgumentsExecState();
    jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(slowPathFunction)), GPRInfo::nonArgGPR0);
    emitPointerValidation(jit, GPRInfo::nonArgGPR0);
    jit.call(GPRInfo::nonArgGPR0);

    // This slow call will return the address of one of the following:
    // 1) Exception throwing thunk.
    // 2) Host call return value returner thingy.
    // 3) The function to call.
    jit.loadPtr(
        CCallHelpers::Address(
            GPRInfo::callFrameRegister,
            static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ReturnPC),
        GPRInfo::nonPreservedNonReturnGPR);
    jit.storePtr(
        CCallHelpers::TrustedImmPtr(0),
        CCallHelpers::Address(
            GPRInfo::callFrameRegister,
            static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ReturnPC));
    emitPointerValidation(jit, GPRInfo::nonPreservedNonReturnGPR);
    jit.restoreReturnAddressBeforeReturn(GPRInfo::nonPreservedNonReturnGPR);
    emitPointerValidation(jit, GPRInfo::returnValueGPR);
    jit.jump(GPRInfo::returnValueGPR);
}

MacroAssemblerCodeRef linkForThunkGenerator(
    VM* vm, CodeSpecializationKind kind)
{
    // The return address is on the stack or in the link register. We will hence
    // save the return address to the call frame while we make a C++ function call
    // to perform linking and lazy compilation if necessary. We expect the callee
    // to be in nonArgGPR0/nonArgGPR1 (payload/tag), the call frame to have already
    // been adjusted, nonPreservedNonReturnGPR to hold the exception handler index,
    // and all other registers to be available for use. We use JITStackFrame::args
    // to save important information across calls.

    CCallHelpers jit(vm);

    slowPathFor(jit, vm, kind == CodeForCall ? operationLinkCall : operationLinkConstruct);

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(
        patchBuffer,
        ("DFG link %s slow path thunk", kind == CodeForCall ? "call" : "construct"));
}

#endif // !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)

MacroAssemblerCodeRef linkCallThunkGenerator(VM* vm)
{
    return linkForThunkGenerator(vm, CodeForCall);
}

MacroAssemblerCodeRef linkConstructThunkGenerator(VM* vm)
{
    return linkForThunkGenerator(vm, CodeForConstruct);
}

#if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)

// For closure optimizations, we only include calls, since if you're using closures for
// object construction then you're going to lose big time anyway.
MacroAssemblerCodeRef linkClosureCallThunkGenerator(VM* vm)
{
    CCallHelpers jit(vm);

    slowPathFor(jit, vm, operationLinkClosureCall);

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("DFG link closure call slow path thunk"));
}

MacroAssemblerCodeRef virtualForThunkGenerator(
    VM* vm, CodeSpecializationKind kind)
{
    DEFINE_STATIC_CLASSINFO(JSFunction);

    // The return address is on the stack, or in the link register. We will hence
    // jump to the callee, or save the return address to the call frame while we
    // make a C++ function call to the appropriate DFG operation.

    CCallHelpers jit(vm);

    CCallHelpers::JumpList slowCase;

    // FIXME: we should have a story for eliminating these checks. In many cases,
    // the DFG knows that the value is definitely a cell, or definitely a function.
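
    // Bail to the slow case unless the callee is a cell: on 64-bit, any bits
    // set under the tag mask mean "not a cell"; on 32-bit, the tag word must
    // equal CellTag.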
#if USE(JSVALUE64)
    slowCase.append(
        jit.branchTest64(
            CCallHelpers::NonZero, GPRInfo::nonArgGPR0, GPRInfo::tagMaskRegister));
#else
    slowCase.append(
        jit.branch32(
            CCallHelpers::NotEqual, GPRInfo::nonArgGPR1,
            CCallHelpers::TrustedImm32(JSValue::CellTag)));
#endif

    jit.loadPtr(CCallHelpers::Address(GPRInfo::nonArgGPR0, JSCell::structureOffset()), GPRInfo::nonArgGPR2);
    slowCase.append(
        jit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(GPRInfo::nonArgGPR2, Structure::classInfoOffset()),
            CCallHelpers::TrustedImmPtr(sJSFunctionClassInfo)));

    // Now we know we have a JSFunction.

    jit.loadPtr(
        CCallHelpers::Address(GPRInfo::nonArgGPR0, JSFunction::offsetOfExecutable()),
        GPRInfo::nonArgGPR2);
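    // A negative parameter count appears to act as the "no code compiled yet
    // for this specialization" sentinel; in that case take the slow path,
    // which can trigger compilation.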
    slowCase.append(
        jit.branch32(
            CCallHelpers::LessThan,
            CCallHelpers::Address(
                GPRInfo::nonArgGPR2, ExecutableBase::offsetOfNumParametersFor(kind)),
            CCallHelpers::TrustedImm32(0)));

    // Now we know that we have a CodeBlock, and we're committed to making a fast
    // call.

    jit.loadPtr(
        CCallHelpers::Address(GPRInfo::nonArgGPR0, JSFunction::offsetOfScopeChain()),
        GPRInfo::nonArgGPR1);
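    // Store the callee's scope chain into the frame's ScopeChain slot; on
    // 32-bit the payload and CellTag are written as two separate words.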
#if USE(JSVALUE64)
    jit.store64(
        GPRInfo::nonArgGPR1,
        CCallHelpers::Address(
            GPRInfo::callFrameRegister,
            static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain));
#else
    jit.storePtr(
        GPRInfo::nonArgGPR1,
        CCallHelpers::Address(
            GPRInfo::callFrameRegister,
            static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain +
            OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    jit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(
            GPRInfo::callFrameRegister,
            static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain +
            OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
#endif

    jit.loadPtr(
        CCallHelpers::Address(GPRInfo::nonArgGPR2, ExecutableBase::offsetOfJITCodeWithArityCheckFor(kind)),
        GPRInfo::regT0);

    // Make a tail call. This will return back to DFG code.
    emitPointerValidation(jit, GPRInfo::regT0);
    jit.jump(GPRInfo::regT0);

    slowCase.link(&jit);

    // Here we don't know anything, so revert to the full slow path.
    slowPathFor(jit, vm, kind == CodeForCall ? operationVirtualCall : operationVirtualConstruct);

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(
        patchBuffer,
        ("DFG virtual %s slow path thunk", kind == CodeForCall ? "call" : "construct"));
}

#endif // !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)

MacroAssemblerCodeRef virtualCallThunkGenerator(VM* vm)
{
    return virtualForThunkGenerator(vm, CodeForCall);
}

MacroAssemblerCodeRef virtualConstructThunkGenerator(VM* vm)
{
    return virtualForThunkGenerator(vm, CodeForConstruct);
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)