/*
 * Copyright (C) 2010 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef SpecializedThunkJIT_h
#define SpecializedThunkJIT_h

#if ENABLE(JIT)

#include "Executable.h"
#include "JSCBridgeToll.h"
#include "JSInterfaceJIT.h"
#include "LinkBuffer.h"
namespace JSC {

// Assembler helper for emitting small, type-specialized thunks for host
// functions. Fast-path type checks are emitted inline; every check that can
// fail appends a jump to m_failures, and finalize() links all of those jumps
// to a generic fallback entry point supplied by the caller.
class SpecializedThunkJIT : public JSInterfaceJIT {
public:
    // Pseudo argument index callers use to refer to the 'this' value.
    static const int ThisArgument = -1;

    SpecializedThunkJIT(int expectedArgCount)
    {
        // Check that we have the expected number of arguments
        // (+1 because the ArgumentCount slot also counts 'this').
        m_failures.append(branch32(NotEqual, payloadFor(JSStack::ArgumentCount), TrustedImm32(expectedArgCount + 1)));
    }

    // Loads the given argument as a double into dst, appending a guard to
    // the failure path if it cannot be loaded as a double. scratch is
    // clobbered.
    void loadDoubleArgument(int argument, FPRegisterID dst, RegisterID scratch)
    {
        unsigned src = CallFrame::argumentOffset(argument);
        m_failures.append(emitLoadDouble(src, dst, scratch));
    }

    // Loads the given argument into dst, appending a guard to the failure
    // path if the value is not a JSCell.
    void loadCellArgument(int argument, RegisterID dst)
    {
        unsigned src = CallFrame::argumentOffset(argument);
        m_failures.append(emitLoadJSCell(src, dst));
    }

    // Loads the given argument into dst and fails unless it is a JSString,
    // determined by comparing the cell's structure pointer against the VM's
    // shared string structure.
    void loadJSStringArgument(VM& vm, int argument, RegisterID dst)
    {
        loadCellArgument(argument, dst);
#if ENABLE(DETACHED_JIT)
        // Detached-JIT builds fetch the string structure through the bridge
        // toll rather than dereferencing the VM directly.
        void * stringStructurePtr = JSCBridge::sharedInstance()->toll()->m_vm.m_stringStructure;
#else
        void * stringStructurePtr = vm.stringStructure.get();
#endif
        m_failures.append(branchPtr(NotEqual, Address(dst, JSCell::structureOffset()), TrustedImmPtr(stringStructurePtr)));
    }

    // Loads the given argument as an int32 into dst. On failure control
    // transfers to failTarget, which the CALLER must link — it is not added
    // to m_failures.
    void loadInt32Argument(int argument, RegisterID dst, Jump& failTarget)
    {
        unsigned src = CallFrame::argumentOffset(argument);
        failTarget = emitLoadInt32(src, dst);
    }

    // Convenience overload: an int32 load failure goes to the common
    // failure path.
    void loadInt32Argument(int argument, RegisterID dst)
    {
        Jump conversionFailed;
        loadInt32Argument(argument, dst, conversionFailed);
        m_failures.append(conversionFailed);
    }

    // Adds an externally created guard jump to the common failure path.
    void appendFailure(const Jump& failure)
    {
        m_failures.append(failure);
    }

    // Returns the already-encoded JSValue in src: moves it to the return
    // register if needed, restores the caller's frame, and returns.
    void returnJSValue(RegisterID src)
    {
        if (src != regT0)
            move(src, regT0);
        loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
        ret();
    }

    // Encodes the double in src as a JSValue return value, restores the
    // caller's frame, and returns.
    void returnDouble(FPRegisterID src)
    {
#if USE(JSVALUE64)
        // Re-tag the raw double bits by subtracting TagTypeNumber.
        moveDoubleTo64(src, regT0);
        Jump zero = branchTest64(Zero, regT0);
        sub64(tagTypeNumberRegister, regT0);
        Jump done = jump();
        zero.link(this);
        // All-zero bits (+0.0) are special-cased: return the encoding held
        // in tagTypeNumberRegister instead.
        move(tagTypeNumberRegister, regT0);
        done.link(this);
#else
        // 32-bit encoding: spill the double just below the stack pointer,
        // then reload its tag and payload words into regT1/regT0.
        storeDouble(src, Address(stackPointerRegister, -(int)sizeof(double)));
        loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(JSValue, u.asBits.tag) - sizeof(double)), regT1);
        loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(JSValue, u.asBits.payload) - sizeof(double)), regT0);
        Jump lowNonZero = branchTestPtr(NonZero, regT1);
        Jump highNonZero = branchTestPtr(NonZero, regT0);
        // Both words zero (+0.0): return the int32 value 0 instead.
        move(TrustedImm32(0), regT0);
        // NOTE(review): unqualified Int32Tag here vs JSValue::Int32Tag in
        // tagReturnAsInt32() below — confirm both resolve to the same
        // constant on 32-bit builds.
        move(TrustedImm32(Int32Tag), regT1);
        lowNonZero.link(this);
        highNonZero.link(this);
#endif
        loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
        ret();
    }

    // Tags src as an int32 JSValue, restores the caller's frame, and
    // returns.
    void returnInt32(RegisterID src)
    {
        if (src != regT0)
            move(src, regT0);
        tagReturnAsInt32();
        loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
        ret();
    }

    // Tags src as a cell JSValue, restores the caller's frame, and returns.
    void returnJSCell(RegisterID src)
    {
        if (src != regT0)
            move(src, regT0);
        tagReturnAsJSCell();
        loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
        ret();
    }

    // Links the generated code and returns an executable reference: all
    // accumulated failure jumps branch to 'fallback' (the generic slow-path
    // entry), and every call recorded via callDoubleToDouble() is bound to
    // its target function. thunkKind only labels the code for diagnostics.
    MacroAssemblerCodeRef finalize(VM& vm, MacroAssemblerCodePtr fallback, const char* thunkKind)
    {
        LinkBuffer patchBuffer(vm, this, GLOBAL_THUNK_ID);
        patchBuffer.link(m_failures, CodeLocationLabel(fallback));
        for (unsigned i = 0; i < m_calls.size(); i++)
            patchBuffer.link(m_calls[i].first, m_calls[i].second);
        return FINALIZE_CODE(patchBuffer, ("Specialized thunk for %s", thunkKind));
    }

    // Assumes that the target function uses fpRegister0 as the first argument
    // and return value. Like any sensible architecture would.
    void callDoubleToDouble(FunctionPtr function)
    {
        // The call target is recorded here and linked later, in finalize().
        m_calls.append(std::make_pair(call(), function));
    }

    // Same as callDoubleToDouble(), but preserves the return address around
    // the call (in regT3) on non-x86 targets, where the call sequence would
    // otherwise clobber it.
    void callDoubleToDoublePreservingReturn(FunctionPtr function)
    {
        if (!isX86())
            preserveReturnAddressAfterCall(regT3);
        callDoubleToDouble(function);
        if (!isX86())
            restoreReturnAddressBeforeReturn(regT3);
    }

private:
    // Marks regT0 as an int32 JSValue: JSVALUE64 ORs in the number tag;
    // 32-bit sets the tag register.
    void tagReturnAsInt32()
    {
#if USE(JSVALUE64)
        or64(tagTypeNumberRegister, regT0);
#else
        move(TrustedImm32(JSValue::Int32Tag), regT1);
#endif
    }

    // Marks the return value as a cell; no code is emitted on JSVALUE64.
    void tagReturnAsJSCell()
    {
#if USE(JSVALUE32_64)
        move(TrustedImm32(JSValue::CellTag), regT1);
#endif
    }

    // Guard jumps linked to the fallback entry point in finalize().
    MacroAssembler::JumpList m_failures;
    // Calls awaiting linking: (call site, target function) pairs.
    Vector<std::pair<Call, FunctionPtr> > m_calls;
};

} // namespace JSC
#endif // ENABLE(JIT)

#endif // SpecializedThunkJIT_h