DFGRepatch.cpp

/*
 * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGRepatch.h"

#if ENABLE(DFG_JIT)

#include "DFGCCallHelpers.h"
#include "DFGScratchRegisterAllocator.h"
#include "DFGSpeculativeJIT.h"
#include "DFGThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "LinkBuffer.h"
#include "Operations.h"
#include "PolymorphicPutByIdList.h"
#include "RepatchBuffer.h"
#include "StructureRareDataInlines.h"
#include <wtf/StringPrintStream.h>

namespace JSC { namespace DFG {

static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relink(call, newCalleeFunction);
}

#if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)
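// Patches a GetById/PutById fast path in place for a "self" access: the slow-path
// call is relinked (so we only optimize once), and the structure-check immediate
// plus the load/store offset in the original JIT code are rewritten to match the
// cached structure and property offset.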
void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr& slowPathFunction, bool compact)
{
    RepatchBuffer repatchBuffer(codeBlock);
    // Only optimize once!
    repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);
    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
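
// Emits a structure check for an object on a prototype chain. If the object already
// has the expected structure and its transition watchpoint set is still valid, we
// register a watchpoint instead of emitting a runtime check (with a debug-only
// assertion); otherwise a branch is appended to failureCases.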
static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
#if DFG_ENABLE(JIT_ASSERT)
        // If we execute this code, the object must have the structure we expect. Assert
        // this in debug modes.
        jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
        MacroAssembler::Jump ok = jit.branchPtr(
            MacroAssembler::Equal,
            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
            MacroAssembler::TrustedImmPtr(structure));
        jit.breakpoint();
        ok.link(&jit);
#endif
        return;
    }
    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        jit.branchPtr(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
            MacroAssembler::TrustedImmPtr(structure)));
}

static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;
    ASSERT(prototype.isCell());
    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}
#endif
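
// Points the inline cache's structure check at a newly generated stub. If the target
// supports replacing the patchable branch with a jump, the branch is rewritten in
// place; otherwise the existing structure-check jump is relinked.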
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabelPtrAtOffset(
                    -(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }
    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.dfg.deltaCallToStructCheck),
        CodeLocationLabel(target));
}

#if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();
        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);
        stubJit.pop(scratchGPR);
        fail = stubJit.jump();
        return;
    }
    success = stubJit.jump();
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);
    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }
    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
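
// Builds a stub for a prototype-chain access: check the base structure, add a
// transition check (or watchpoint) for each prototype on the chain, then load the
// property from the final prototype's inline storage or out-of-line butterfly.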
void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    MacroAssembler stubJit;
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;
    if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }
    MacroAssembler::JumpList failureCases;
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        addStructureTransitionCheck(
            protoObject, protoObject->structure(), exec->codeBlock(), stubInfo, stubJit,
            failureCases, scratchGPR);
        currStructure = it->get();
    }
    if (isInlineOffset(offset)) {
#if USE(JSVALUE64)
        stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    } else {
        stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    }
    MacroAssembler::Jump success, fail;
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG prototype chain access stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
}
#endif
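
// Tries to install an optimized GetById cache for this access: an array length stub,
// a patched self access, or a prototype-chain access stub. Returns false when the
// access is not cacheable, so the caller falls back to the generic operation.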
bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();
    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
#if ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT
        // this block is too complicated to handle in the VM process
        // (even with the help of MacroAssembler proxies) so we will
        // delegate this entire block to the JIT compiler process
        return tryCacheGetByID_vmstub(exec, baseValue, propertyName, slot, stubInfo);
#else
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
        bool needToRestoreScratch = false;
        MacroAssembler stubJit;
        if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
            stubJit.push(scratchGPR);
            needToRestoreScratch = true;
        }
        MacroAssembler::JumpList failureCases;
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSCell::structureOffset()), scratchGPR);
        stubJit.load8(MacroAssembler::Address(scratchGPR, Structure::indexingTypeOffset()), scratchGPR);
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
#if USE(JSVALUE64)
        stubJit.or64(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif
        MacroAssembler::Jump success, fail;
        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
        stubInfo.stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
            patchBuffer,
            ("DFG GetById array length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);
        return true;
#endif
    }
    // this function should only be called when the if block above is taken from the VM process
    // so we're putting a guard here just to be safe
    ASSERT_NOT_REACHED_BY_DETACHED_JIT();
#if !(ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT)
    // FIXME: should support length access for String.
    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (!structure->propertyAccessesAreCacheable())
        return false;
    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        if ((slot.cachedPropertyType() != PropertySlot::Value)
            || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }
        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
        return true;
    }
    if (structure->isDictionary())
        return false;
    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;
    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    ASSERT(slot.slotBase().isObject());
    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);
    RepatchBuffer repatchBuffer(codeBlock);
    replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);
    stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
    return true;
#endif // #if !(ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT)
}

void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
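
// Adds a case to the polymorphic self-access list for GetById. Besides plain value
// loads, this handles getters and custom getters by planting a call to a DFG helper
// followed by an exception check, as described by the strategy comment below.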
bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
#if !(ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT)
    // NOTE for detached jit, we can safely bypass the following checks since
    // they were already done in the VM process
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;
    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }
#endif
#if ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT
    return tryBuildGetByIDList_vmstub(exec, baseValue, ident, slot, stubInfo);
#else
    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    VM* vm = &exec->vm();
    ASSERT(slot.slotBase().isObject());
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;
    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }
    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
        CCallHelpers stubJit(vm, codeBlock);
        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));
        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.
        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;
        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                ASSERT(scratchGPR != InvalidGPRReg);
                ASSERT(baseGPR != scratchGPR);
                if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                } else {
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                }
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }
            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif
            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                if (baseGPR == resultTagGPR) {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                } else {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                }
#endif
            } else {
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            }
            success = stubJit.jump();
            isDirect = true;
        }
        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);
        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }
        RefPtr<JITStubRoutine> stubRoutine =
            createJITStubRoutine(
                FINALIZE_DFG_CODE(
                    patchBuffer,
                    ("DFG GetById polymorphic list access for %s, return point %p",
                        toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                            stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
                *vm,
                codeBlock->ownerExecutable(),
                slot.cachedPropertyType() == PropertySlot::Getter
                    || slot.cachedPropertyType() == PropertySlot::Custom);
        polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.dfg.deltaCallToStructCheck),
            CodeLocationLabel(stubRoutine->code().code()));
        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }
    return false;
#endif // ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT
}

void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

#if !(ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT)
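// Adds a prototype-chain case to the polymorphic GetById list, reusing
// generateProtoChainAccessStub and wiring each new stub's failure path to the
// previously generated stub.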
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;
    ASSERT(slot.slotBase().isObject());
    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;
    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex = 1;
    if (stubInfo.accessType == access_get_by_id_chain) {
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine.clear();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }
    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;
        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        ASSERT(!!lastProtoBegin);
        RefPtr<JITStubRoutine> stubRoutine;
        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);
        polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }
    return false;
}

void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
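
// Slow-path selection for PutById: picks the operation matching strict vs. non-strict
// mode and direct vs. normal puts; the second helper returns the list-building
// variants used while the polymorphic cache is still being populated.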
static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}
#endif // #if !(ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT)

#if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)
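// Emits a stub that replaces the value of an existing property: check the structure,
// emit a write barrier when profiling requires it, then store the value into inline
// storage or the out-of-line butterfly.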
void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;
#if ENABLE(WRITE_BARRIER_PROFILING)
    GPRReg scratchGPR2;
    const bool writeBarrierNeeded = true;
#else
    const bool writeBarrierNeeded = false;
#endif
    MacroAssembler stubJit;
    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR);
#endif
        needToRestoreScratch = true;
        stubJit.push(scratchGPR);
    }
    MacroAssembler::Jump badStructure = stubJit.branchPtr(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
        MacroAssembler::TrustedImmPtr(structure));
#if ENABLE(WRITE_BARRIER_PROFILING)
#if USE(JSVALUE64)
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
#else
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR, scratchGPR);
#endif
    stubJit.push(scratchGPR2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratchGPR2);
#endif
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();
        badStructure.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }
    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);
    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
}
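
// Emits a stub that adds a new property via a structure transition: check the old
// structure and the prototype chain, reallocate out-of-line storage from the copied
// space allocator if capacity grows (falling back to
// operationReallocateStorageAndFinishPut when inline allocation fails), then store
// the new structure and the value.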
void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    ScratchRegisterAllocator allocator(stubInfo.patch.dfg.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);
    CCallHelpers stubJit(vm);
    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);
    bool needSecondScratch = false;
    bool needThirdScratch = false;
#if ENABLE(WRITE_BARRIER_PROFILING)
    needSecondScratch = true;
#endif
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needSecondScratch = true;
        needThirdScratch = true;
    }
    GPRReg scratchGPR2;
    if (needSecondScratch) {
        scratchGPR2 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR2 != baseGPR);
        ASSERT(scratchGPR2 != valueGPR);
        ASSERT(scratchGPR2 != scratchGPR1);
    } else
        scratchGPR2 = InvalidGPRReg;
    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;
    allocator.preserveReusedRegistersByPushing(stubJit);
    MacroAssembler::JumpList failureCases;
    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);
    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }
#if ENABLE(WRITE_BARRIER_PROFILING)
    ASSERT(needSecondScratch);
    ASSERT(scratchGPR2 != InvalidGPRReg);
    // Must always emit this write barrier as the structure transition itself requires it
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR1, scratchGPR2, WriteBarrierForPropertyAccess);
#endif
    MacroAssembler::JumpList slowPath;
    bool scratchGPR1HasStorage = false;
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
            for (ptrdiff_t offset = 0; offset < static_cast<ptrdiff_t>(oldSize); offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }
        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }
    stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif
    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;
    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();
        failureCases.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();
    MacroAssembler::Call operationCall;
    MacroAssembler::Jump successInSlowPath;
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        slowPath.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSize());
        allocator.preserveUsedRegistersToScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
#if USE(JSVALUE64)
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
#else
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
#endif
        operationCall = stubJit.call();
        allocator.restoreUsedRegistersFromScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
        successInSlowPath = stubJit.jump();
    }
    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    if (allocator.didReuseRegisters())
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
        patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    }
    stubRoutine =
        createJITStubRoutine(
            FINALIZE_DFG_CODE(
                patchBuffer,
                ("DFG PutById %stransition stub (%p -> %p) for %s, return point %p",
                    structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
                    oldStructure, structure,
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
            *vm,
            exec->codeBlock()->ownerExecutable(),
            structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
            structure);
}
#endif // #if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)

#if !(ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT)
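// Tries to cache a PutById as either a transition stub (for a new property) or an
// in-place self replacement, bailing out for dictionaries and for transitions whose
// reallocation we cannot handle inline.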
  833. static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
  834. {
  835. CodeBlock* codeBlock = exec->codeBlock();
  836. VM* vm = &exec->vm();
  837. if (!baseValue.isCell())
  838. return false;
  839. JSCell* baseCell = baseValue.asCell();
  840. Structure* structure = baseCell->structure();
  841. Structure* oldStructure = structure->previousID();
  842. if (!slot.isCacheable())
  843. return false;
  844. if (structure->isUncacheableDictionary())
  845. return false;
  846. // Optimize self access.
  847. if (slot.base() == baseValue) {
  848. if (slot.type() == PutPropertySlot::NewProperty) {
  849. if (structure->isDictionary())
  850. return false;
  851. // Skip optimizing the case where we need a realloc, if we don't have
  852. // enough registers to make it happen.
  853. if (GPRInfo::numberOfRegisters < 6
  854. && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
  855. && oldStructure->outOfLineCapacity())
  856. return false;
  857. // Skip optimizing the case where we need realloc, and the structure has
  858. // indexing storage.
  859. if (hasIndexingHeader(oldStructure->indexingType()))
  860. return false;
  861. if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
  862. return false;
  863. StructureChain* prototypeChain = structure->prototypeChain(exec);
  864. emitPutTransitionStub(
  865. exec, baseValue, ident, slot, stubInfo, putKind,
  866. structure, oldStructure, prototypeChain,
  867. stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
  868. stubInfo.stubRoutine);
  869. RepatchBuffer repatchBuffer(codeBlock);
  870. repatchBuffer.relink(
  871. stubInfo.callReturnLocation.jumpAtOffset(
  872. stubInfo.patch.dfg.deltaCallToStructCheck),
  873. CodeLocationLabel(stubInfo.stubRoutine->code().code()));
  874. repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
  875. stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
  876. return true;
  877. }
  878. dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
  879. stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
  880. return true;
  881. }
  882. return false;
  883. }
  884. void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
  885. {
  886. bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
  887. if (!cached)
  888. dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
  889. }
  890. static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
  891. {
  892. CodeBlock* codeBlock = exec->codeBlock();
  893. VM* vm = &exec->vm();
  894. if (!baseValue.isCell())
  895. return false;
  896. JSCell* baseCell = baseValue.asCell();
  897. Structure* structure = baseCell->structure();
  898. Structure* oldStructure = structure->previousID();
  899. if (!slot.isCacheable())
  900. return false;
  901. if (structure->isUncacheableDictionary())
  902. return false;
  903. // Optimize self access.
  904. if (slot.base() == baseValue) {
  905. PolymorphicPutByIdList* list;
  906. RefPtr<JITStubRoutine> stubRoutine;
  907. if (slot.type() == PutPropertySlot::NewProperty) {
  908. if (structure->isDictionary())
  909. return false;
  910. // Skip optimizing the case where we need a realloc, if we don't have
  911. // enough registers to make it happen.
  912. if (GPRInfo::numberOfRegisters < 6
  913. && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
  914. && oldStructure->outOfLineCapacity())
  915. return false;
  916. // Skip optimizing the case where we need realloc, and the structure has
  917. // indexing storage.
  918. if (hasIndexingHeader(oldStructure->indexingType()))
  919. return false;
  920. if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
  921. return false;
  922. StructureChain* prototypeChain = structure->prototypeChain(exec);
  923. // We're now committed to creating the stub. Mogrify the meta-data accordingly.
  924. list = PolymorphicPutByIdList::from(
  925. putKind, stubInfo,
  926. stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
  927. emitPutTransitionStub(
  928. exec, baseValue, propertyName, slot, stubInfo, putKind,
  929. structure, oldStructure, prototypeChain,
  930. CodeLocationLabel(list->currentSlowPathTarget()),
  931. stubRoutine);
  932. list->addAccess(
  933. PutByIdAccess::transition(
  934. *vm, codeBlock->ownerExecutable(),
  935. oldStructure, structure, prototypeChain,
  936. stubRoutine));
  937. } else {
  938. // We're now committed to creating the stub. Mogrify the meta-data accordingly.
  939. list = PolymorphicPutByIdList::from(
  940. putKind, stubInfo,
  941. stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
  942. emitPutReplaceStub(
  943. exec, baseValue, propertyName, slot, stubInfo, putKind,
  944. structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
  945. list->addAccess(
  946. PutByIdAccess::replace(
  947. *vm, codeBlock->ownerExecutable(),
  948. structure, stubRoutine));
  949. }
  950. RepatchBuffer repatchBuffer(codeBlock);
  951. repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));
  952. if (list->isFull())
  953. repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
  954. return true;
  955. }
  956. return false;
  957. }
void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

#endif // #if !(ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT)
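
// Point the call's slow path at the generic virtual call or virtual construct thunk.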
static void linkSlowFor(RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualCallThunkGenerator).code());
        return;
    }
    ASSERT(kind == CodeForConstruct);
    repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualConstructThunkGenerator).code());
}
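
// Link a call site to a specific callee: record the callee in the CallLinkInfo,
// patch the fast path to jump straight to the callee's code, and route the slow
// path through the closure-call link thunk (for calls) or the virtual construct
// thunk (for constructs).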
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
{
    ASSERT(!callLinkInfo.stub);

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(&callLinkInfo);

    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGenerator).code());
        return;
    }

    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct);
}
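
// Relink only the slow path of a call site, leaving the fast path untouched.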
void dfgLinkSlowFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    RepatchBuffer repatchBuffer(callerCodeBlock);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind);
}

#if !(ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT)
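
// Build a closure call stub: the stub checks that the callee is a cell with the
// expected structure and executable, stores the callee's scope chain into the
// frame's ScopeChain slot, and near-calls the target code; any mismatch falls
// through to the virtual call thunk. The call site's fast path is then replaced
// with a jump to the stub and the slow path is relinked for CodeForCall.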
void dfgLinkClosureCall(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub);

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

#if USE(JSVALUE64)
    slowPath.append(
        stubJit.branchTest64(
            CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
    // We would have already checked that the callee is a cell.
#endif

    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureOffset()),
            CCallHelpers::TrustedImmPtr(structure)));

    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));

    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);

#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain)));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
#endif

    JITCompiler::Call call = stubJit.nearCall();
    JITCompiler::Jump done = stubJit.jump();

    slowPath.link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::nonArgGPR0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::nonArgGPR1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
    JITCompiler::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualCallThunkGenerator).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_DFG_CODE(
            patchBuffer,
            ("DFG closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
        *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));

    RepatchBuffer repatchBuffer(callerCodeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall);

    callLinkInfo.stub = stubRoutine.release();

    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}
#endif
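
// Reset a get_by_id access to its unoptimized state: repoint the slow call at
// operationGetByIdOptimize, clear the patched structure check and load offsets,
// and retarget the structure-check jump at the slow case.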
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
                JSCell::structureOffset()),
            reinterpret_cast<void*>(unusedPointer));
    }
    repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
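
// Reset a put_by_id access: pick the *Optimize variant matching the currently
// patched operation (strict or non-strict, direct or not), then clear the
// structure check, the store offsets, and the structure-check jump, as above.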
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
    V_DFGOperation_EJCI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
                JSCell::structureOffset()),
            reinterpret_cast<void*>(unusedPointer));
    }
    repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

} } // namespace JSC::DFG

#endif