JITPropertyAccess32_64.cpp

  1. /*
  2. * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
  3. *
  4. * Redistribution and use in source and binary forms, with or without
  5. * modification, are permitted provided that the following conditions
  6. * are met:
  7. * 1. Redistributions of source code must retain the above copyright
  8. * notice, this list of conditions and the following disclaimer.
  9. * 2. Redistributions in binary form must reproduce the above copyright
  10. * notice, this list of conditions and the following disclaimer in the
  11. * documentation and/or other materials provided with the distribution.
  12. *
  13. * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
  14. * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  15. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  16. * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
  17. * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
  18. * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  19. * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  20. * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
  21. * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  22. * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  23. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  24. */
  25. #include "config.h"
  26. #if ENABLE(JIT)
  27. #if USE(JSVALUE32_64)
  28. #include "JIT.h"
  29. #include "CodeBlock.h"
  30. #include "GCAwareJITStubRoutine.h"
  31. #include "Interpreter.h"
  32. #include "JITInlines.h"
  33. #include "JITStubCall.h"
  34. #include "JSArray.h"
  35. #include "JSFunction.h"
  36. #include "JSPropertyNameIterator.h"
  37. #include "JSVariableObject.h"
  38. #include "LinkBuffer.h"
  39. #include "RepatchBuffer.h"
  40. #include "ResultType.h"
  41. #include "SamplingTool.h"
  42. #include <wtf/StringPrintStream.h>
  43. #ifndef NDEBUG
  44. #include <stdio.h>
  45. #endif
  46. using namespace std;
  47. namespace JSC {
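// In the JSVALUE32_64 representation a JSValue is eight bytes wide, split into a 32-bit tag
// (JSValue::Int32Tag, CellTag, EmptyValueTag, ...) and a 32-bit payload. Every value in this file
// is therefore handled as a tag/payload register pair (typically regT1/regT0), and property loads
// and stores are emitted as two 32-bit accesses rather than a single 64-bit access.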
  48. void JIT::emit_op_put_by_index(Instruction* currentInstruction)
  49. {
  50. unsigned base = currentInstruction[1].u.operand;
  51. unsigned property = currentInstruction[2].u.operand;
  52. unsigned value = currentInstruction[3].u.operand;
  53. JITStubCall stubCall(this, cti_op_put_by_index);
  54. stubCall.addArgument(base);
  55. stubCall.addArgument(TrustedImm32(property));
  56. stubCall.addArgument(value);
  57. stubCall.call();
  58. }
  59. void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
  60. {
  61. unsigned base = currentInstruction[1].u.operand;
  62. unsigned property = currentInstruction[2].u.operand;
  63. unsigned getter = currentInstruction[3].u.operand;
  64. unsigned setter = currentInstruction[4].u.operand;
  65. JITStubCall stubCall(this, cti_op_put_getter_setter);
  66. stubCall.addArgument(base);
  67. stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
  68. stubCall.addArgument(getter);
  69. stubCall.addArgument(setter);
  70. stubCall.call();
  71. }
  72. void JIT::emit_op_del_by_id(Instruction* currentInstruction)
  73. {
  74. unsigned dst = currentInstruction[1].u.operand;
  75. unsigned base = currentInstruction[2].u.operand;
  76. unsigned property = currentInstruction[3].u.operand;
  77. JITStubCall stubCall(this, cti_op_del_by_id);
  78. stubCall.addArgument(base);
  79. stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
  80. stubCall.call(dst);
  81. }
  82. JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
  83. {
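// Register contract, as used by the caller in emitSlow_op_get_by_val: regT0 holds the base
// JSString*, regT2 holds the int32 index. On success the stub returns the single-character
// JSString* in regT0 with CellTag in regT1; on failure it returns 0 in regT0, which the caller
// null-checks before falling back to the generic slow path.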
  84. JSInterfaceJIT jit;
  85. JumpList failures;
  86. failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(vm->stringStructure.get())));
  87. // Load string length to regT1, and start the process of loading the data pointer into regT0
  88. jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
  89. jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
  90. failures.append(jit.branchTest32(Zero, regT0));
  91. // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
  92. failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
  93. // Load the character
  94. JumpList is16Bit;
  95. JumpList cont8Bit;
  96. // Load the string flags
  97. jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT1);
  98. jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
  99. is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
  100. jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
  101. cont8Bit.append(jit.jump());
  102. is16Bit.link(&jit);
  103. jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);
  104. cont8Bit.link(&jit);
  105. failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
  106. jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
  107. jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
  108. jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
  109. jit.ret();
  110. failures.link(&jit);
  111. jit.move(TrustedImm32(0), regT0);
  112. jit.ret();
  113. LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
  114. return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
  115. }
  116. void JIT::emit_op_get_by_val(Instruction* currentInstruction)
  117. {
  118. unsigned dst = currentInstruction[1].u.operand;
  119. unsigned base = currentInstruction[2].u.operand;
  120. unsigned property = currentInstruction[3].u.operand;
  121. ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
  122. emitLoad2(base, regT1, regT0, property, regT3, regT2);
  123. addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
  124. emitJumpSlowCaseIfNotJSCell(base, regT1);
  125. loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
  126. emitArrayProfilingSite(regT1, regT3, profile);
  127. and32(TrustedImm32(IndexingShapeMask), regT1);
  128. PatchableJump badType;
  129. JumpList slowCases;
  130. JITArrayMode mode = chooseArrayMode(profile);
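// Specialize the fast path for the indexing shape the array profile predicts. badType is kept
// patchable (and recorded in the ByValCompilationInfo below) so the access can be repatched for a
// different shape later if this prediction turns out to be wrong.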
  131. switch (mode) {
  132. case JITInt32:
  133. slowCases = emitInt32GetByVal(currentInstruction, badType);
  134. break;
  135. case JITDouble:
  136. slowCases = emitDoubleGetByVal(currentInstruction, badType);
  137. break;
  138. case JITContiguous:
  139. slowCases = emitContiguousGetByVal(currentInstruction, badType);
  140. break;
  141. case JITArrayStorage:
  142. slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
  143. break;
  144. default:
  145. CRASH();
  146. }
  147. addSlowCase(badType);
  148. addSlowCase(slowCases);
  149. Label done = label();
  150. #if !ASSERT_DISABLED
  151. Jump resultOK = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
  152. breakpoint();
  153. resultOK.link(this);
  154. #endif
  155. emitValueProfilingSite();
  156. emitStore(dst, regT1, regT0);
  157. map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
  158. m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
  159. }
  160. JIT::JumpList JIT::emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape)
  161. {
  162. JumpList slowCases;
  163. badType = patchableBranch32(NotEqual, regT1, TrustedImm32(expectedShape));
  164. loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
  165. slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));
  166. load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
  167. load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
  168. slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
  169. return slowCases;
  170. }
  171. JIT::JumpList JIT::emitDoubleGetByVal(Instruction*, PatchableJump& badType)
  172. {
  173. JumpList slowCases;
  174. badType = patchableBranch32(NotEqual, regT1, TrustedImm32(DoubleShape));
  175. loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
  176. slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));
  177. loadDouble(BaseIndex(regT3, regT2, TimesEight), fpRegT0);
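// A hole in a double array reads back as NaN; NaN is the only value that compares unequal to
// itself, so the self-comparison below sends holes to the slow case.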
  178. slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
  179. moveDoubleToInts(fpRegT0, regT0, regT1);
  180. return slowCases;
  181. }
  182. JIT::JumpList JIT::emitArrayStorageGetByVal(Instruction*, PatchableJump& badType)
  183. {
  184. JumpList slowCases;
  185. add32(TrustedImm32(-ArrayStorageShape), regT1, regT3);
  186. badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));
  187. loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
  188. slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));
  189. load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
  190. load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
  191. slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
  192. return slowCases;
  193. }
  194. void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
  195. {
  196. unsigned dst = currentInstruction[1].u.operand;
  197. unsigned base = currentInstruction[2].u.operand;
  198. unsigned property = currentInstruction[3].u.operand;
  199. ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
  200. linkSlowCase(iter); // property int32 check
  201. linkSlowCaseIfNotJSCell(iter, base); // base cell check
  202. Jump nonCell = jump();
  203. linkSlowCase(iter); // base array check
  204. Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
  205. emitNakedCall(m_vm->getCTIStub(stringGetByValStubGenerator).code());
  206. Jump failed = branchTestPtr(Zero, regT0);
  207. emitStore(dst, regT1, regT0);
  208. emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
  209. failed.link(this);
  210. notString.link(this);
  211. nonCell.link(this);
  212. Jump skipProfiling = jump();
  213. linkSlowCase(iter); // vector length check
  214. linkSlowCase(iter); // empty value
  215. emitArrayProfileOutOfBoundsSpecialCase(profile);
  216. skipProfiling.link(this);
  217. Label slowPath = label();
  218. JITStubCall stubCall(this, cti_op_get_by_val);
  219. stubCall.addArgument(base);
  220. stubCall.addArgument(property);
  221. Call call = stubCall.call(dst);
  222. m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
  223. m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
  224. m_byValInstructionIndex++;
  225. emitValueProfilingSite();
  226. }
  227. void JIT::emit_op_put_by_val(Instruction* currentInstruction)
  228. {
  229. unsigned base = currentInstruction[1].u.operand;
  230. unsigned property = currentInstruction[2].u.operand;
  231. ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
  232. emitLoad2(base, regT1, regT0, property, regT3, regT2);
  233. addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
  234. emitJumpSlowCaseIfNotJSCell(base, regT1);
  235. loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
  236. emitArrayProfilingSite(regT1, regT3, profile);
  237. and32(TrustedImm32(IndexingShapeMask), regT1);
  238. PatchableJump badType;
  239. JumpList slowCases;
  240. JITArrayMode mode = chooseArrayMode(profile);
  241. switch (mode) {
  242. case JITInt32:
  243. slowCases = emitInt32PutByVal(currentInstruction, badType);
  244. break;
  245. case JITDouble:
  246. slowCases = emitDoublePutByVal(currentInstruction, badType);
  247. break;
  248. case JITContiguous:
  249. slowCases = emitContiguousPutByVal(currentInstruction, badType);
  250. break;
  251. case JITArrayStorage:
  252. slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
  253. break;
  254. default:
  255. CRASH();
  256. break;
  257. }
  258. addSlowCase(badType);
  259. addSlowCase(slowCases);
  260. Label done = label();
  261. m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
  262. }
  263. JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
  264. {
  265. unsigned value = currentInstruction[3].u.operand;
  266. ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
  267. JumpList slowCases;
  268. badType = patchableBranch32(NotEqual, regT1, TrustedImm32(indexingShape));
  269. loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
  270. Jump outOfBounds = branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength()));
  271. Label storeResult = label();
  272. emitLoad(value, regT1, regT0);
  273. switch (indexingShape) {
  274. case Int32Shape:
  275. slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
  276. // Fall through.
  277. case ContiguousShape:
  278. store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
  279. store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
  280. break;
  281. case DoubleShape: {
  282. Jump notInt = branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag));
  283. convertInt32ToDouble(regT0, fpRegT0);
  284. Jump ready = jump();
  285. notInt.link(this);
  286. moveIntsToDouble(regT0, regT1, fpRegT0, fpRegT1);
  287. slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
  288. ready.link(this);
  289. storeDouble(fpRegT0, BaseIndex(regT3, regT2, TimesEight));
  290. break;
  291. }
  292. default:
  293. CRASH();
  294. break;
  295. }
  296. Jump done = jump();
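// Out-of-bounds store: if the index is still within the allocated vector, treat it as an append -
// profile the store-to-hole, bump the public length to index + 1, and retry the store. Anything
// past the vector length takes the slow case.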
  297. outOfBounds.link(this);
  298. slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfVectorLength())));
  299. emitArrayProfileStoreToHoleSpecialCase(profile);
  300. add32(TrustedImm32(1), regT2, regT1);
  301. store32(regT1, Address(regT3, Butterfly::offsetOfPublicLength()));
  302. jump().linkTo(storeResult, this);
  303. done.link(this);
  304. emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
  305. return slowCases;
  306. }
  307. JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
  308. {
  309. unsigned value = currentInstruction[3].u.operand;
  310. ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
  311. JumpList slowCases;
  312. badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ArrayStorageShape));
  313. loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
  314. slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));
  315. Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
  316. Label storeResult(this);
  317. emitLoad(value, regT1, regT0);
  318. store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
  319. store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
  320. Jump end = jump();
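// Storing into a hole: bump m_numValuesInVector and, if the index is at or past the array's
// current length, grow the length to index + 1 before retrying the store.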
  321. empty.link(this);
  322. emitArrayProfileStoreToHoleSpecialCase(profile);
  323. add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
  324. branch32(Below, regT2, Address(regT3, ArrayStorage::lengthOffset())).linkTo(storeResult, this);
  325. add32(TrustedImm32(1), regT2, regT0);
  326. store32(regT0, Address(regT3, ArrayStorage::lengthOffset()));
  327. jump().linkTo(storeResult, this);
  328. end.link(this);
  329. emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
  330. return slowCases;
  331. }
  332. void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
  333. {
  334. unsigned base = currentInstruction[1].u.operand;
  335. unsigned property = currentInstruction[2].u.operand;
  336. unsigned value = currentInstruction[3].u.operand;
  337. ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
  338. linkSlowCase(iter); // property int32 check
  339. linkSlowCaseIfNotJSCell(iter, base); // base cell check
  340. linkSlowCase(iter); // base not array check
  341. JITArrayMode mode = chooseArrayMode(profile);
  342. switch (mode) {
  343. case JITInt32:
  344. case JITDouble:
  345. linkSlowCase(iter); // value type check
  346. break;
  347. default:
  348. break;
  349. }
  350. Jump skipProfiling = jump();
  351. linkSlowCase(iter); // out of bounds
  352. emitArrayProfileOutOfBoundsSpecialCase(profile);
  353. skipProfiling.link(this);
  354. Label slowPath = label();
  355. JITStubCall stubPutByValCall(this, cti_op_put_by_val);
  356. stubPutByValCall.addArgument(base);
  357. stubPutByValCall.addArgument(property);
  358. stubPutByValCall.addArgument(value);
  359. Call call = stubPutByValCall.call();
  360. m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
  361. m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
  362. m_byValInstructionIndex++;
  363. }
  364. void JIT::emit_op_get_by_id(Instruction* currentInstruction)
  365. {
  366. int dst = currentInstruction[1].u.operand;
  367. int base = currentInstruction[2].u.operand;
  368. Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
  369. emitLoad(base, regT1, regT0);
  370. emitJumpSlowCaseIfNotJSCell(base, regT1);
  371. compileGetByIdHotPath(ident);
  372. emitValueProfilingSite();
  373. emitStore(dst, regT1, regT0);
  374. map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
  375. }
  376. void JIT::compileGetByIdHotPath(Identifier* ident)
  377. {
  378. // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
  379. // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
  380. // to the array-length / prototype access trampolines), and finally we also record the property-map access offset as a label
  381. // to jump back to if one of these trampolines finds a match.
  382. if (*ident == m_vm->propertyNames->length && shouldEmitProfiling()) {
  383. loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
  384. emitArrayProfilingSiteForBytecodeIndex(regT2, regT3, m_bytecodeOffset);
  385. }
  386. BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
  387. Label hotPathBegin(this);
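// Inline cache: the structure pointer compared below and the two compact displacement loads start
// out as the patchGetByIdDefaultStructure / patchGetByIdDefaultOffset placeholders; patchGetByIdSelf()
// (later in this file) repatches them with the real Structure and payload/tag offsets once the
// property has actually been looked up.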
  388. DataLabelPtr structureToCompare;
  389. PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
  390. addSlowCase(structureCheck);
  391. ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
  392. DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
  393. DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
  394. Label putResult(this);
  395. END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
  396. m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, propertyStorageLoad, displacementLabel1, displacementLabel2, putResult));
  397. }
  398. void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
  399. {
  400. int dst = currentInstruction[1].u.operand;
  401. int base = currentInstruction[2].u.operand;
  402. int ident = currentInstruction[3].u.operand;
  403. compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
  404. emitValueProfilingSite();
  405. }
  406. void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter)
  407. {
  408. // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
  409. // so that we only need to track one pointer into the slow case code - we track a pointer to the location
  410. // of the call (which we can use to look up the patch information), but should an array-length or
  411. // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
  412. // the distance from the call to the head of the slow case.
  413. linkSlowCaseIfNotJSCell(iter, base);
  414. linkSlowCase(iter);
  415. BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
  416. Label coldPathBegin(this);
  417. JITStubCall stubCall(this, cti_op_get_by_id);
  418. stubCall.addArgument(regT1, regT0);
  419. stubCall.addArgument(TrustedImmPtr(ident));
  420. Call call = stubCall.call(dst);
  421. END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);
  422. // Track the location of the call; this will be used to recover patch information.
  423. m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
  424. }
  425. void JIT::emit_op_put_by_id(Instruction* currentInstruction)
  426. {
  427. // In order to be able to patch both the Structure and the object offset, we store one pointer ('hotPathBegin')
  428. // to just after the point where the arguments have been loaded into registers, and we generate code
  429. // such that the Structure & offset are always at the same distance from it.
  430. int base = currentInstruction[1].u.operand;
  431. int value = currentInstruction[3].u.operand;
  432. emitLoad2(base, regT1, regT0, value, regT3, regT2);
  433. emitJumpSlowCaseIfNotJSCell(base, regT1);
  434. BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
  435. Label hotPathBegin(this);
  436. // It is important that the following instruction plants a 32-bit immediate, so that it can be patched over.
  437. DataLabelPtr structureToCompare;
  438. addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
  439. ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::butterflyOffset()), regT1);
  440. DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
  441. DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag
  442. END_UNINTERRUPTED_SEQUENCE(sequencePutById);
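// As with get_by_id above, the structure check and the two displacement stores are placeholders;
// patchPutByIdReplace(), later in this file, rewrites them with the real Structure and the
// payload/tag offsets of the cached property.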
  443. emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);
  444. m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, propertyStorageLoad, displacementLabel1, displacementLabel2));
  445. }
  446. void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
  447. {
  448. int base = currentInstruction[1].u.operand;
  449. int ident = currentInstruction[2].u.operand;
  450. int direct = currentInstruction[8].u.operand;
  451. linkSlowCaseIfNotJSCell(iter, base);
  452. linkSlowCase(iter);
  453. JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
  454. stubCall.addArgument(base);
  455. stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
  456. stubCall.addArgument(regT3, regT2);
  457. Call call = stubCall.call();
  458. // Track the location of the call; this will be used to recover patch information.
  459. m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
  460. }
  461. // Compile a store into an object's property storage. May overwrite base.
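// Inline offsets live directly in the object cell, so the base register can be used as-is;
// out-of-line offsets live in the butterfly, which must be loaded first (clobbering base).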
  462. void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
  463. {
  464. if (isOutOfLineOffset(cachedOffset))
  465. loadPtr(Address(base, JSObject::butterflyOffset()), base);
  466. emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
  467. }
  468. // Compile a load from an object's property storage. May overwrite base.
  469. void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
  470. {
  471. if (isInlineOffset(cachedOffset)) {
  472. emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
  473. return;
  474. }
  475. RegisterID temp = resultPayload;
  476. loadPtr(Address(base, JSObject::butterflyOffset()), temp);
  477. emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
  478. }
  479. void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
  480. {
  481. if (isInlineOffset(cachedOffset)) {
  482. move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
  483. load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
  484. load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
  485. return;
  486. }
  487. loadPtr(base->butterflyAddress(), resultTag);
  488. load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
  489. load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
  490. }
  491. void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
  492. {
  493. // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
  494. #if CPU(MIPS) || CPU(SH4) || CPU(ARM)
  495. // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
  496. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
  497. // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
  498. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
  499. #else
  500. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
  501. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
  502. #endif
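// Structure-transition stub: check that the base still has the old Structure and that nothing on
// the prototype chain intercepts the property, grow the out-of-line storage if the new Structure
// needs more capacity, then install the new Structure, store the value at cachedOffset and return.
// Any failed check tail-calls back to the put_by_id C++ slow path.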
  503. JumpList failureCases;
  504. failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
  505. failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
  506. testPrototype(oldStructure->storedPrototype(), failureCases, stubInfo);
  507. if (!direct) {
  508. // Verify that nothing in the prototype chain has a setter for this property.
  509. for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
  510. testPrototype((*it)->storedPrototype(), failureCases, stubInfo);
  511. }
  512. // If we succeed in all of our checks, and the code was optimizable, then make sure we
  513. // decrement the rare case counter.
  514. #if ENABLE(VALUE_PROFILER)
  515. if (m_codeBlock->canCompileWithDFG() >= DFG::MayInline) {
  516. sub32(
  517. TrustedImm32(1),
  518. AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
  519. }
  520. #endif
  521. // Reallocate property storage if needed.
  522. Call callTarget;
  523. bool willNeedStorageRealloc = oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity();
  524. if (willNeedStorageRealloc) {
  525. // This trampoline was called like a JIT stub; before we can call again we need to
  526. // remove the return address from the stack, to prevent the stack from becoming misaligned.
  527. preserveReturnAddressAfterCall(regT3);
  528. JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
  529. stubCall.skipArgument(); // base
  530. stubCall.skipArgument(); // ident
  531. stubCall.skipArgument(); // value
  532. stubCall.addArgument(TrustedImm32(oldStructure->outOfLineCapacity()));
  533. stubCall.addArgument(TrustedImmPtr(newStructure));
  534. stubCall.call(regT0);
  535. restoreReturnAddressBeforeReturn(regT3);
  536. #if CPU(MIPS) || CPU(SH4) || CPU(ARM)
  537. // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
  538. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
  539. // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
  540. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
  541. #else
  542. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
  543. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
  544. #endif
  545. }
  546. emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
  547. storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
  548. #if CPU(MIPS) || CPU(SH4) || CPU(ARM)
  549. // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
  550. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
  551. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
  552. #else
  553. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
  554. load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
  555. #endif
  556. compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);
  557. ret();
  558. ASSERT(!failureCases.empty());
  559. failureCases.link(this);
  560. restoreArgumentReferenceForTrampoline();
  561. Call failureCall = tailRecursiveCall();
  562. LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
  563. patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
  564. if (willNeedStorageRealloc) {
  565. ASSERT(m_calls.size() == 1);
  566. patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
  567. }
  568. stubInfo->stubRoutine = createJITStubRoutine(
  569. FINALIZE_CODE(
  570. patchBuffer,
  571. ("Baseline put_by_id transition stub for %s, return point %p",
  572. toCString(*m_codeBlock).data(), returnAddress.value())),
  573. *m_vm,
  574. m_codeBlock->ownerExecutable(),
  575. willNeedStorageRealloc,
  576. newStructure);
  577. RepatchBuffer repatchBuffer(m_codeBlock);
  578. repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
  579. }
  580. void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
  581. {
  582. RepatchBuffer repatchBuffer(codeBlock);
  583. // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
  584. // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
  585. repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
  586. // Patch the offset into the property map to load from, then patch the Structure to look for.
  587. repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
  588. repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.get.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
  589. repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
  590. repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
  591. }
  592. void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, bool direct)
  593. {
  594. RepatchBuffer repatchBuffer(codeBlock);
  595. // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
  596. // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
  597. repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
  598. // Patch the offset into the property map to load from, then patch the Structure to look for.
  599. repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
  600. repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.put.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
  601. repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
  602. repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
  603. }
  604. void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
  605. {
  606. StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
  607. // regT0 holds a JSCell*
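// Array-length stub: if the cell is an array with a real indexing shape, read the length out of
// the butterfly and return it as an int32; otherwise jump back to the original get_by_id slow case.
// The INT_MAX check rejects lengths that cannot be represented as a signed 32-bit int.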
  608. // Check for array
  609. loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
  610. Jump failureCases1 = branchTest32(Zero, regT2, TrustedImm32(IsArray));
  611. Jump failureCases2 = branchTest32(Zero, regT2, TrustedImm32(IndexingShapeMask));
  612. // Checks out okay! - get the length from the storage
  613. loadPtr(Address(regT0, JSArray::butterflyOffset()), regT2);
  614. load32(Address(regT2, ArrayStorage::lengthOffset()), regT2);
  615. Jump failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
  616. move(regT2, regT0);
  617. move(TrustedImm32(JSValue::Int32Tag), regT1);
  618. Jump success = jump();
  619. LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
  620. // Use the patch information to link the failure cases back to the original slow case routine.
  621. CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
  622. patchBuffer.link(failureCases1, slowCaseBegin);
  623. patchBuffer.link(failureCases2, slowCaseBegin);
  624. patchBuffer.link(failureCases3, slowCaseBegin);
  625. // On success, jump back to the hot path code at the point where it performs the store to dst for us.
  626. patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
  627. // Track the stub we have created so that it will be deleted later.
  628. stubInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
  629. patchBuffer,
  630. ("Baseline get_by_id array length stub for %s, return point %p",
  631. toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
  632. stubInfo->patch.baseline.u.get.putResult).executableAddress()));
  633. // Finally patch the jump to slow case back in the hot path to jump here instead.
  634. CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
  635. RepatchBuffer repatchBuffer(m_codeBlock);
  636. repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
  637. // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
  638. repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
  639. }
  640. void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
  641. {
  642. // regT0 holds a JSCell*
  643. // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
  644. // referencing the prototype object - let's speculatively load its table nice and early!)
  645. JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
  646. Jump failureCases1 = checkStructure(regT0, structure);
  647. // Check the prototype object's Structure had not changed.
  648. Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);
  649. bool needsStubLink = false;
  650. // Checks out okay!
  651. if (slot.cachedPropertyType() == PropertySlot::Getter) {
  652. needsStubLink = true;
  653. compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
  654. JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
  655. stubCall.addArgument(regT1);
  656. stubCall.addArgument(regT0);
  657. stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
  658. stubCall.call();
  659. } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
  660. needsStubLink = true;
  661. JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
  662. stubCall.addArgument(TrustedImmPtr(protoObject));
  663. stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
  664. stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
  665. stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
  666. stubCall.call();
  667. } else
  668. compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
  669. Jump success = jump();
  670. LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
  671. // Use the patch information to link the failure cases back to the original slow case routine.
  672. CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
  673. patchBuffer.link(failureCases1, slowCaseBegin);
  674. if (failureCases2.isSet())
  675. patchBuffer.link(failureCases2, slowCaseBegin);
  676. // On success, jump back to the hot path code at the point where it performs the store to dst for us.
  677. patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
  678. if (needsStubLink) {
  679. for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
  680. if (iter->to)
  681. patchBuffer.link(iter->from, FunctionPtr(iter->to));
  682. }
  683. }
  684. // Track the stub we have created so that it will be deleted later.
  685. stubInfo->stubRoutine = createJITStubRoutine(
  686. FINALIZE_CODE(
  687. patchBuffer,
  688. ("Baseline get_by_id proto stub for %s, return point %p",
  689. toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
  690. stubInfo->patch.baseline.u.get.putResult).executableAddress())),
  691. *m_vm,
  692. m_codeBlock->ownerExecutable(),
  693. needsStubLink);
  694. // Finally patch the jump to slow case back in the hot path to jump here instead.
  695. CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
  696. RepatchBuffer repatchBuffer(m_codeBlock);
  697. repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
  698. // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
  699. repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
  700. }
  701. void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
  702. {
  703. // regT0 holds a JSCell*
  704. Jump failureCase = checkStructure(regT0, structure);
  705. bool needsStubLink = false;
  706. bool isDirect = false;
  707. if (slot.cachedPropertyType() == PropertySlot::Getter) {
  708. needsStubLink = true;
  709. compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
  710. JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
  711. stubCall.addArgument(regT1);
  712. stubCall.addArgument(regT0);
  713. stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
  714. stubCall.call();
  715. } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
  716. needsStubLink = true;
  717. JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
  718. stubCall.addArgument(regT0);
  719. stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
  720. stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
  721. stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
  722. stubCall.call();
  723. } else {
  724. isDirect = true;
  725. compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
  726. }
  727. Jump success = jump();
  728. LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
  729. if (needsStubLink) {
  730. for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
  731. if (iter->to)
  732. patchBuffer.link(iter->from, FunctionPtr(iter->to));
  733. }
  734. }
  735. // Use the patch information to link the failure cases back to the original slow case routine.
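// Polymorphic caching: each newly generated case chains its structure-check failure to the stub
// generated for the previous case (or to the original slow path for the first case), so a miss
// walks the accumulated list of stubs before giving up.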
  736. CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(polymorphicStructures->list[currentIndex - 1].stubRoutine));
  737. if (!lastProtoBegin)
  738. lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
  739. patchBuffer.link(failureCase, lastProtoBegin);
  740. // On success, jump back to the hot path code at the point where it performs the store to dst for us.
  741. patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
  742. RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
  743. FINALIZE_CODE(
  744. patchBuffer,
  745. ("Baseline get_by_id self list stub for %s, return point %p",
  746. toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
  747. stubInfo->patch.baseline.u.get.putResult).executableAddress())),
  748. *m_vm,
  749. m_codeBlock->ownerExecutable(),
  750. needsStubLink);
  751. polymorphicStructures->list[currentIndex].set(*m_vm, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
  752. // Finally patch the jump to slow case back in the hot path to jump here instead.
  753. CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
  754. RepatchBuffer repatchBuffer(m_codeBlock);
  755. repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
  756. }
  757. void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
  758. {
  759. // regT0 holds a JSCell*
  760. // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
  761. // referencing the prototype object - let's speculatively load its table nice and early!)
  762. JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
  763. // Check that regT0 holds an object with the right Structure.
  764. Jump failureCases1 = checkStructure(regT0, structure);
  765. // Check the prototype object's Structure had not changed.
  766. Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);
  767. bool needsStubLink = false;
  768. bool isDirect = false;
  769. if (slot.cachedPropertyType() == PropertySlot::Getter) {
  770. needsStubLink = true;
  771. compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
  772. JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
  773. stubCall.addArgument(regT1);
  774. stubCall.addArgument(regT0);
  775. stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
  776. stubCall.call();
  777. } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
  778. needsStubLink = true;
  779. JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
  780. stubCall.addArgument(TrustedImmPtr(protoObject));
  781. stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
  782. stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
  783. stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
  784. stubCall.call();
  785. } else {
  786. isDirect = true;
  787. compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
  788. }
  789. Jump success = jump();
  790. LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
  791. if (needsStubLink) {
  792. for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
  793. if (iter->to)
  794. patchBuffer.link(iter->from, FunctionPtr(iter->to));
  795. }
  796. }
  797. // Use the patch information to link the failure cases back to the original slow case routine.
  798. CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
  799. patchBuffer.link(failureCases1, lastProtoBegin);
  800. if (failureCases2.isSet())
  801. patchBuffer.link(failureCases2, lastProtoBegin);
  802. // On success, jump back to the hot path code at the point where it performs the store to dst for us.
  803. patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
  804. RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
  805. FINALIZE_CODE(
  806. patchBuffer,
  807. ("Baseline get_by_id proto list stub for %s, return point %p",
  808. toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
  809. stubInfo->patch.baseline.u.get.putResult).executableAddress())),
  810. *m_vm,
  811. m_codeBlock->ownerExecutable(),
  812. needsStubLink);
  813. prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);
  814. // Finally patch the jump to slow case back in the hot path to jump here instead.
  815. CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
  816. RepatchBuffer repatchBuffer(m_codeBlock);
  817. repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
  818. }
  819. void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
  820. {
  821. // regT0 holds a JSCell*
  822. ASSERT(count);
  823. JumpList bucketsOfFail;
  824. // Check that regT0 holds an object with the right Structure.
  825. bucketsOfFail.append(checkStructure(regT0, structure));
  826. Structure* currStructure = structure;
  827. WriteBarrier<Structure>* it = chain->head();
  828. JSObject* protoObject = 0;
  829. for (unsigned i = 0; i < count; ++i, ++it) {
  830. protoObject = asObject(currStructure->prototypeForLookup(callFrame));
  831. currStructure = it->get();
  832. testPrototype(protoObject, bucketsOfFail, stubInfo);
  833. }
  834. ASSERT(protoObject);
  835. bool needsStubLink = false;
  836. bool isDirect = false;
  837. if (slot.cachedPropertyType() == PropertySlot::Getter) {
  838. needsStubLink = true;
  839. compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
  840. JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
  841. stubCall.addArgument(regT1);
  842. stubCall.addArgument(regT0);
  843. stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
  844. stubCall.call();
  845. } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
  846. needsStubLink = true;
  847. JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
  848. stubCall.addArgument(TrustedImmPtr(protoObject));
  849. stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
  850. stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
  851. stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
  852. stubCall.call();
  853. } else {
  854. isDirect = true;
  855. compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
  856. }
  857. Jump success = jump();
  858. LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
  859. if (needsStubLink) {
  860. for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
  861. if (iter->to)
  862. patchBuffer.link(iter->from, FunctionPtr(iter->to));
  863. }
  864. }
  865. // Use the patch information to link the failure cases back to the original slow case routine.
  866. CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
  867. patchBuffer.link(bucketsOfFail, lastProtoBegin);
  868. // On success, jump back to the hot path code at the point where it performs the store to dst for us.
  869. patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
  870. RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
  871. FINALIZE_CODE(
  872. patchBuffer,
  873. ("Baseline get_by_id chain list stub for %s, return point %p",
  874. toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
  875. stubInfo->patch.baseline.u.get.putResult).executableAddress())),
  876. *m_vm,
  877. m_codeBlock->ownerExecutable(),
  878. needsStubLink);
  879. // Track the stub we have created so that it will be deleted later.
  880. prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);
  881. // Finally patch the jump to slow case back in the hot path to jump here instead.
  882. CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
  883. RepatchBuffer repatchBuffer(m_codeBlock);
  884. repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
  885. }
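
// Builds the monomorphic stub for a get_by_id that was cached against a prototype
// chain: verify the base cell's Structure, walk 'count' prototypes verifying each
// one, then either load the cached slot directly or call out to the getter/custom
// stub. Unlike the list variant above, a miss falls back to the original slow case,
// and the slow-path call site is relinked so that the next miss builds a proto list
// instead of repatching this stub again.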
void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check that regT0 is an object with the expected Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail, stubInfo);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));

    // On success, jump back to the hot path, which performs the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id chain stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);
    stubInfo->stubRoutine = stubRoutine;

    // Finally, repatch the structure-check jump in the hot path so that it targets this stub instead of the slow case.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));

    // We don't want to patch more than once - in future, go to cti_op_get_by_id_proto_list.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}
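
// Loads the JSValue at the dynamic PropertyOffset held in 'offset' out of 'base'
// into the (resultTag, resultPayload) pair, clobbering both 'base' and 'offset'.
// Out-of-line properties are addressed off the butterfly with a negated offset, so
// for that case the payload load below works out to roughly (a sketch of the
// address arithmetic, not additional emitted code):
//
//     butterfly + (firstOutOfLineOffset - 2 - offset) * sizeof(EncodedJSValue) + payloadOffset
//
// When finalObjectMode is MayBeFinal and the offset is inline, 'base' is instead
// rebased so that the same BaseIndex expression reads from the inline storage.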
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
{
    ASSERT(sizeof(JSValue) == 8);

    if (finalObjectMode == MayBeFinal) {
        Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
        Jump done = jump();
        isInline.link(this);
        addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base);
        done.link(this);
    } else {
#if !ASSERT_DISABLED
        Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
        breakpoint();
        isOutOfLine.link(this);
#endif
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
    }
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultPayload);
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultTag);
}
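
// get_by_pname is the fast path for property reads inside for-in: the property
// name comes from a JSPropertyNameIterator, so if the base still has the
// iterator's cached Structure the value can be loaded by index without a lookup,
// adjusting the index past the inline capacity when the slot is out of line.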
void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;

    emitLoad2(property, regT1, regT0, base, regT3, regT2);
    emitJumpSlowCaseIfNotJSCell(property, regT1);
    addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
    // The property matched the expected value, so its registers are free for reuse.
    emitJumpSlowCaseIfNotJSCell(base, regT3);
    emitLoadPayload(iter, regT1);

    // Test base's structure
    loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
    addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    Jump inlineProperty = branch32(Below, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)));
    add32(TrustedImm32(firstOutOfLineOffset), regT3);
    sub32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)), regT3);
    inlineProperty.link(this);
    compileGetDirectOffset(regT2, regT1, regT0, regT3);

    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
}
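
// Slow path for get_by_pname: link each slow case registered above in order,
// then fall back to the generic get_by_val stub.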
void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);
}
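
// Reads a variable from an enclosing scope: walk 'skip' links up the scope chain
// (skipping over a not-yet-created activation at the top level if necessary),
// then load both halves of the value from the scope's register storage.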
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}
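
// Store counterpart of op_get_scoped_var: walk up the scope chain the same way,
// store the tag/payload pair into the scope's registers, and emit a write barrier
// on the scope object in case a cell was stored into it.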
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
    emitStore(index, regT1, regT0, regT3);
    emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}
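
// Initializes a global constant: the instruction carries a pointer straight to the
// global object's register, so the value is stored through that pointer, with a
// write barrier on the global object when write barriers are enabled.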
void JIT::emit_op_init_global_const(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);

    if (Heap::isWriteBarrierEnabled()) {
        move(TrustedImmPtr(globalObject), regT2);
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
    map(m_bytecodeOffset + OPCODE_LENGTH(op_init_global_const), value, regT1, regT0);
}
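
// Checked variant of op_init_global_const: if the byte at the instruction's
// predicate pointer is non-zero, bail to the slow case, which performs the
// initialization in the runtime instead.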
void JIT::emit_op_init_global_const_check(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);

    addSlowCase(branchTest8(NonZero, AbsoluteAddress(currentInstruction[3].u.predicatePointer)));

    if (Heap::isWriteBarrierEnabled()) {
        move(TrustedImmPtr(globalObject), regT2);
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
    unmap();
}
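
// Slow path for the checked variant: pass the value and the instruction's fourth
// operand through to cti_op_init_global_const_check.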
void JIT::emitSlow_op_init_global_const_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_init_global_const_check);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
    stubCall.call();
}
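
// Restores an inline-cached get_by_id site to its unpatched state: point the slow
// call back at cti_op_get_by_id, clear the inlined structure pointer and the two
// displacement labels, and relink the structure check to the cold path.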
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(unusedPointer));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
}
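
// Equivalent reset for put_by_id sites; the slow call is restored to either the
// direct or the ordinary put_by_id stub, depending on how the site was patched.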
void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(unusedPointer));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), 0);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)