/*
 * Copyright (C) 2008, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JIT_h
#define JIT_h

#if ENABLE(JIT)

// Verbose logging of code generation
#define ENABLE_JIT_VERBOSE 0
// Verbose logging for OSR-related code.
#define ENABLE_JIT_VERBOSE_OSR 0

// We've run into some problems where changing the size of the class JIT leads to
// performance fluctuations. Try forcing alignment in an attempt to stabilize this.
#if COMPILER(GCC)
#define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
#else
#define JIT_CLASS_ALIGNMENT
#endif

#define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
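// Illustrative use (the operand names here are hypothetical): after emitting a
// patchable sequence, assert that the displacement the assembler actually
// produced matches the constant the repatching code will later assume:
//     ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, displacementLabel), expectedPatchOffset);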

#include "CodeBlock.h"
#include "CompactJITCodeMap.h"
#include "Interpreter.h"
#include "JITDisassembler.h"
#include "JSInterfaceJIT.h"
#include "LegacyProfiler.h"
#include "Opcode.h"
#include "ResultType.h"
#include "UnusedPointer.h"
#include <bytecode/SamplingTool.h>

#if ENABLE(DETACHED_JIT)
#include <JITBridge.h>
#endif

namespace JSC {

    class CodeBlock;
    class FunctionExecutable;
    class JIT;
    class JSPropertyNameIterator;
    class Interpreter;
    class JSScope;
    class JSStack;
    class MarkedAllocator;
    class Register;
    class StructureChain;

    struct CallLinkInfo;
    struct Instruction;
    struct OperandTypes;
    struct PolymorphicAccessStructureList;
    struct SimpleJumpTable;
    struct StringJumpTable;
    struct StructureStubInfo;
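
    // Records a call emitted during the main compilation pass so that it can be
    // bound to its target (JIT code or a stub function) when the code is linked.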
    struct CallRecord {
        MacroAssembler::Call from;
        unsigned bytecodeOffset;
        void* to;

        CallRecord()
        {
        }

        CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
            : from(from)
            , bytecodeOffset(bytecodeOffset)
            , to(to)
        {
        }
    };
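
    // A forward jump recorded with the bytecode offset it targets; resolved by
    // the link pass once the target's machine-code location is known.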
    struct JumpTable {
        MacroAssembler::Jump from;
        unsigned toBytecodeOffset;

        JumpTable(MacroAssembler::Jump f, unsigned t)
            : from(f)
            , toBytecodeOffset(t)
        {
        }
    };
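
    // A fast-path bail-out jump, remembered with the bytecode offset whose
    // slow-case code should handle it.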
    struct SlowCaseEntry {
        MacroAssembler::Jump from;
        unsigned to;
        unsigned hint;

        SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
            : from(f)
            , to(t)
            , hint(h)
        {
        }
    };
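
    // Describes a switch encountered during compilation: which kind of jump
    // table it uses, plus the bytecode offsets of the switch and its default
    // target.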
    struct SwitchRecord {
        enum Type {
            Immediate,
            Character,
            String
        };

        Type type;

        union {
            SimpleJumpTable* simpleJumpTable;
            StringJumpTable* stringJumpTable;
        } jumpTable;

        unsigned bytecodeOffset;
        unsigned defaultOffset;

        SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
            : type(type)
            , bytecodeOffset(bytecodeOffset)
            , defaultOffset(defaultOffset)
        {
            this->jumpTable.simpleJumpTable = jumpTable;
        }

        SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
            : type(String)
            , bytecodeOffset(bytecodeOffset)
            , defaultOffset(defaultOffset)
        {
            this->jumpTable.stringJumpTable = jumpTable;
        }
    };
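
    // Tag types used to select the get_by_id / put_by_id constructor overloads
    // of PropertyStubCompilationInfo below.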
    enum PropertyStubGetById_T { PropertyStubGetById };
    enum PropertyStubPutById_T { PropertyStubPutById };

    struct PropertyStubCompilationInfo {
        enum Type { GetById, PutById } m_type;

        unsigned bytecodeIndex;
        MacroAssembler::Call callReturnLocation;
        MacroAssembler::Label hotPathBegin;
        MacroAssembler::DataLabelPtr getStructureToCompare;
        MacroAssembler::PatchableJump getStructureCheck;
        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad;
#if USE(JSVALUE64)
        MacroAssembler::DataLabelCompact getDisplacementLabel;
#else
        MacroAssembler::DataLabelCompact getDisplacementLabel1;
        MacroAssembler::DataLabelCompact getDisplacementLabel2;
#endif
        MacroAssembler::Label getPutResult;
        MacroAssembler::Label getColdPathBegin;
        MacroAssembler::DataLabelPtr putStructureToCompare;
#if USE(JSVALUE64)
        MacroAssembler::DataLabel32 putDisplacementLabel;
#else
        MacroAssembler::DataLabel32 putDisplacementLabel1;
        MacroAssembler::DataLabel32 putDisplacementLabel2;
#endif

#if !ASSERT_DISABLED
        PropertyStubCompilationInfo()
            : bytecodeIndex(std::numeric_limits<unsigned>::max())
        {
        }
#endif

        PropertyStubCompilationInfo(
            PropertyStubGetById_T, unsigned bytecodeIndex, MacroAssembler::Label hotPathBegin,
            MacroAssembler::DataLabelPtr structureToCompare,
            MacroAssembler::PatchableJump structureCheck,
            MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
#if USE(JSVALUE64)
            MacroAssembler::DataLabelCompact displacementLabel,
#else
            MacroAssembler::DataLabelCompact displacementLabel1,
            MacroAssembler::DataLabelCompact displacementLabel2,
#endif
            MacroAssembler::Label putResult)
            : m_type(GetById)
            , bytecodeIndex(bytecodeIndex)
            , hotPathBegin(hotPathBegin)
            , getStructureToCompare(structureToCompare)
            , getStructureCheck(structureCheck)
            , propertyStorageLoad(propertyStorageLoad)
#if USE(JSVALUE64)
            , getDisplacementLabel(displacementLabel)
#else
            , getDisplacementLabel1(displacementLabel1)
            , getDisplacementLabel2(displacementLabel2)
#endif
            , getPutResult(putResult)
        {
        }

        PropertyStubCompilationInfo(
            PropertyStubPutById_T, unsigned bytecodeIndex, MacroAssembler::Label hotPathBegin,
            MacroAssembler::DataLabelPtr structureToCompare,
            MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
#if USE(JSVALUE64)
            MacroAssembler::DataLabel32 displacementLabel
#else
            MacroAssembler::DataLabel32 displacementLabel1,
            MacroAssembler::DataLabel32 displacementLabel2
#endif
            )
            : m_type(PutById)
            , bytecodeIndex(bytecodeIndex)
            , hotPathBegin(hotPathBegin)
            , propertyStorageLoad(propertyStorageLoad)
            , putStructureToCompare(structureToCompare)
#if USE(JSVALUE64)
            , putDisplacementLabel(displacementLabel)
#else
            , putDisplacementLabel1(displacementLabel1)
            , putDisplacementLabel2(displacementLabel2)
#endif
        {
        }

        void slowCaseInfo(PropertyStubGetById_T, MacroAssembler::Label coldPathBegin, MacroAssembler::Call call)
        {
            ASSERT(m_type == GetById);
            callReturnLocation = call;
            getColdPathBegin = coldPathBegin;
        }

        void slowCaseInfo(PropertyStubPutById_T, MacroAssembler::Call call)
        {
            ASSERT(m_type == PutById);
            callReturnLocation = call;
        }

        void copyToStubInfo(StructureStubInfo& info, LinkBuffer& patchBuffer);
    };
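
    // Labels and jumps recorded for a get_by_val / put_by_val site, so that a
    // specialized by-val stub can later be generated and patched in.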
    struct ByValCompilationInfo {
        ByValCompilationInfo() { }

        ByValCompilationInfo(unsigned bytecodeIndex, MacroAssembler::PatchableJump badTypeJump, JITArrayMode arrayMode, MacroAssembler::Label doneTarget)
            : bytecodeIndex(bytecodeIndex)
            , badTypeJump(badTypeJump)
            , arrayMode(arrayMode)
            , doneTarget(doneTarget)
        {
        }

        unsigned bytecodeIndex;
        MacroAssembler::PatchableJump badTypeJump;
        JITArrayMode arrayMode;
        MacroAssembler::Label doneTarget;
        MacroAssembler::Label slowPathTarget;
        MacroAssembler::Call returnAddress;
    };

    struct StructureStubCompilationInfo {
        MacroAssembler::DataLabelPtr hotPathBegin;
        MacroAssembler::Call hotPathOther;
        MacroAssembler::Call callReturnLocation;
        CallLinkInfo::CallType callType;
        unsigned bytecodeIndex;
    };

    // Near calls can only be patched to other JIT code; regular calls can be
    // patched to JIT code or relinked to stub functions.
    void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
    void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
    void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);

    class JIT : private JSInterfaceJIT {
        friend class JITStubCall;
        friend struct PropertyStubCompilationInfo;
#if ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT
        friend void JSCBridge::performCompilerTask();
#endif

        using MacroAssembler::Jump;
        using MacroAssembler::JumpList;
        using MacroAssembler::Label;

        static const uintptr_t patchGetByIdDefaultStructure = unusedPointer;
        static const int patchGetByIdDefaultOffset = 0;

        // Magic number: the initial offset must not be representable as a signed
        // 8-bit value, or the X86Assembler will compress the displacement and we
        // may not be able to fit a patched offset.
        static const int patchPutByIdDefaultOffset = 256;

    public:
        static JITCode compile(VM* vm, CodeBlock* codeBlock, JITCompilationEffort effort, CodePtr* functionEntryArityCheck = 0);
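
        // Illustrative call site (not part of this header): a caller compiles a
        // CodeBlock and keeps the arity-check entry point alongside the code.
        // Treat the exact effort-enum spelling as an assumption here.
        //     CodePtr arityCheckEntry;
        //     JITCode code = JIT::compile(vm, codeBlock, JITCompilationCanFail, &arityCheckEntry);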

        static void compileClosureCall(VM* vm, CallLinkInfo* callLinkInfo, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr);
        static void compileGetByIdProto(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress);
        static void compileGetByIdSelfList(VM* vm, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset);
        static void compileGetByIdProtoList(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset);
        static void compileGetByIdChainList(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset);
        static void compileGetByIdChain(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress);
        static void compilePutByIdTransition(VM* vm, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct);

        static void compileGetByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode);
        static void compilePutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode);

        static CodeRef compileCTINativeCall(VM* vm, NativeFunction func);

        static void resetPatchGetById(RepatchBuffer&, StructureStubInfo*);
        static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
        static void patchGetByIdSelf(CodeBlock*, StructureStubInfo*, Structure*, PropertyOffset cachedOffset, ReturnAddressPtr);
        static void patchPutByIdReplace(CodeBlock*, StructureStubInfo*, Structure*, PropertyOffset cachedOffset, ReturnAddressPtr, bool direct);
        static void compilePatchGetArrayLength(VM* vm, CodeBlock* codeBlock, ReturnAddressPtr returnAddress);

        static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, VM*, CodeSpecializationKind);
        static void linkSlowCall(CodeBlock* callerCodeBlock, CallLinkInfo*);

    private:
        JIT(VM*, CodeBlock* = 0);
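
        // Compilation runs in passes: privateCompileMainPass() emits the fast
        // paths and records slow-case jumps, privateCompileLinkPass() resolves
        // recorded jumps, and privateCompileSlowCases() emits the out-of-line
        // fallback code; privateCompile() drives the passes and links the result.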
        void privateCompileMainPass();
        void privateCompileLinkPass();
        void privateCompileSlowCases();
        JITCode privateCompile(CodePtr* functionEntryArityCheck, JITCompilationEffort);

        void privateCompileClosureCall(CallLinkInfo*, CodeBlock* calleeCodeBlock, Structure*, ExecutableBase*, MacroAssemblerCodePtr);

        void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, ReturnAddressPtr, CallFrame*);
        void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset);
        void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, CallFrame*);
        void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, CallFrame*);
        void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, ReturnAddressPtr, CallFrame*);
        void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, PropertyOffset cachedOffset, StructureChain*, ReturnAddressPtr, bool direct);

        void privateCompileGetByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
        void privateCompilePutByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);

        Label privateCompileCTINativeCall(VM*, bool isConstruct = false);
        CodeRef privateCompileCTINativeCall(VM*, NativeFunction);
        void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);

        static bool isDirectPutById(StructureStubInfo*);

        void addSlowCase(Jump);
        void addSlowCase(JumpList);
        void addSlowCase();
        void addJump(Jump, int);
        void emitJumpSlowToHot(Jump, int);

        void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
        void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
        void compileLoadVarargs(Instruction*);
        void compileCallEval();
        void compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator&);

        enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
        void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
        bool isOperandConstantImmediateDouble(unsigned src);

        void emitLoadDouble(int index, FPRegisterID value);
        void emitLoadInt32ToDouble(int index, FPRegisterID value);
        Jump emitJumpIfNotObject(RegisterID structureReg);

        Jump addStructureTransitionCheck(JSCell*, Structure*, StructureStubInfo*, RegisterID scratch);
        void addStructureTransitionCheck(JSCell*, Structure*, StructureStubInfo*, JumpList& failureCases, RegisterID scratch);
        void testPrototype(JSValue, JumpList& failureCases, StructureStubInfo*);

        enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterImmediates };
        // The value register in the write barrier is used before any scratch
        // registers, so it may safely be the same as either of the scratch registers.
        void emitWriteBarrier(RegisterID owner, RegisterID valueTag, RegisterID scratch, RegisterID scratch2, WriteBarrierMode, WriteBarrierUseKind);
        void emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode, WriteBarrierUseKind);

        template<typename StructureType> // StructureType can be RegisterID or ImmPtr.
        void emitAllocateJSObject(RegisterID allocator, StructureType, RegisterID result, RegisterID scratch);

#if ENABLE(VALUE_PROFILER)
        // This assumes that the value to profile is in regT0 and that regT3 is
        // available for scratch.
        void emitValueProfilingSite(ValueProfile*);
        void emitValueProfilingSite(unsigned bytecodeOffset);
        void emitValueProfilingSite();
#else
        void emitValueProfilingSite(unsigned) { }
        void emitValueProfilingSite() { }
#endif

        void emitArrayProfilingSite(RegisterID structureAndIndexingType, RegisterID scratch, ArrayProfile*);
        void emitArrayProfilingSiteForBytecodeIndex(RegisterID structureAndIndexingType, RegisterID scratch, unsigned bytecodeIndex);
        void emitArrayProfileStoreToHoleSpecialCase(ArrayProfile*);
        void emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile*);
        JITArrayMode chooseArrayMode(ArrayProfile*);

        // Property is in regT1, base is in regT0. regT2 contains indexing type.
        // Property is int-checked and zero-extended. Base is cell checked.
        // Structure is already profiled. Returns the slow cases. Fall-through
        // case contains result in regT0, and it is not yet profiled.
        JumpList emitInt32GetByVal(Instruction* instruction, PatchableJump& badType) { return emitContiguousGetByVal(instruction, badType, Int32Shape); }
        JumpList emitDoubleGetByVal(Instruction*, PatchableJump& badType);
        JumpList emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape = ContiguousShape);
        JumpList emitArrayStorageGetByVal(Instruction*, PatchableJump& badType);
        JumpList emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor&, size_t elementSize, TypedArraySignedness);
        JumpList emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor&, size_t elementSize);

        // Property is in regT1, base is in regT0. regT2 contains indexing type.
        // The value to store is not yet loaded. Property is int-checked and
        // zero-extended. Base is cell checked. Structure is already profiled.
        // Returns the slow cases.
        JumpList emitInt32PutByVal(Instruction* currentInstruction, PatchableJump& badType)
        {
            return emitGenericContiguousPutByVal(currentInstruction, badType, Int32Shape);
        }
        JumpList emitDoublePutByVal(Instruction* currentInstruction, PatchableJump& badType)
        {
            return emitGenericContiguousPutByVal(currentInstruction, badType, DoubleShape);
        }
        JumpList emitContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType)
        {
            return emitGenericContiguousPutByVal(currentInstruction, badType);
        }
        JumpList emitGenericContiguousPutByVal(Instruction*, PatchableJump& badType, IndexingType indexingShape = ContiguousShape);
        JumpList emitArrayStoragePutByVal(Instruction*, PatchableJump& badType);
        JumpList emitIntTypedArrayPutByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor&, size_t elementSize, TypedArraySignedness, TypedArrayRounding);
        JumpList emitFloatTypedArrayPutByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor&, size_t elementSize);

        enum FinalObjectMode { MayBeFinal, KnownNotFinal };

#if USE(JSVALUE32_64)
        bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);

        void emitLoadTag(int index, RegisterID tag);
        void emitLoadPayload(int index, RegisterID payload);

        void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
        void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
        void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);

        void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
        void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
        void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
        void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
        void emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength);
        void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
        void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
        void emitStoreDouble(int index, FPRegisterID value);

        bool isLabeled(unsigned bytecodeOffset);
        void map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload);
        void unmap(RegisterID);
        void unmap();
        bool isMapped(int virtualRegisterIndex);
        bool getMappedPayload(int virtualRegisterIndex, RegisterID& payload);
        bool getMappedTag(int virtualRegisterIndex, RegisterID& tag);

        void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
        void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);

        void compileGetByIdHotPath(Identifier*);
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier*, Vector<SlowCaseEntry>::iterator&);
        void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
        void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode = MayBeFinal);
        void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset);

        // Arithmetic opcode helpers
        void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
        void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
        void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);

#if CPU(ARM_TRADITIONAL)
        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 36;
        static const int sequenceGetByIdHotPathConstantSpace = 4;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 80;
        static const int sequenceGetByIdSlowCaseConstantSpace = 4;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 36;
        static const int sequencePutByIdConstantSpace = 4;
#elif CPU(SH4)
        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 36;
        static const int sequenceGetByIdHotPathConstantSpace = 5;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 38;
        static const int sequenceGetByIdSlowCaseConstantSpace = 4;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 36;
        static const int sequencePutByIdConstantSpace = 5;
#endif
#else // USE(JSVALUE32_64)
        /* This function is deprecated. */
        void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);

        void emitGetVirtualRegister(int src, RegisterID dst);
        void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
        void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
        void emitStoreCell(unsigned dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
        {
            emitPutVirtualRegister(dst, payload);
        }

        int32_t getConstantOperandImmediateInt(unsigned src);

        void killLastResultRegister();

        Jump emitJumpIfJSCell(RegisterID);
        Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
        void emitJumpSlowCaseIfJSCell(RegisterID);
        void emitJumpSlowCaseIfNotJSCell(RegisterID);
        void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
        Jump emitJumpIfImmediateInteger(RegisterID);
        Jump emitJumpIfNotImmediateInteger(RegisterID);
        Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
        void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
        void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
        void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);

        void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
        void emitTagAsBoolImmediate(RegisterID reg);
        void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);

        void compileGetByIdHotPath(int baseVReg, Identifier*);
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier*, Vector<SlowCaseEntry>::iterator&);
        void compileGetDirectOffset(RegisterID base, RegisterID result, PropertyOffset cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID result, PropertyOffset cachedOffset);
        void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch, FinalObjectMode = MayBeFinal);
        void compilePutDirectOffset(RegisterID base, RegisterID value, PropertyOffset cachedOffset);
#endif // USE(JSVALUE32_64)

#if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
#define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
#define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
#define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)

        void beginUninterruptedSequence(int, int);
        void endUninterruptedSequence(int, int, int);
#else
#define BEGIN_UNINTERRUPTED_SEQUENCE(name)
#define END_UNINTERRUPTED_SEQUENCE(name)
#define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst)
#endif
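
        // On assemblers with a constant pool (see ASSEMBLER_HAS_CONSTANT_POOL),
        // these sequences reserve instruction and constant-pool space up front so
        // the assembler cannot flush a constant pool in the middle of a patchable
        // code sequence; the sequence*InstructionSpace / sequence*ConstantSpace
        // constants above give the per-sequence budgets.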

        void emit_compareAndJump(OpcodeID, unsigned op1, unsigned op2, unsigned target, RelationalCondition);
        void emit_compareAndJumpSlow(unsigned op1, unsigned op2, unsigned target, DoubleCondition, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION), bool invert, Vector<SlowCaseEntry>::iterator&);
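
        // One emit_op_* emitter per bytecode opcode; the main pass dispatches on
        // the current opcode and calls the matching function. The emitSlow_op_*
        // functions further below generate the corresponding slow paths.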
        void emit_op_add(Instruction*);
        void emit_op_bitand(Instruction*);
        void emit_op_bitor(Instruction*);
        void emit_op_bitxor(Instruction*);
        void emit_op_call(Instruction*);
        void emit_op_call_eval(Instruction*);
        void emit_op_call_varargs(Instruction*);
        void emit_op_call_put_result(Instruction*);
        void emit_op_catch(Instruction*);
        void emit_op_construct(Instruction*);
        void emit_op_get_callee(Instruction*);
        void emit_op_create_this(Instruction*);
        void emit_op_convert_this(Instruction*);
        void emit_op_create_arguments(Instruction*);
        void emit_op_debug(Instruction*);
        void emit_op_del_by_id(Instruction*);
        void emit_op_div(Instruction*);
        void emit_op_end(Instruction*);
        void emit_op_enter(Instruction*);
        void emit_op_create_activation(Instruction*);
        void emit_op_eq(Instruction*);
        void emit_op_eq_null(Instruction*);
        void emit_op_get_by_id(Instruction*);
        void emit_op_get_arguments_length(Instruction*);
        void emit_op_get_by_val(Instruction*);
        void emit_op_get_argument_by_val(Instruction*);
        void emit_op_get_by_pname(Instruction*);
        void emit_op_init_lazy_reg(Instruction*);
        void emit_op_check_has_instance(Instruction*);
        void emit_op_instanceof(Instruction*);
        void emit_op_is_undefined(Instruction*);
        void emit_op_is_boolean(Instruction*);
        void emit_op_is_number(Instruction*);
        void emit_op_is_string(Instruction*);
        void emit_op_jeq_null(Instruction*);
        void emit_op_jfalse(Instruction*);
        void emit_op_jmp(Instruction*);
        void emit_op_jneq_null(Instruction*);
        void emit_op_jneq_ptr(Instruction*);
        void emit_op_jless(Instruction*);
        void emit_op_jlesseq(Instruction*);
        void emit_op_jgreater(Instruction*);
        void emit_op_jgreatereq(Instruction*);
        void emit_op_jnless(Instruction*);
        void emit_op_jnlesseq(Instruction*);
        void emit_op_jngreater(Instruction*);
        void emit_op_jngreatereq(Instruction*);
        void emit_op_jtrue(Instruction*);
        void emit_op_loop_hint(Instruction*);
        void emit_op_lshift(Instruction*);
        void emit_op_mod(Instruction*);
        void emit_op_mov(Instruction*);
        void emit_op_mul(Instruction*);
        void emit_op_negate(Instruction*);
        void emit_op_neq(Instruction*);
        void emit_op_neq_null(Instruction*);
        void emit_op_new_array(Instruction*);
        void emit_op_new_array_with_size(Instruction*);
        void emit_op_new_array_buffer(Instruction*);
        void emit_op_new_func(Instruction*);
        void emit_op_new_func_exp(Instruction*);
        void emit_op_new_object(Instruction*);
        void emit_op_new_regexp(Instruction*);
        void emit_op_get_pnames(Instruction*);
        void emit_op_next_pname(Instruction*);
        void emit_op_not(Instruction*);
        void emit_op_nstricteq(Instruction*);
        void emit_op_pop_scope(Instruction*);
        void emit_op_dec(Instruction*);
        void emit_op_inc(Instruction*);
        void emit_op_profile_did_call(Instruction*);
        void emit_op_profile_will_call(Instruction*);
        void emit_op_push_name_scope(Instruction*);
        void emit_op_push_with_scope(Instruction*);
        void emit_op_put_by_id(Instruction*);
        void emit_op_put_by_index(Instruction*);
        void emit_op_put_by_val(Instruction*);
        void emit_op_put_getter_setter(Instruction*);
        void emit_op_init_global_const(Instruction*);
        void emit_op_init_global_const_check(Instruction*);
        void emit_resolve_operations(ResolveOperations*, const int* base, const int* value);
        void emitSlow_link_resolve_operations(ResolveOperations*, Vector<SlowCaseEntry>::iterator&);
        void emit_op_resolve(Instruction*);
        void emit_op_resolve_base(Instruction*);
        void emit_op_resolve_with_base(Instruction*);
        void emit_op_resolve_with_this(Instruction*);
        void emit_op_put_to_base(Instruction*);
        void emit_op_ret(Instruction*);
        void emit_op_ret_object_or_this(Instruction*);
        void emit_op_rshift(Instruction*);
        void emit_op_strcat(Instruction*);
        void emit_op_stricteq(Instruction*);
        void emit_op_sub(Instruction*);
        void emit_op_switch_char(Instruction*);
        void emit_op_switch_imm(Instruction*);
        void emit_op_switch_string(Instruction*);
        void emit_op_tear_off_activation(Instruction*);
        void emit_op_tear_off_arguments(Instruction*);
        void emit_op_throw(Instruction*);
        void emit_op_throw_static_error(Instruction*);
        void emit_op_to_number(Instruction*);
        void emit_op_to_primitive(Instruction*);
        void emit_op_unexpected_load(Instruction*);
        void emit_op_urshift(Instruction*);
        void emit_op_get_scoped_var(Instruction*);
        void emit_op_put_scoped_var(Instruction*);

        void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_init_global_const_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_number(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_base(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_with_base(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_with_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_to_base(Instruction*, Vector<SlowCaseEntry>::iterator&);

        void emitRightShift(Instruction*, bool isUnsigned);
        void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);

        void emitInitRegister(unsigned dst);

        void emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry);
        void emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
        void emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
#if USE(JSVALUE64)
        void emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry, RegisterID to, RegisterID from = callFrameRegister);
#endif

        JSValue getConstantOperand(unsigned src);
        bool isOperandConstantImmediateInt(unsigned src);
        bool isOperandConstantImmediateChar(unsigned src);

        bool atJumpTarget();
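
        // Slow-path helpers: each fast-path addSlowCase() call produces exactly
        // one SlowCaseEntry, consumed here in emission order via the iterator.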
        Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            return iter++->from;
        }
        void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            iter->from.link(this);
            ++iter;
        }
        void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            ASSERT(!iter->from.isSet());
            ++iter;
        }
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);

        Jump checkStructure(RegisterID reg, Structure* structure);

        void restoreArgumentReferenceForTrampoline();
        void updateTopCallFrame();

        Call emitNakedCall(CodePtr function = CodePtr());

        // Loads the character value of a single-character string into dst.
        void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);

#if ENABLE(DFG_JIT)
        void emitEnterOptimizationCheck();
#else
        void emitEnterOptimizationCheck() { }
#endif

#ifndef NDEBUG
        void printBytecodeOperandTypes(unsigned src1, unsigned src2);
#endif

#if ENABLE(SAMPLING_FLAGS)
        void setSamplingFlag(int32_t);
        void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
        void emitCount(AbstractSamplingCounter&, int32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
        void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
        void sampleCodeBlock(CodeBlock*);
#else
        void sampleCodeBlock(CodeBlock*) { }
#endif

#if ENABLE(DFG_JIT)
        bool canBeOptimized() { return m_canBeOptimized; }
        bool canBeOptimizedOrInlined() { return m_canBeOptimizedOrInlined; }
        bool shouldEmitProfiling() { return m_shouldEmitProfiling; }
#else
        bool canBeOptimized() { return false; }
        bool canBeOptimizedOrInlined() { return false; }
        // Enables use of value profiler with tiered compilation turned off,
        // in which case all code gets profiled.
        bool shouldEmitProfiling() { return false; }
#endif
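
        // Per-compilation state: the inputs below are fixed at construction; the
        // vectors accumulate records during the main pass for use by the link
        // and slow-case passes.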
        Interpreter* m_interpreter;
        VM* m_vm;
        CodeBlock* m_codeBlock;

        Vector<CallRecord> m_calls;
        Vector<Label> m_labels;
        Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
        Vector<ByValCompilationInfo> m_byValCompilationInfo;
        Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
        Vector<JumpTable> m_jmpTable;

        unsigned m_bytecodeOffset;
        Vector<SlowCaseEntry> m_slowCases;
        Vector<SwitchRecord> m_switches;

        unsigned m_propertyAccessInstructionIndex;
        unsigned m_byValInstructionIndex;
        unsigned m_globalResolveInfoIndex;
        unsigned m_callLinkInfoIndex;

#if USE(JSVALUE32_64)
        unsigned m_jumpTargetIndex;
        unsigned m_mappedBytecodeOffset;
        int m_mappedVirtualRegisterIndex;
        RegisterID m_mappedTag;
        RegisterID m_mappedPayload;
#else
        int m_lastResultBytecodeRegister;
#endif
        unsigned m_jumpTargetsPosition;

#ifndef NDEBUG
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
        Label m_uninterruptedInstructionSequenceBegin;
        int m_uninterruptedConstantSequenceBegin;
#endif
#endif
#if !ENABLE(DETACHED_JIT)
        OwnPtr<JITDisassembler> m_disassembler;
        RefPtr<Profiler::Compilation> m_compilation;
#endif
        WeakRandom m_randomGenerator;
        static CodeRef stringGetByValStubGenerator(VM*);

#if ENABLE(VALUE_PROFILER)
        bool m_canBeOptimized;
        bool m_canBeOptimizedOrInlined;
        bool m_shouldEmitProfiling;
#endif
    } JIT_CLASS_ALIGNMENT;

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JIT_h