JITOpcodes32_64.cpp

/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"

namespace JSC {
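
// A note on the value representation assumed throughout this file: in the
// JSVALUE32_64 encoding each JSValue is two 32-bit words, a payload and a
// tag. The tag constants are defined in JSValue.h; the ASSERTs below rely
// on their ordering (e.g. BooleanTag + 1 == Int32Tag with Int32Tag == -1,
// and UndefinedTag + 1 == NullTag), and any tag numerically below LowestTag
// is the high word of a double. By convention the code below keeps tags in
// regT1/regT3 and payloads in regT0/regT2.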

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ReturnPC);

    // Calling convention: f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, JSStack::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&vm->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&vm->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_vm->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);

    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
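    // An object only masquerades as undefined within its own global object,
    // so for masquerading cells compare the structure's global object with
    // the one this code block belongs to.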
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
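    // A value is a number if its tag is Int32Tag or below LowestTag (the
    // high word of a double). Int32Tag is -1, so adding 1 wraps it to zero,
    // and a single unsigned Below compare against LowestTag + 1 accepts
    // exactly the int32 and double cases.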
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationNotCreated = branch32(Equal, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation);
    stubCall.call();
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(arguments)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments));
    stubCall.addArgument(activation);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);
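    // Boolean and int32 are the two highest tags (the ASSERT checks they are
    // adjacent, with Int32Tag == -1), so anything Below BooleanTag is neither
    // and takes the slow path; for the rest the payload is the truth value.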
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
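    // Or-ing in the low bit maps UndefinedTag onto NullTag (the ASSERT checks
    // the two tags are adjacent and that NullTag has its low bit set), so a
    // single compare catches both null and undefined.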
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
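    // Bail to the slow path if the tags differ, if both values are cells
    // (strings need a deep compare), or if the shared tag is a double tag
    // (the payload is only half the double). Otherwise equal tags plus equal
    // payloads decide.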
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
    loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);

    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_number), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_number);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, vm)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_static_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;
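    // Note that calleeReg and resultReg deliberately alias regT0: the callee
    // is only needed to fetch its allocation profile, after which regT0 is
    // free to receive the newly allocated object.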
    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    if (shouldEmitProfiling()) {
        loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
        move(regT3, regT1);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalThis();
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        move(TrustedImm32(JSValue::UndefinedTag), regT1);
        move(TrustedImm32(0), regT0);
    }
    Jump isNotUndefined = branch32(NotEqual, regT3, TrustedImm32(JSValue::UndefinedTag));
    emitValueProfilingSite();
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        move(TrustedImm32(JSValue::CellTag), regT1);
        move(TrustedImmPtr(m_vm->stringStructure.get()), regT0);
    }
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT3, regT2);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(JSStack::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));
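    // Arguments live at negative offsets from the current call frame, so
    // negate the index and scale by sizeof(Register) (TimesEight) to address
    // the chosen argument's payload and tag relative to the 'this' slot.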
    neg32(regT2);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_put_to_base(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* operation = currentInstruction[4].u.putToBaseOperation;

    switch (operation->m_kind) {
    case PutToBaseOperation::GlobalVariablePutChecked:
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(operation->m_predicatePointer)));
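        // Fall through: once the predicate check has been emitted, a checked
        // global-variable put proceeds exactly like an unchecked one.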
    case PutToBaseOperation::GlobalVariablePut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        if (operation->m_isDynamic)
            addSlowCase(branchPtr(NotEqual, payloadFor(base), TrustedImmPtr(globalObject)));

        emitLoad(value, regT1, regT0);
        storePtr(regT0, reinterpret_cast<char*>(operation->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
        storePtr(regT1, reinterpret_cast<char*>(operation->m_registerAddress) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
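        // The store above may create a reference the collector needs to know
        // about, so record it with the GC's write barrier when barriers are
        // enabled (likewise in the cases below).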
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }
    case PutToBaseOperation::VariablePut: {
        loadPtr(payloadFor(base), regT3);
        emitLoad(value, regT1, regT0);
        loadPtr(Address(regT3, JSVariableObject::offsetOfRegisters()), regT2);
        store32(regT0, Address(regT2, operation->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, Address(regT2, operation->m_offset * sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT3, regT1, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }
    case PutToBaseOperation::GlobalPropertyPut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        loadPtr(payloadFor(base), regT3);
        emitLoad(value, regT1, regT0);
        loadPtr(&operation->m_structure, regT2);
        addSlowCase(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), regT2));
        ASSERT(!operation->m_structure || !operation->m_structure->inlineCapacity());
        loadPtr(Address(regT3, JSObject::butterflyOffset()), regT2);
        load32(&operation->m_offsetInButterfly, regT3);
        storePtr(regT0, BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        storePtr(regT1, BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT1, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        break;
    }
    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        JITStubCall stubCall(this, cti_op_put_to_base);

        stubCall.addArgument(TrustedImm32(base));
        stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
        stubCall.addArgument(TrustedImm32(value));
        stubCall.addArgument(TrustedImmPtr(operation));
        stubCall.call();
        break;
    }
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)