# Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.

# First come the common protocols that both interpreters use. Note that each
# of these must have an ASSERT() in LLIntData.cpp

# Work-around for the fact that the toolchain's awareness of armv7s results in
# a separate slab in the fat binary, yet the offlineasm doesn't know to expect
# it.
if ARMv7s
end

# These declarations must match interpreter/JSStack.h.
const CallFrameHeaderSize = 48
const ArgumentCount = -48
const CallerFrame = -40
const Callee = -32
const ScopeChain = -24
const ReturnPC = -16
const CodeBlock = -8

const ThisArgumentOffset = -CallFrameHeaderSize - 8
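
# For orientation, the constants above imply the following call frame header
# layout (byte offsets relative to cfr; a sketch derived from the values
# above, not copied from JSStack.h):
#
#     cfr -  8: CodeBlock
#     cfr - 16: ReturnPC
#     cfr - 24: ScopeChain
#     cfr - 32: Callee
#     cfr - 40: CallerFrame
#     cfr - 48: ArgumentCount
#     cfr - 56: this argument (ThisArgumentOffset)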

# Some register conventions.
if JSVALUE64
    # - Use a pair of registers to represent the PC: one register for the
    #   base of the bytecodes, and one register for the index.
    # - The PC base (or PB for short) should be stored in the csr. It will
    #   get clobbered on calls to other JS code, but will get saved on calls
    #   to C functions.
    # - C calls are still given the Instruction* rather than the PC index.
    #   This requires an add before the call, and a sub after.
    const PC = t4
    const PB = t6
    const tagTypeNumber = csr1
    const tagMask = csr2

    macro loadisFromInstruction(offset, dest)
        loadis offset * 8[PB, PC, 8], dest
    end

    macro loadpFromInstruction(offset, dest)
        loadp offset * 8[PB, PC, 8], dest
    end

    macro storepToInstruction(value, offset)
        storep value, offset * 8[PB, PC, 8]
    end
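
    # A sketch of the add-before/sub-after idea mentioned above (illustrative
    # only; the real conversion lives in LowLevelInterpreter64.asm and its
    # exact register choreography may differ):
    #
    #     lshiftp 3, PC     # PC index -> byte offset
    #     addp PB, PC       # byte offset -> Instruction*
    #     cCall2(slowPath, cfr, PC)
    #     subp PB, PC       # Instruction* -> byte offset
    #     rshiftp 3, PC     # byte offset -> PC index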
else
    const PC = t4
    macro loadisFromInstruction(offset, dest)
        loadis offset * 4[PC], dest
    end

    macro loadpFromInstruction(offset, dest)
        loadp offset * 4[PC], dest
    end
end

# Constants for reasoning about value representation.
if BIG_ENDIAN
    const TagOffset = 0
    const PayloadOffset = 4
else
    const TagOffset = 4
    const PayloadOffset = 0
end
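
# For example, on a little-endian JSVALUE32_64 build the two halves of
# virtual register t1 would be fetched like this (an illustrative sketch
# using the addressing pattern from the macros in this file, not a macro
# that exists here):
#
#     loadi TagOffset[cfr, t1, 8], t2       # tag word, at byte offset 4
#     loadi PayloadOffset[cfr, t1, 8], t3   # payload word, at byte offset 0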

# Constants for reasoning about butterflies.
const IsArray = 1
const IndexingShapeMask = 30
const NoIndexingShape = 0
const Int32Shape = 20
const DoubleShape = 22
const ContiguousShape = 26
const ArrayStorageShape = 28
const SlowPutArrayStorageShape = 30

# Type constants.
const StringType = 5
const ObjectType = 17

# Type flags constants.
const MasqueradesAsUndefined = 1
const ImplementsHasInstance = 2
const ImplementsDefaultHasInstance = 8

# Bytecode operand constants.
const FirstConstantRegisterIndex = 0x40000000

# Code type constants.
const GlobalCode = 0
const EvalCode = 1
const FunctionCode = 2

# The interpreter steals the tag word of the argument count.
const LLIntReturnPC = ArgumentCount + TagOffset
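# (That is, the ArgumentCount slot keeps the actual count in its payload
# word, while its tag word is repurposed to hold the LLInt's return PC.)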

# String flags.
const HashFlags8BitBuffer = 64

# Copied from PropertyOffset.h
const firstOutOfLineOffset = 100

# From ResolveOperations.h
const ResolveOperationFail = 0
const ResolveOperationSetBaseToUndefined = 1
const ResolveOperationReturnScopeAsBase = 2
const ResolveOperationSetBaseToScope = 3
const ResolveOperationSetBaseToGlobal = 4
const ResolveOperationGetAndReturnScopedVar = 5
const ResolveOperationGetAndReturnGlobalVar = 6
const ResolveOperationGetAndReturnGlobalVarWatchable = 7
const ResolveOperationSkipTopScopeNode = 8
const ResolveOperationSkipScopes = 9
const ResolveOperationReturnGlobalObjectAsBase = 10
const ResolveOperationGetAndReturnGlobalProperty = 11
const ResolveOperationCheckForDynamicEntriesBeforeGlobalScope = 12

const PutToBaseOperationKindUninitialised = 0
const PutToBaseOperationKindGeneric = 1
const PutToBaseOperationKindReadonly = 2
const PutToBaseOperationKindGlobalVariablePut = 3
const PutToBaseOperationKindGlobalVariablePutChecked = 4
const PutToBaseOperationKindGlobalPropertyPut = 5
const PutToBaseOperationKindVariablePut = 6

# Allocation constants
if JSVALUE64
    const JSFinalObjectSizeClassIndex = 1
else
    const JSFinalObjectSizeClassIndex = 3
end

# This must match wtf/Vector.h
const VectorBufferOffset = 0
if JSVALUE64
    const VectorSizeOffset = 12
else
    const VectorSizeOffset = 8
end

# Some common utilities.
macro crash()
    if C_LOOP
        cloopCrash
    else
        storei t0, 0xbbadbeef[]
        move 0, t0
        call t0
    end
end

macro assert(assertion)
    if ASSERT_ENABLED
        assertion(.ok)
        crash()
    .ok:
    end
end
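
# assert() takes a macro that branches to its label argument when the
# assertion holds; see e.g. "assert(macro (ok) bpgteq t0, 0, ok end)" in
# functionInitialization() below.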

macro preserveReturnAddressAfterCall(destinationRegister)
    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS
        # In the C_LOOP case, we're only preserving the bytecode vPC.
        move lr, destinationRegister
    elsif SH4
        stspr destinationRegister
    elsif X86 or X86_64
        pop destinationRegister
    else
        error
    end
end

macro restoreReturnAddressBeforeReturn(sourceRegister)
    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS
        # In the C_LOOP case, we're only restoring the bytecode vPC.
        move sourceRegister, lr
    elsif SH4
        ldspr sourceRegister
    elsif X86 or X86_64
        push sourceRegister
    else
        error
    end
end

macro traceExecution()
    if EXECUTION_TRACING
        callSlowPath(_llint_trace)
    end
end

macro callTargetFunction(callLinkInfo)
    if C_LOOP
        cloopCallJSFunction LLIntCallLinkInfo::machineCodeTarget[callLinkInfo]
    else
        call LLIntCallLinkInfo::machineCodeTarget[callLinkInfo]
        dispatchAfterCall()
    end
end

macro slowPathForCall(advance, slowPath)
    callCallSlowPath(
        advance,
        slowPath,
        macro (callee)
            if C_LOOP
                cloopCallJSFunction callee
            else
                call callee
                dispatchAfterCall()
            end
        end)
end

macro arrayProfile(structureAndIndexingType, profile, scratch)
    const structure = structureAndIndexingType
    const indexingType = structureAndIndexingType
    if VALUE_PROFILER
        storep structure, ArrayProfile::m_lastSeenStructure[profile]
    end
    loadb Structure::m_indexingType[structure], indexingType
end

macro checkSwitchToJIT(increment, action)
    if JIT_ENABLED
        loadp CodeBlock[cfr], t0
        baddis increment, CodeBlock::m_llintExecuteCounter + ExecutionCounter::m_counter[t0], .continue
        action()
    .continue:
    end
end

macro checkSwitchToJITForEpilogue()
    checkSwitchToJIT(
        10,
        macro ()
            callSlowPath(_llint_replace)
        end)
end

macro assertNotConstant(index)
    assert(macro (ok) bilt index, FirstConstantRegisterIndex, ok end)
end

macro functionForCallCodeBlockGetter(targetRegister)
    loadp Callee[cfr], targetRegister
    loadp JSFunction::m_executable[targetRegister], targetRegister
    loadp FunctionExecutable::m_codeBlockForCall[targetRegister], targetRegister
end

macro functionForConstructCodeBlockGetter(targetRegister)
    loadp Callee[cfr], targetRegister
    loadp JSFunction::m_executable[targetRegister], targetRegister
    loadp FunctionExecutable::m_codeBlockForConstruct[targetRegister], targetRegister
end

macro notFunctionCodeBlockGetter(targetRegister)
    loadp CodeBlock[cfr], targetRegister
end

macro functionCodeBlockSetter(sourceRegister)
    storep sourceRegister, CodeBlock[cfr]
end

macro notFunctionCodeBlockSetter(sourceRegister)
    # Nothing to do!
end

# Do the bare minimum required to execute code. Sets up the PC and leaves the
# CodeBlock* in t1. May also trigger prologue entry OSR.
macro prologue(codeBlockGetter, codeBlockSetter, osrSlowPath, traceSlowPath)
    preserveReturnAddressAfterCall(t2)

    # Set up the call frame and check if we should OSR.
    storep t2, ReturnPC[cfr]
    if EXECUTION_TRACING
        callSlowPath(traceSlowPath)
    end
    codeBlockGetter(t1)
    if JIT_ENABLED
        baddis 5, CodeBlock::m_llintExecuteCounter + ExecutionCounter::m_counter[t1], .continue
        cCall2(osrSlowPath, cfr, PC)
        move t1, cfr
        btpz t0, .recover
        loadp ReturnPC[cfr], t2
        restoreReturnAddressBeforeReturn(t2)
        jmp t0
    .recover:
        codeBlockGetter(t1)
    .continue:
    end
    codeBlockSetter(t1)

    # Set up the PC.
    if JSVALUE64
        loadp CodeBlock::m_instructions[t1], PB
        move 0, PC
    else
        loadp CodeBlock::m_instructions[t1], PC
    end
end

# Expects that CodeBlock is in t1, which is what prologue() leaves behind.
# Must call dispatch(0) after calling this.
macro functionInitialization(profileArgSkip)
    if VALUE_PROFILER
        # Profile the arguments. Unfortunately, we have no choice but to do this. This
        # code is pretty horrendous because of the difference in ordering between
        # arguments and value profiles, the desire to have a simple loop-down-to-zero
        # loop, and the desire to use only three registers so as to preserve the PC and
        # the code block. It is likely that this code should be rewritten in a more
        # optimal way for architectures that have more than five registers available
        # for arbitrary use in the interpreter.
        loadi CodeBlock::m_numParameters[t1], t0
        addp -profileArgSkip, t0 # Use addi because that's what has the peephole
        assert(macro (ok) bpgteq t0, 0, ok end)
        btpz t0, .argumentProfileDone
        loadp CodeBlock::m_argumentValueProfiles + VectorBufferOffset[t1], t3
        mulp sizeof ValueProfile, t0, t2 # Aaaaahhhh! Need strength reduction!
        negp t0
        lshiftp 3, t0
        addp t2, t3
    .argumentProfileLoop:
        if JSVALUE64
            loadq ThisArgumentOffset + 8 - profileArgSkip * 8[cfr, t0], t2
            subp sizeof ValueProfile, t3
            storeq t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets[t3]
        else
            loadi ThisArgumentOffset + TagOffset + 8 - profileArgSkip * 8[cfr, t0], t2
            subp sizeof ValueProfile, t3
            storei t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets + TagOffset[t3]
            loadi ThisArgumentOffset + PayloadOffset + 8 - profileArgSkip * 8[cfr, t0], t2
            storei t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets + PayloadOffset[t3]
        end
        baddpnz 8, t0, .argumentProfileLoop
    .argumentProfileDone:
    end

    # Check stack height.
    loadi CodeBlock::m_numCalleeRegisters[t1], t0
    loadp CodeBlock::m_vm[t1], t2
    loadp VM::interpreter[t2], t2 # FIXME: Can get to the JSStack from the JITStackFrame
    lshifti 3, t0
    addp t0, cfr, t0
    bpaeq Interpreter::m_stack + JSStack::m_end[t2], t0, .stackHeightOK

    # Stack height check failed - need to call a slow_path.
    callSlowPath(_llint_stack_check)
.stackHeightOK:
end

macro allocateJSObject(allocator, structure, result, scratch1, slowCase)
    if ALWAYS_ALLOCATE_SLOW
        jmp slowCase
    else
        const offsetOfFirstFreeCell =
            MarkedAllocator::m_freeList +
            MarkedBlock::FreeList::head

        # Get the object from the free list.
        loadp offsetOfFirstFreeCell[allocator], result
        btpz result, slowCase

        # Remove the object from the free list.
        loadp [result], scratch1
        storep scratch1, offsetOfFirstFreeCell[allocator]

        # Initialize the object.
        storep structure, JSCell::m_structure[result]
        storep 0, JSObject::m_butterfly[result]
    end
end

macro doReturn()
    loadp ReturnPC[cfr], t2
    loadp CallerFrame[cfr], cfr
    restoreReturnAddressBeforeReturn(t2)
    ret
end

# Indicate the beginning of LLInt.
_llint_begin:
    crash()

_llint_program_prologue:
    prologue(notFunctionCodeBlockGetter, notFunctionCodeBlockSetter, _llint_entry_osr, _llint_trace_prologue)
    dispatch(0)

_llint_eval_prologue:
    prologue(notFunctionCodeBlockGetter, notFunctionCodeBlockSetter, _llint_entry_osr, _llint_trace_prologue)
    dispatch(0)

_llint_function_for_call_prologue:
    prologue(functionForCallCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_call, _llint_trace_prologue_function_for_call)
.functionForCallBegin:
    functionInitialization(0)
    dispatch(0)

_llint_function_for_construct_prologue:
    prologue(functionForConstructCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_construct, _llint_trace_prologue_function_for_construct)
.functionForConstructBegin:
    functionInitialization(1)
    dispatch(0)

_llint_function_for_call_arity_check:
    prologue(functionForCallCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_call_arityCheck, _llint_trace_arityCheck_for_call)
    functionArityCheck(.functionForCallBegin, _llint_slow_path_call_arityCheck)

_llint_function_for_construct_arity_check:
    prologue(functionForConstructCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_construct_arityCheck, _llint_trace_arityCheck_for_construct)
    functionArityCheck(.functionForConstructBegin, _llint_slow_path_construct_arityCheck)

# Value-representation-specific code.
if JSVALUE64
    include LowLevelInterpreter64
else
    include LowLevelInterpreter32_64
end

# Value-representation-agnostic code.
_llint_op_new_array:
    traceExecution()
    callSlowPath(_llint_slow_path_new_array)
    dispatch(5)

_llint_op_new_array_with_size:
    traceExecution()
    callSlowPath(_llint_slow_path_new_array_with_size)
    dispatch(4)

_llint_op_new_array_buffer:
    traceExecution()
    callSlowPath(_llint_slow_path_new_array_buffer)
    dispatch(5)

_llint_op_new_regexp:
    traceExecution()
    callSlowPath(_llint_slow_path_new_regexp)
    dispatch(3)

_llint_op_less:
    traceExecution()
    callSlowPath(_llint_slow_path_less)
    dispatch(4)

_llint_op_lesseq:
    traceExecution()
    callSlowPath(_llint_slow_path_lesseq)
    dispatch(4)

_llint_op_greater:
    traceExecution()
    callSlowPath(_llint_slow_path_greater)
    dispatch(4)

_llint_op_greatereq:
    traceExecution()
    callSlowPath(_llint_slow_path_greatereq)
    dispatch(4)

_llint_op_mod:
    traceExecution()
    callSlowPath(_llint_slow_path_mod)
    dispatch(4)

_llint_op_typeof:
    traceExecution()
    callSlowPath(_llint_slow_path_typeof)
    dispatch(3)

_llint_op_is_object:
    traceExecution()
    callSlowPath(_llint_slow_path_is_object)
    dispatch(3)

_llint_op_is_function:
    traceExecution()
    callSlowPath(_llint_slow_path_is_function)
    dispatch(3)

_llint_op_in:
    traceExecution()
    callSlowPath(_llint_slow_path_in)
    dispatch(4)

macro getPutToBaseOperationField(scratch, scratch1, fieldOffset, fieldGetter)
    loadpFromInstruction(4, scratch)
    fieldGetter(fieldOffset[scratch])
end

macro moveJSValueFromRegisterWithoutProfiling(value, destBuffer, destOffsetReg)
    storeq value, [destBuffer, destOffsetReg, 8]
end

macro moveJSValueFromRegistersWithoutProfiling(tag, payload, destBuffer, destOffsetReg)
    storei tag, TagOffset[destBuffer, destOffsetReg, 8]
    storei payload, PayloadOffset[destBuffer, destOffsetReg, 8]
end

macro putToBaseVariableBody(variableOffset, scratch1, scratch2, scratch3)
    loadisFromInstruction(1, scratch1)
    loadp PayloadOffset[cfr, scratch1, 8], scratch1
    loadp JSVariableObject::m_registers[scratch1], scratch1
    loadisFromInstruction(3, scratch2)
    if JSVALUE64
        loadConstantOrVariable(scratch2, scratch3)
        moveJSValueFromRegisterWithoutProfiling(scratch3, scratch1, variableOffset)
    else
        loadConstantOrVariable2Reg(scratch2, scratch3, scratch2) # scratch3 = tag, scratch2 = payload
        moveJSValueFromRegistersWithoutProfiling(scratch3, scratch2, scratch1, variableOffset)
    end
end

_llint_op_put_to_base_variable:
    traceExecution()
    getPutToBaseOperationField(t0, t1, PutToBaseOperation::m_offset, macro(addr)
        loadis addr, t0
    end)
    putToBaseVariableBody(t0, t1, t2, t3)
    dispatch(5)

_llint_op_put_to_base:
    traceExecution()
    getPutToBaseOperationField(t0, t1, 0, macro(addr)
        leap addr, t0
        bbneq PutToBaseOperation::m_kindAsUint8[t0], PutToBaseOperationKindVariablePut, .notPutToBaseVariable
        loadis PutToBaseOperation::m_offset[t0], t0
        putToBaseVariableBody(t0, t1, t2, t3)
        dispatch(5)
    .notPutToBaseVariable:
    end)
    callSlowPath(_llint_slow_path_put_to_base)
    dispatch(5)

macro getResolveOperation(resolveOperationIndex, dest)
    loadpFromInstruction(resolveOperationIndex, dest)
    loadp VectorBufferOffset[dest], dest
end

macro getScope(loadInitialScope, scopeCount, dest, scratch)
    loadInitialScope(dest)
    loadi scopeCount, scratch
    btiz scratch, .done
.loop:
    loadp JSScope::m_next[dest], dest
    subi 1, scratch
    btinz scratch, .loop
.done:
end

macro moveJSValue(sourceBuffer, sourceOffsetReg, destBuffer, destOffsetReg, profileOffset, scratchRegister)
    if JSVALUE64
        loadq [sourceBuffer, sourceOffsetReg, 8], scratchRegister
        storeq scratchRegister, [destBuffer, destOffsetReg, 8]
        loadpFromInstruction(profileOffset, destOffsetReg)
        valueProfile(scratchRegister, destOffsetReg)
    else
        loadi PayloadOffset[sourceBuffer, sourceOffsetReg, 8], scratchRegister
        storei scratchRegister, PayloadOffset[destBuffer, destOffsetReg, 8]
        loadi TagOffset[sourceBuffer, sourceOffsetReg, 8], sourceOffsetReg
        storei sourceOffsetReg, TagOffset[destBuffer, destOffsetReg, 8]
        loadpFromInstruction(profileOffset, destOffsetReg)
        valueProfile(sourceOffsetReg, scratchRegister, destOffsetReg)
    end
end

macro moveJSValueFromSlot(slot, destBuffer, destOffsetReg, profileOffset, scratchRegister)
    if JSVALUE64
        loadq [slot], scratchRegister
        storeq scratchRegister, [destBuffer, destOffsetReg, 8]
        loadpFromInstruction(profileOffset, destOffsetReg)
        valueProfile(scratchRegister, destOffsetReg)
    else
        loadi PayloadOffset[slot], scratchRegister
        storei scratchRegister, PayloadOffset[destBuffer, destOffsetReg, 8]
        loadi TagOffset[slot], slot
        storei slot, TagOffset[destBuffer, destOffsetReg, 8]
        loadpFromInstruction(profileOffset, destOffsetReg)
        valueProfile(slot, scratchRegister, destOffsetReg)
    end
end

macro moveJSValueFromRegister(value, destBuffer, destOffsetReg, profileOffset)
    storeq value, [destBuffer, destOffsetReg, 8]
    loadpFromInstruction(profileOffset, destOffsetReg)
    valueProfile(value, destOffsetReg)
end

macro moveJSValueFromRegisters(tag, payload, destBuffer, destOffsetReg, profileOffset)
    storei tag, TagOffset[destBuffer, destOffsetReg, 8]
    storei payload, PayloadOffset[destBuffer, destOffsetReg, 8]
    loadpFromInstruction(profileOffset, destOffsetReg)
    valueProfile(tag, payload, destOffsetReg)
end

_llint_op_resolve_global_property:
    traceExecution()
    getResolveOperation(3, t0)
    loadp CodeBlock[cfr], t1
    loadp CodeBlock::m_globalObject[t1], t1
    loadp ResolveOperation::m_structure[t0], t2
    bpneq JSCell::m_structure[t1], t2, .llint_op_resolve_local
    loadis ResolveOperation::m_offset[t0], t0
    if JSVALUE64
        loadPropertyAtVariableOffsetKnownNotInline(t0, t1, t2)
        loadisFromInstruction(1, t0)
        moveJSValueFromRegister(t2, cfr, t0, 4)
    else
        loadPropertyAtVariableOffsetKnownNotInline(t0, t1, t2, t3)
        loadisFromInstruction(1, t0)
        moveJSValueFromRegisters(t2, t3, cfr, t0, 4)
    end
    dispatch(5)

_llint_op_resolve_global_var:
    traceExecution()
    getResolveOperation(3, t0)
    loadp ResolveOperation::m_registerAddress[t0], t0
    loadisFromInstruction(1, t1)
    moveJSValueFromSlot(t0, cfr, t1, 4, t3)
    dispatch(5)

macro resolveScopedVarBody(resolveOperations)
    # First ResolveOperation is to skip scope chain nodes
    getScope(macro(dest)
            loadp ScopeChain + PayloadOffset[cfr], dest
        end,
        ResolveOperation::m_scopesToSkip[resolveOperations], t1, t2)
    loadp JSVariableObject::m_registers[t1], t1 # t1 now contains the activation registers

    # Second ResolveOperation tells us what offset to use
    loadis ResolveOperation::m_offset + sizeof ResolveOperation[resolveOperations], t2
    loadisFromInstruction(1, t3)
    moveJSValue(t1, t2, cfr, t3, 4, t0)
end

_llint_op_resolve_scoped_var:
    traceExecution()
    getResolveOperation(3, t0)
    resolveScopedVarBody(t0)
    dispatch(5)

_llint_op_resolve_scoped_var_on_top_scope:
    traceExecution()
    getResolveOperation(3, t0)
    # Load destination index
    loadisFromInstruction(1, t3)
    # We know we want the top scope chain entry
    loadp ScopeChain + PayloadOffset[cfr], t1
    loadp JSVariableObject::m_registers[t1], t1 # t1 now contains the activation registers
    # Second ResolveOperation tells us what offset to use
    loadis ResolveOperation::m_offset + sizeof ResolveOperation[t0], t2
    moveJSValue(t1, t2, cfr, t3, 4, t0)
    dispatch(5)

_llint_op_resolve_scoped_var_with_top_scope_check:
    traceExecution()
    getResolveOperation(3, t0)
    # First ResolveOperation tells us what register to check
    loadis ResolveOperation::m_activationRegister[t0], t1
    loadp PayloadOffset[cfr, t1, 8], t1
    getScope(macro(dest)
            btpz t1, .scopeChainNotCreated
            loadp JSScope::m_next[t1], dest
            jmp .done
        .scopeChainNotCreated:
            loadp ScopeChain + PayloadOffset[cfr], dest
        .done:
        end,
        # Second ResolveOperation tells us how many more nodes to skip
        ResolveOperation::m_scopesToSkip + sizeof ResolveOperation[t0], t1, t2)
    loadp JSVariableObject::m_registers[t1], t1 # t1 now contains the activation registers
    # Third operation tells us what offset to use
    loadis ResolveOperation::m_offset + 2 * sizeof ResolveOperation[t0], t2
    loadisFromInstruction(1, t3)
    moveJSValue(t1, t2, cfr, t3, 4, t0)
    dispatch(5)

_llint_op_resolve:
.llint_op_resolve_local:
    traceExecution()
    getResolveOperation(3, t0)
    btpz t0, .noInstructions
    loadis ResolveOperation::m_operation[t0], t1
    bineq t1, ResolveOperationSkipScopes, .notSkipScopes
    resolveScopedVarBody(t0)
    dispatch(5)
.notSkipScopes:
    bineq t1, ResolveOperationGetAndReturnGlobalVar, .notGetAndReturnGlobalVar
    loadp ResolveOperation::m_registerAddress[t0], t0
    loadisFromInstruction(1, t1)
    moveJSValueFromSlot(t0, cfr, t1, 4, t3)
    dispatch(5)
.notGetAndReturnGlobalVar:
.noInstructions:
    callSlowPath(_llint_slow_path_resolve)
    dispatch(5)

_llint_op_resolve_base_to_global:
    traceExecution()
    loadp CodeBlock[cfr], t1
    loadp CodeBlock::m_globalObject[t1], t1
    loadisFromInstruction(1, t3)
    if JSVALUE64
        moveJSValueFromRegister(t1, cfr, t3, 6)
    else
        move CellTag, t2
        moveJSValueFromRegisters(t2, t1, cfr, t3, 6)
    end
    dispatch(7)

_llint_op_resolve_base_to_global_dynamic:
    jmp _llint_op_resolve_base

_llint_op_resolve_base_to_scope:
    traceExecution()
    getResolveOperation(4, t0)
    # First ResolveOperation is to skip scope chain nodes
    getScope(macro(dest)
            loadp ScopeChain + PayloadOffset[cfr], dest
        end,
        ResolveOperation::m_scopesToSkip[t0], t1, t2)
    loadisFromInstruction(1, t3)
    if JSVALUE64
        moveJSValueFromRegister(t1, cfr, t3, 6)
    else
        move CellTag, t2
        moveJSValueFromRegisters(t2, t1, cfr, t3, 6)
    end
    dispatch(7)

_llint_op_resolve_base_to_scope_with_top_scope_check:
    traceExecution()
    getResolveOperation(4, t0)
    # First ResolveOperation tells us what register to check
    loadis ResolveOperation::m_activationRegister[t0], t1
    loadp PayloadOffset[cfr, t1, 8], t1
    getScope(macro(dest)
            btpz t1, .scopeChainNotCreated
            loadp JSScope::m_next[t1], dest
            jmp .done
        .scopeChainNotCreated:
            loadp ScopeChain + PayloadOffset[cfr], dest
        .done:
        end,
        # Second ResolveOperation tells us how many more nodes to skip
        ResolveOperation::m_scopesToSkip + sizeof ResolveOperation[t0], t1, t2)
    loadisFromInstruction(1, t3)
    if JSVALUE64
        moveJSValueFromRegister(t1, cfr, t3, 6)
    else
        move CellTag, t2
        moveJSValueFromRegisters(t2, t1, cfr, t3, 6)
    end
    dispatch(7)

_llint_op_resolve_base:
    traceExecution()
    callSlowPath(_llint_slow_path_resolve_base)
    dispatch(7)

macro interpretResolveWithBase(opcodeLength, slowPath)
    traceExecution()
    getResolveOperation(4, t0)
    btpz t0, .slowPath

    loadp ScopeChain[cfr], t3
    # Get the base
    loadis ResolveOperation::m_operation[t0], t2
    bineq t2, ResolveOperationSkipScopes, .notSkipScopes
    getScope(macro(dest) move t3, dest end,
        ResolveOperation::m_scopesToSkip[t0], t1, t2)
    move t1, t3
    addp sizeof ResolveOperation, t0, t0
    jmp .haveCorrectScope

.notSkipScopes:
    bineq t2, ResolveOperationSkipTopScopeNode, .notSkipTopScopeNode
    loadis ResolveOperation::m_activationRegister[t0], t1
    loadp PayloadOffset[cfr, t1, 8], t1
    getScope(macro(dest)
            btpz t1, .scopeChainNotCreated
            loadp JSScope::m_next[t1], dest
            jmp .done
        .scopeChainNotCreated:
            loadp ScopeChain + PayloadOffset[cfr], dest
        .done:
        end,
        # Second ResolveOperation tells us how many more nodes to skip
        sizeof ResolveOperation + ResolveOperation::m_scopesToSkip[t0], t1, t2)
    move t1, t3
    # We've handled two resolve operations here
    addp 2 * sizeof ResolveOperation, t0, t0

.notSkipTopScopeNode:
.haveCorrectScope:
    # t3 now contains the correct scope
    # t0 contains a pointer to the current ResolveOperation
    loadis ResolveOperation::m_operation[t0], t2
    # t2 now contains the kind of the current ResolveOperation
    loadisFromInstruction(1, t1)
    # t1 now contains the index for the base register
    bineq t2, ResolveOperationSetBaseToScope, .notSetBaseToScope
    if JSVALUE64
        storeq t3, [cfr, t1, 8]
    else
        storei t3, PayloadOffset[cfr, t1, 8]
        storei CellTag, TagOffset[cfr, t1, 8]
    end
    jmp .haveSetBase

.notSetBaseToScope:
    bineq t2, ResolveOperationSetBaseToUndefined, .notSetBaseToUndefined
    if JSVALUE64
        storeq ValueUndefined, [cfr, t1, 8]
    else
        storei 0, PayloadOffset[cfr, t1, 8]
        storei UndefinedTag, TagOffset[cfr, t1, 8]
    end
    jmp .haveSetBase

.notSetBaseToUndefined:
    bineq t2, ResolveOperationSetBaseToGlobal, .slowPath
    loadp JSCell::m_structure[t3], t2
    loadp Structure::m_globalObject[t2], t2
    if JSVALUE64
        storeq t2, [cfr, t1, 8]
    else
        storei t2, PayloadOffset[cfr, t1, 8]
        storei CellTag, TagOffset[cfr, t1, 8]
    end

.haveSetBase:
    # Get the value
    # Load the operation into t2
    loadis ResolveOperation::m_operation + sizeof ResolveOperation[t0], t2
    # Load the index for the value register into t1
    loadisFromInstruction(2, t1)
    bineq t2, ResolveOperationGetAndReturnScopedVar, .notGetAndReturnScopedVar
    loadp JSVariableObject::m_registers[t3], t3 # t3 now contains the activation registers
    # Second ResolveOperation tells us what offset to use
    loadis ResolveOperation::m_offset + sizeof ResolveOperation[t0], t2
    moveJSValue(t3, t2, cfr, t1, opcodeLength - 1, t0)
    dispatch(opcodeLength)

.notGetAndReturnScopedVar:
    bineq t2, ResolveOperationGetAndReturnGlobalProperty, .slowPath
    callSlowPath(slowPath)
    dispatch(opcodeLength)

.slowPath:
    callSlowPath(slowPath)
    dispatch(opcodeLength)
end

_llint_op_resolve_with_base:
    interpretResolveWithBase(7, _llint_slow_path_resolve_with_base)

_llint_op_resolve_with_this:
    interpretResolveWithBase(6, _llint_slow_path_resolve_with_this)

macro withInlineStorage(object, propertyStorage, continuation)
    # Indicate that the object is the property storage, and that the
    # property storage register is unused.
    continuation(object, propertyStorage)
end

macro withOutOfLineStorage(object, propertyStorage, continuation)
    loadp JSObject::m_butterfly[object], propertyStorage
    # Indicate that the propertyStorage register now points to the
    # property storage, and that the object register may be reused
    # if the object pointer is not needed anymore.
    continuation(propertyStorage, object)
end

_llint_op_del_by_id:
    traceExecution()
    callSlowPath(_llint_slow_path_del_by_id)
    dispatch(4)

_llint_op_del_by_val:
    traceExecution()
    callSlowPath(_llint_slow_path_del_by_val)
    dispatch(4)

_llint_op_put_by_index:
    traceExecution()
    callSlowPath(_llint_slow_path_put_by_index)
    dispatch(4)

_llint_op_put_getter_setter:
    traceExecution()
    callSlowPath(_llint_slow_path_put_getter_setter)
    dispatch(5)

_llint_op_jtrue:
    traceExecution()
    jumpTrueOrFalse(
        macro (value, target) btinz value, target end,
        _llint_slow_path_jtrue)

_llint_op_jfalse:
    traceExecution()
    jumpTrueOrFalse(
        macro (value, target) btiz value, target end,
        _llint_slow_path_jfalse)

_llint_op_jless:
    traceExecution()
    compare(
        macro (left, right, target) bilt left, right, target end,
        macro (left, right, target) bdlt left, right, target end,
        _llint_slow_path_jless)

_llint_op_jnless:
    traceExecution()
    compare(
        macro (left, right, target) bigteq left, right, target end,
        macro (left, right, target) bdgtequn left, right, target end,
        _llint_slow_path_jnless)

_llint_op_jgreater:
    traceExecution()
    compare(
        macro (left, right, target) bigt left, right, target end,
        macro (left, right, target) bdgt left, right, target end,
        _llint_slow_path_jgreater)

_llint_op_jngreater:
    traceExecution()
    compare(
        macro (left, right, target) bilteq left, right, target end,
        macro (left, right, target) bdltequn left, right, target end,
        _llint_slow_path_jngreater)

_llint_op_jlesseq:
    traceExecution()
    compare(
        macro (left, right, target) bilteq left, right, target end,
        macro (left, right, target) bdlteq left, right, target end,
        _llint_slow_path_jlesseq)

_llint_op_jnlesseq:
    traceExecution()
    compare(
        macro (left, right, target) bigt left, right, target end,
        macro (left, right, target) bdgtun left, right, target end,
        _llint_slow_path_jnlesseq)

_llint_op_jgreatereq:
    traceExecution()
    compare(
        macro (left, right, target) bigteq left, right, target end,
        macro (left, right, target) bdgteq left, right, target end,
        _llint_slow_path_jgreatereq)

_llint_op_jngreatereq:
    traceExecution()
    compare(
        macro (left, right, target) bilt left, right, target end,
        macro (left, right, target) bdltun left, right, target end,
        _llint_slow_path_jngreatereq)

_llint_op_loop_hint:
    traceExecution()
    loadp JITStackFrame::vm[sp], t1
    loadb VM::watchdog + Watchdog::m_timerDidFire[t1], t0
    btbnz t0, .handleWatchdogTimer
.afterWatchdogTimerCheck:
    checkSwitchToJITForLoop()
    dispatch(1)
.handleWatchdogTimer:
    callWatchdogTimerHandler(.throwHandler)
    jmp .afterWatchdogTimerCheck
.throwHandler:
    jmp _llint_throw_from_slow_path_trampoline

_llint_op_switch_string:
    traceExecution()
    callSlowPath(_llint_slow_path_switch_string)
    dispatch(0)

_llint_op_new_func_exp:
    traceExecution()
    callSlowPath(_llint_slow_path_new_func_exp)
    dispatch(3)

_llint_op_call:
    traceExecution()
    arrayProfileForCall()
    doCall(_llint_slow_path_call)

_llint_op_construct:
    traceExecution()
    doCall(_llint_slow_path_construct)

_llint_op_call_varargs:
    traceExecution()
    slowPathForCall(6, _llint_slow_path_call_varargs)
  894. traceExecution()
  895. # Eval is executed in one of two modes:
  896. #
  897. # 1) We find that we're really invoking eval() in which case the
  898. # execution is perfomed entirely inside the slow_path, and it
  899. # returns the PC of a function that just returns the return value
  900. # that the eval returned.
  901. #
  902. # 2) We find that we're invoking something called eval() that is not
  903. # the real eval. Then the slow_path returns the PC of the thing to
  904. # call, and we call it.
  905. #
  906. # This allows us to handle two cases, which would require a total of
  907. # up to four pieces of state that cannot be easily packed into two
  908. # registers (C functions can return up to two registers, easily):
  909. #
  910. # - The call frame register. This may or may not have been modified
  911. # by the slow_path, but the convention is that it returns it. It's not
  912. # totally clear if that's necessary, since the cfr is callee save.
  913. # But that's our style in this here interpreter so we stick with it.
  914. #
  915. # - A bit to say if the slow_path successfully executed the eval and has
  916. # the return value, or did not execute the eval but has a PC for us
  917. # to call.
  918. #
  919. # - Either:
  920. # - The JS return value (two registers), or
  921. #
  922. # - The PC to call.
  923. #
  924. # It turns out to be easier to just always have this return the cfr
  925. # and a PC to call, and that PC may be a dummy thunk that just
  926. # returns the JS value that the eval returned.
  927. slowPathForCall(4, _llint_slow_path_call_eval)
  928. _llint_generic_return_point:
  929. dispatchAfterCall()

_llint_op_strcat:
    traceExecution()
    callSlowPath(_llint_slow_path_strcat)
    dispatch(4)

_llint_op_get_pnames:
    traceExecution()
    callSlowPath(_llint_slow_path_get_pnames)
    dispatch(0) # The slow_path either advances the PC or jumps us to somewhere else.

_llint_op_push_with_scope:
    traceExecution()
    callSlowPath(_llint_slow_path_push_with_scope)
    dispatch(2)

_llint_op_pop_scope:
    traceExecution()
    callSlowPath(_llint_slow_path_pop_scope)
    dispatch(1)

_llint_op_push_name_scope:
    traceExecution()
    callSlowPath(_llint_slow_path_push_name_scope)
    dispatch(4)

_llint_op_throw:
    traceExecution()
    callSlowPath(_llint_slow_path_throw)
    dispatch(2)

_llint_op_throw_static_error:
    traceExecution()
    callSlowPath(_llint_slow_path_throw_static_error)
    dispatch(3)

_llint_op_profile_will_call:
    traceExecution()
    callSlowPath(_llint_slow_path_profile_will_call)
    dispatch(2)

_llint_op_profile_did_call:
    traceExecution()
    callSlowPath(_llint_slow_path_profile_did_call)
    dispatch(2)

_llint_op_debug:
    traceExecution()
    callSlowPath(_llint_slow_path_debug)
    dispatch(5)

_llint_native_call_trampoline:
    nativeCallTrampoline(NativeExecutable::m_function)

_llint_native_construct_trampoline:
    nativeCallTrampoline(NativeExecutable::m_constructor)

# Lastly, make sure that we can link even though we don't support all opcodes.
# These opcodes should never arise when using LLInt or either JIT. We assert
# as much.
macro notSupported()
    if ASSERT_ENABLED
        crash()
    else
        # We should use whatever the smallest possible instruction is, just to
        # ensure that there is a gap between instruction labels. If multiple
        # smallest instructions exist, we should pick the one that is most
        # likely to result in execution being halted. Currently that is the
        # break instruction on all architectures we're interested in. (Break is
        # int3 on Intel, which is 1 byte, and bkpt on ARMv7, which is 2 bytes.)
        break
    end
end

_llint_op_get_by_id_chain:
    notSupported()

_llint_op_get_by_id_custom_chain:
    notSupported()

_llint_op_get_by_id_custom_proto:
    notSupported()

_llint_op_get_by_id_custom_self:
    notSupported()

_llint_op_get_by_id_generic:
    notSupported()

_llint_op_get_by_id_getter_chain:
    notSupported()

_llint_op_get_by_id_getter_proto:
    notSupported()

_llint_op_get_by_id_getter_self:
    notSupported()

_llint_op_get_by_id_proto:
    notSupported()

_llint_op_get_by_id_self:
    notSupported()

_llint_op_get_string_length:
    notSupported()

_llint_op_put_by_id_generic:
    notSupported()

_llint_op_put_by_id_replace:
    notSupported()

_llint_op_put_by_id_transition:
    notSupported()

_llint_op_init_global_const_nop:
    dispatch(5)

# Indicate the end of LLInt.
_llint_end:
    crash()