# LowLevelInterpreter32_64.asm
  1. # Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
  2. #
  3. # Redistribution and use in source and binary forms, with or without
  4. # modification, are permitted provided that the following conditions
  5. # are met:
  6. # 1. Redistributions of source code must retain the above copyright
  7. # notice, this list of conditions and the following disclaimer.
  8. # 2. Redistributions in binary form must reproduce the above copyright
  9. # notice, this list of conditions and the following disclaimer in the
  10. # documentation and/or other materials provided with the distribution.
  11. #
  12. # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
  13. # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
  14. # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  15. # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
  16. # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  17. # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  18. # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  19. # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
  20. # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  21. # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
  22. # THE POSSIBILITY OF SUCH DAMAGE.
  23. # Crash course on the language that this is written in (which I just call
  24. # "assembly" even though it's more than that):
  25. #
  26. # - Mostly gas-style operand ordering. The last operand tends to be the
  27. # destination. So "a := b" is written as "mov b, a". But unlike gas,
  28. # comparisons are in-order, so "if (a < b)" is written as
  29. # "bilt a, b, ...".
  30. #
  31. # - "b" = byte, "h" = 16-bit word, "i" = 32-bit word, "p" = pointer.
  32. # Currently this is just 32-bit so "i" and "p" are interchangeable
  33. # except when an op supports one but not the other.
  34. #
  35. # - In general, valid operands for macro invocations and instructions are
  36. # registers (eg "t0"), addresses (eg "4[t0]"), base-index addresses
  37. # (eg "7[t0, t1, 2]"), absolute addresses (eg "0xa0000000[]"), or labels
  38. # (eg "_foo" or ".foo"). Macro invocations can also take anonymous
  39. # macros as operands. Instructions cannot take anonymous macros.
  40. #
  41. # - Labels must have names that begin with either "_" or ".". A "." label
  42. # is local and gets renamed before code gen to minimize namespace
  43. # pollution. A "_" label is an extern symbol (i.e. ".globl"). The "_"
  44. # may or may not be removed during code gen depending on whether the asm
  45. # conventions for C name mangling on the target platform mandate a "_"
  46. # prefix.
  47. #
  48. # - A "macro" is a lambda expression, which may be either anonymous or
  49. # named. But this has caveats. "macro" can take zero or more arguments,
  50. # which may be macros or any valid operands, but it can only return
  51. # code. But you can do Turing-complete things via continuation passing
  52. # style: "macro foo (a, b) b(a) end foo(foo, foo)". Actually, don't do
  53. # that, since you'll just crash the assembler.
  54. #
  55. # - An "if" is a conditional on settings. Any identifier supplied in the
  56. # predicate of an "if" is assumed to be a #define that is available
  57. # during code gen. So you can't use "if" for computation in a macro, but
  58. # you can use it to select different pieces of code for different
  59. # platforms.
  60. #
  61. # - Arguments to macros follow lexical scoping rather than dynamic scoping.
  62. # Const's also follow lexical scoping and may override (hide) arguments
  63. # or other consts. All variables (arguments and constants) can be bound
  64. # to operands. Additionally, arguments (but not constants) can be bound
  65. # to macros.
  66. # Below we have a bunch of constant declarations. Each constant must have
  67. # a corresponding ASSERT() in LLIntData.cpp.
  68. # Value representation constants.
# 32-bit tag words for the JSValue32_64 encoding. Each constant must match a
# corresponding ASSERT() in LLIntData.cpp (see the comment above).
const Int32Tag = -1
const BooleanTag = -2
const NullTag = -3
const UndefinedTag = -4
const CellTag = -5
const EmptyValueTag = -6
const DeletedValueTag = -7
# The most negative non-double tag. Tag values unsigned-below LowestTag
# encode the high word of a double (see e.g. the sign-bit flip in
# _llint_op_negate).
const LowestTag = DeletedValueTag
# Utilities

# Advance PC by `advance` instruction-stream slots (4 bytes each, since this
# interpreter is 32-bit only) and jump to the pointer stored at the new PC.
macro dispatch(advance)
    addp advance * 4, PC
    jmp [PC]
end

# Branch dispatch: pcOffset is a slot count; convert it to a byte offset
# (<< 2), add it to PC, and jump. pcOffset is clobbered.
macro dispatchBranchWithOffset(pcOffset)
    lshifti 2, pcOffset
    addp pcOffset, PC
    jmp [PC]
end

# Load a branch-offset operand from the instruction stream into t0, then
# dispatch through it.
macro dispatchBranch(pcOffset)
    loadi pcOffset, t0
    dispatchBranchWithOffset(t0)
end

# Resume after a call opcode: PC was stashed in the tag slot of the
# ArgumentCount frame entry across the call (see callCallSlowPath /
# callWatchdogTimerHandler), so reload it from there and jump.
macro dispatchAfterCall()
    loadi ArgumentCount + TagOffset[cfr], PC
    jmp [PC]
end
# Call a C function with two arguments, marshalling them per the target's
# calling convention. On ARM-family targets the LLInt register aliases t0/t1
# double as the argument registers; X86 spills to outgoing stack slots.
macro cCall2(function, arg1, arg2)
    if ARM or ARMv7 or ARMv7_TRADITIONAL
        move arg1, t0
        move arg2, t1
        call function
    elsif X86
        resetX87Stack            # presumably clears x87 state before the call — TODO confirm
        poke arg1, 0             # outgoing argument slot 0
        poke arg2, 1             # outgoing argument slot 1
        call function
    elsif MIPS or SH4
        move arg1, a0
        move arg2, a1
        call function
    elsif C_LOOP
        cloopCallSlowPath function, arg1, arg2
    else
        error
    end
end

# This barely works. arg3 and arg4 should probably be immediates.
macro cCall4(function, arg1, arg2, arg3, arg4)
    if ARM or ARMv7 or ARMv7_TRADITIONAL
        move arg1, t0
        move arg2, t1
        move arg3, t2
        move arg4, t3
        call function
    elsif X86
        resetX87Stack
        poke arg1, 0
        poke arg2, 1
        poke arg3, 2
        poke arg4, 3
        call function
    elsif MIPS or SH4
        move arg1, a0
        move arg2, a1
        move arg3, a2
        move arg4, a3
        call function
    elsif C_LOOP
        error                    # the C loop has no four-argument call thunk
    else
        error
    end
end
# Call a slow-path C function with (cfr, PC). By convention the slow path
# returns the next PC in t0 and the (possibly updated) call frame in t1;
# install both.
macro callSlowPath(slowPath)
    cCall2(slowPath, cfr, PC)
    move t0, PC
    move t1, cfr
end

# Debugging operation if you'd like to print an operand in the instruction stream. fromWhere
# should be an immediate integer - any integer you like; use it to identify the place you're
# debugging from. operand should likewise be an immediate, and should identify the operand
# in the instruction stream you'd like to print out.
macro traceOperand(fromWhere, operand)
    cCall4(_llint_trace_operand, cfr, PC, fromWhere, operand)
    move t0, PC
    move t1, cfr
end

# Debugging operation if you'd like to print the value of an operand in the instruction
# stream. Same as traceOperand(), but assumes that the operand is a register, and prints its
# value.
macro traceValue(fromWhere, operand)
    cCall4(_llint_trace_value, cfr, PC, fromWhere, operand)
    move t0, PC
    move t1, cfr
end
# Call a slowPath for call opcodes.
macro callCallSlowPath(advance, slowPath, action)
    # Stash the post-call resume PC in the ArgumentCount tag slot so that
    # dispatchAfterCall() can pick it up after the callee returns.
    addp advance * 4, PC, t0
    storep t0, ArgumentCount + TagOffset[cfr]
    cCall2(slowPath, cfr, PC)
    move t1, cfr
    action(t0)                   # hand the slow path's t0 result to the caller's macro
end

macro callWatchdogTimerHandler(throwHandler)
    # Stash PC across the C call; a non-zero t0 result means "throw".
    storei PC, ArgumentCount + TagOffset[cfr]
    cCall2(_llint_slow_path_handle_watchdog_timer, cfr, PC)
    move t1, cfr
    btpnz t0, throwHandler
    loadi ArgumentCount + TagOffset[cfr], PC
end

macro checkSwitchToJITForLoop()
    checkSwitchToJIT(
        1,                       # loop hotness increment
        macro ()
            storei PC, ArgumentCount + TagOffset[cfr]
            cCall2(_llint_loop_osr, cfr, PC)
            move t1, cfr
            # Non-zero t0 is a machine-code entry point for OSR into the JIT;
            # zero means stay in the interpreter and restore PC.
            btpz t0, .recover
            jmp t0
        .recover:
            loadi ArgumentCount + TagOffset[cfr], PC
        end)
end
# Load the (tag, payload) of a virtual register that may be either a frame
# local or an entry in the CodeBlock's constant pool.
# Index, tag, and payload must be different registers. Index is not
# changed.
macro loadConstantOrVariable(index, tag, payload)
    bigteq index, FirstConstantRegisterIndex, .constant
    loadi TagOffset[cfr, index, 8], tag
    loadi PayloadOffset[cfr, index, 8], payload
    jmp .done
.constant:
    loadp CodeBlock[cfr], payload
    loadp CodeBlock::m_constantRegisters + VectorBufferOffset[payload], payload
    # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
    # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
    loadp TagOffset[payload, index, 8], tag
    loadp PayloadOffset[payload, index, 8], payload
.done:
end

# Same as loadConstantOrVariable(), but loads only the tag word.
macro loadConstantOrVariableTag(index, tag)
    bigteq index, FirstConstantRegisterIndex, .constant
    loadi TagOffset[cfr, index, 8], tag
    jmp .done
.constant:
    loadp CodeBlock[cfr], tag
    loadp CodeBlock::m_constantRegisters + VectorBufferOffset[tag], tag
    # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
    # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
    loadp TagOffset[tag, index, 8], tag
.done:
end

# Index and payload may be the same register. Index may be clobbered.
macro loadConstantOrVariable2Reg(index, tag, payload)
    bigteq index, FirstConstantRegisterIndex, .constant
    loadi TagOffset[cfr, index, 8], tag
    loadi PayloadOffset[cfr, index, 8], payload
    jmp .done
.constant:
    loadp CodeBlock[cfr], tag
    loadp CodeBlock::m_constantRegisters + VectorBufferOffset[tag], tag
    # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
    # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
    lshifti 3, index
    addp index, tag
    loadp PayloadOffset[tag], payload
    loadp TagOffset[tag], tag
.done:
end

# Like loadConstantOrVariable(), but instead of loading the tag it invokes
# the caller-supplied tagCheck macro on the tag's address.
macro loadConstantOrVariablePayloadTagCustom(index, tagCheck, payload)
    bigteq index, FirstConstantRegisterIndex, .constant
    tagCheck(TagOffset[cfr, index, 8])
    loadi PayloadOffset[cfr, index, 8], payload
    jmp .done
.constant:
    loadp CodeBlock[cfr], payload
    loadp CodeBlock::m_constantRegisters + VectorBufferOffset[payload], payload
    # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
    # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
    tagCheck(TagOffset[payload, index, 8])
    loadp PayloadOffset[payload, index, 8], payload
.done:
end
# Index and payload must be different registers. Index is not mutated. Use
# this if you know what the tag of the variable should be. Doing the tag
# test as part of loading the variable reduces register use, but may not
# be faster than doing loadConstantOrVariable followed by a branch on the
# tag.
macro loadConstantOrVariablePayload(index, expectedTag, payload, slow)
    loadConstantOrVariablePayloadTagCustom(
        index,
        macro (actualTag) bineq actualTag, expectedTag, slow end,
        payload)
end

# Same as above, but performs no tag check at all.
macro loadConstantOrVariablePayloadUnchecked(index, payload)
    loadConstantOrVariablePayloadTagCustom(
        index,
        macro (actualTag) end,
        payload)
end

macro writeBarrier(tag, payload)
    # Nothing to do, since we don't have a generational or incremental collector.
end

# Record (tag, payload) into the given ValueProfile bucket, when value
# profiling is compiled in; otherwise a no-op.
macro valueProfile(tag, payload, profile)
    if VALUE_PROFILER
        storei tag, ValueProfile::m_buckets + TagOffset[profile]
        storei payload, ValueProfile::m_buckets + PayloadOffset[profile]
    end
end
# Entrypoints into the interpreter

# Check that the incoming argument count satisfies the callee's declared
# parameter count; otherwise let slow_path fix up the frame.
# Expects that CodeBlock is in t1, which is what prologue() leaves behind.
macro functionArityCheck(doneLabel, slow_path)
    loadi PayloadOffset + ArgumentCount[cfr], t0
    biaeq t0, CodeBlock::m_numParameters[t1], doneLabel
    cCall2(slow_path, cfr, PC) # This slow_path has a simple protocol: t0 = 0 => no error, t0 != 0 => error
    move t1, cfr
    btiz t0, .continue
    # Arity fixup signalled an error: unwind to the frame and machine PC
    # registered in the VM for throwing.
    loadp JITStackFrame::vm[sp], t1
    loadp VM::callFrameForThrow[t1], t0
    jmp VM::targetMachinePCForThrow[t1]
.continue:
    # Reload CodeBlock and PC, since the slow_path clobbered it.
    loadp CodeBlock[cfr], t1
    loadp CodeBlock::m_instructions[t1], PC
    jmp doneLabel
end
# Instruction implementations

# op_enter: initialize every local variable of the new frame to undefined.
_llint_op_enter:
    traceExecution()
    loadp CodeBlock[cfr], t2 // t2<CodeBlock> = cfr.CodeBlock
    loadi CodeBlock::m_numVars[t2], t2 // t2<size_t> = t2<CodeBlock>.m_numVars
    btiz t2, .opEnterDone
    move UndefinedTag, t0
    move 0, t1
.opEnterLoop:
    # Count down from m_numVars - 1 to 0, storing undefined in each slot.
    subi 1, t2
    storei t0, TagOffset[cfr, t2, 8]
    storei t1, PayloadOffset[cfr, t2, 8]
    btinz t2, .opEnterLoop
.opEnterDone:
    dispatch(1)

# op_create_activation: create the activation object lazily — only if the
# target register is still empty.
_llint_op_create_activation:
    traceExecution()
    loadi 4[PC], t0                         # operand 1: activation register
    bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opCreateActivationDone
    callSlowPath(_llint_slow_path_create_activation)
.opCreateActivationDone:
    dispatch(2)

# op_init_lazy_reg: store the empty value, marking the register as
# not-yet-materialized.
_llint_op_init_lazy_reg:
    traceExecution()
    loadi 4[PC], t0
    storei EmptyValueTag, TagOffset[cfr, t0, 8]
    storei 0, PayloadOffset[cfr, t0, 8]
    dispatch(2)

# op_create_arguments: same lazy pattern as op_create_activation.
_llint_op_create_arguments:
    traceExecution()
    loadi 4[PC], t0
    bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opCreateArgumentsDone
    callSlowPath(_llint_slow_path_create_arguments)
.opCreateArgumentsDone:
    dispatch(2)
# op_create_this: allocate the `this` object from the callee's cached
# allocation profile; fall back to C++ if no allocator is cached or
# inline allocation fails.
_llint_op_create_this:
    traceExecution()
    loadi 8[PC], t0                         # operand 2: the callee
    loadp PayloadOffset[cfr, t0, 8], t0
    loadp JSFunction::m_allocationProfile + ObjectAllocationProfile::m_allocator[t0], t1
    loadp JSFunction::m_allocationProfile + ObjectAllocationProfile::m_structure[t0], t2
    btpz t1, .opCreateThisSlow              # no cached allocator yet
    allocateJSObject(t1, t2, t0, t3, .opCreateThisSlow)
    loadi 4[PC], t1                         # operand 1: destination
    storei CellTag, TagOffset[cfr, t1, 8]
    storei t0, PayloadOffset[cfr, t1, 8]
    dispatch(4)
.opCreateThisSlow:
    callSlowPath(_llint_slow_path_create_this)
    dispatch(4)

# op_get_callee: load the callee cell out of the frame, value-profile it,
# and store it to the destination register.
_llint_op_get_callee:
    traceExecution()
    loadi 4[PC], t0                         # operand 1: destination
    loadp PayloadOffset + Callee[cfr], t1
    loadp 8[PC], t2                         # operand 2: ValueProfile*
    valueProfile(CellTag, t1, t2)
    storei CellTag, TagOffset[cfr, t0, 8]
    storei t1, PayloadOffset[cfr, t0, 8]
    dispatch(3)
# op_convert_this: fast path only when `this` is already a cell of at least
# ObjectType; primitives (and non-object cells such as strings) are
# converted by the slow path.
_llint_op_convert_this:
    traceExecution()
    loadi 4[PC], t0
    bineq TagOffset[cfr, t0, 8], CellTag, .opConvertThisSlow
    loadi PayloadOffset[cfr, t0, 8], t0
    loadp JSCell::m_structure[t0], t0       # t0 now holds the Structure, not the cell
    bbb Structure::m_typeInfo + TypeInfo::m_type[t0], ObjectType, .opConvertThisSlow
    loadi 8[PC], t1                         # operand 2: ValueProfile*
    # NOTE(review): t0 is the Structure pointer here, so the profile records
    # the structure rather than the original cell — verify against upstream.
    valueProfile(CellTag, t0, t1)
    dispatch(3)
.opConvertThisSlow:
    callSlowPath(_llint_slow_path_convert_this)
    dispatch(3)

# op_new_object: allocate an empty object from the instruction's cached
# ObjectAllocationProfile, falling back to C++ if inline allocation fails.
_llint_op_new_object:
    traceExecution()
    loadpFromInstruction(3, t0)             # operand 3: ObjectAllocationProfile*
    loadp ObjectAllocationProfile::m_allocator[t0], t1
    loadp ObjectAllocationProfile::m_structure[t0], t2
    allocateJSObject(t1, t2, t0, t3, .opNewObjectSlow)
    loadi 4[PC], t1                         # operand 1: destination
    storei CellTag, TagOffset[cfr, t1, 8]
    storei t0, PayloadOffset[cfr, t1, 8]
    dispatch(4)
.opNewObjectSlow:
    callSlowPath(_llint_slow_path_new_object)
    dispatch(4)
# op_mov: copy a value (tag and payload) from source to destination.
_llint_op_mov:
    traceExecution()
    loadi 8[PC], t1                         # operand 2: source
    loadi 4[PC], t0                         # operand 1: destination
    loadConstantOrVariable(t1, t2, t3)
    storei t2, TagOffset[cfr, t0, 8]
    storei t3, PayloadOffset[cfr, t0, 8]
    dispatch(3)

# op_not: fast path flips a boolean payload in place; any non-boolean
# needs the C++ toBoolean() conversion.
_llint_op_not:
    traceExecution()
    loadi 8[PC], t0
    loadi 4[PC], t1
    loadConstantOrVariable(t0, t2, t3)
    bineq t2, BooleanTag, .opNotSlow
    xori 1, t3                              # invert the boolean payload
    storei t2, TagOffset[cfr, t1, 8]
    storei t3, PayloadOffset[cfr, t1, 8]
    dispatch(3)
.opNotSlow:
    callSlowPath(_llint_slow_path_not)
    dispatch(3)
# op_eq: abstract equality. Fast path only when the tags match exactly and
# the value is neither a cell (strings need content compare) nor a double
# (tags unsigned-below LowestTag); then equality is payload equality.
_llint_op_eq:
    traceExecution()
    loadi 12[PC], t2                        # operand 3: second source
    loadi 8[PC], t0                         # operand 2: first source
    loadConstantOrVariable(t2, t3, t1)      # t3/t1 = tag/payload of src2
    loadConstantOrVariable2Reg(t0, t2, t0)  # t2/t0 = tag/payload of src1
    bineq t2, t3, .opEqSlow
    bieq t2, CellTag, .opEqSlow
    bib t2, LowestTag, .opEqSlow
    loadi 4[PC], t2                         # operand 1: destination
    cieq t0, t1, t0
    storei BooleanTag, TagOffset[cfr, t2, 8]
    storei t0, PayloadOffset[cfr, t2, 8]
    dispatch(4)
.opEqSlow:
    callSlowPath(_llint_slow_path_eq)
    dispatch(4)

# op_eq_null: a cell equals null only if its structure masquerades as
# undefined for this code block's global object; a non-cell equals null iff
# its tag is Null or Undefined.
_llint_op_eq_null:
    traceExecution()
    loadi 8[PC], t0
    loadi 4[PC], t3
    assertNotConstant(t0)
    loadi TagOffset[cfr, t0, 8], t1
    loadi PayloadOffset[cfr, t0, 8], t0
    bineq t1, CellTag, .opEqNullImmediate
    loadp JSCell::m_structure[t0], t1
    btbnz Structure::m_typeInfo + TypeInfo::m_flags[t1], MasqueradesAsUndefined, .opEqNullMasqueradesAsUndefined
    move 0, t1                              # ordinary cell: never == null
    jmp .opEqNullNotImmediate
.opEqNullMasqueradesAsUndefined:
    loadp CodeBlock[cfr], t0
    loadp CodeBlock::m_globalObject[t0], t0
    cpeq Structure::m_globalObject[t1], t0, t1
    jmp .opEqNullNotImmediate
.opEqNullImmediate:
    cieq t1, NullTag, t2
    cieq t1, UndefinedTag, t1
    ori t2, t1                              # result = isNull | isUndefined
.opEqNullNotImmediate:
    storei BooleanTag, TagOffset[cfr, t3, 8]
    storei t1, PayloadOffset[cfr, t3, 8]
    dispatch(3)
# op_neq: negated form of op_eq; same fast-path conditions (matching tags,
# not a cell, not a double), result is payload inequality.
_llint_op_neq:
    traceExecution()
    loadi 12[PC], t2                        # operand 3: second source
    loadi 8[PC], t0                         # operand 2: first source
    loadConstantOrVariable(t2, t3, t1)      # t3/t1 = tag/payload of src2
    loadConstantOrVariable2Reg(t0, t2, t0)  # t2/t0 = tag/payload of src1
    bineq t2, t3, .opNeqSlow
    bieq t2, CellTag, .opNeqSlow
    bib t2, LowestTag, .opNeqSlow
    loadi 4[PC], t2                         # operand 1: destination
    cineq t0, t1, t0
    storei BooleanTag, TagOffset[cfr, t2, 8]
    storei t0, PayloadOffset[cfr, t2, 8]
    dispatch(4)
.opNeqSlow:
    callSlowPath(_llint_slow_path_neq)
    dispatch(4)

# op_neq_null: negated form of op_eq_null.
_llint_op_neq_null:
    traceExecution()
    loadi 8[PC], t0
    loadi 4[PC], t3
    assertNotConstant(t0)
    loadi TagOffset[cfr, t0, 8], t1
    loadi PayloadOffset[cfr, t0, 8], t0
    bineq t1, CellTag, .opNeqNullImmediate
    loadp JSCell::m_structure[t0], t1
    btbnz Structure::m_typeInfo + TypeInfo::m_flags[t1], MasqueradesAsUndefined, .opNeqNullMasqueradesAsUndefined
    move 1, t1                              # ordinary cell: always != null
    jmp .opNeqNullNotImmediate
.opNeqNullMasqueradesAsUndefined:
    loadp CodeBlock[cfr], t0
    loadp CodeBlock::m_globalObject[t0], t0
    cpneq Structure::m_globalObject[t1], t0, t1
    jmp .opNeqNullNotImmediate
.opNeqNullImmediate:
    cineq t1, NullTag, t2
    cineq t1, UndefinedTag, t1
    andi t2, t1                             # result = !isNull & !isUndefined
.opNeqNullNotImmediate:
    storei BooleanTag, TagOffset[cfr, t3, 8]
    storei t1, PayloadOffset[cfr, t3, 8]
    dispatch(3)
# Shared implementation of === / !==. Fast path requires identical tags and
# no doubles; two cells may be compared by payload (pointer) unless both are
# strings, in which case content comparison is needed in C++.
macro strictEq(equalityOperation, slowPath)
    loadi 12[PC], t2
    loadi 8[PC], t0
    loadConstantOrVariable(t2, t3, t1)      # t3/t1 = tag/payload of src2
    loadConstantOrVariable2Reg(t0, t2, t0)  # t2/t0 = tag/payload of src1
    bineq t2, t3, .slow
    bib t2, LowestTag, .slow                # doubles -> slow
    bineq t2, CellTag, .notString
    loadp JSCell::m_structure[t0], t2
    loadp JSCell::m_structure[t1], t3
    bbneq Structure::m_typeInfo + TypeInfo::m_type[t2], StringType, .notString
    bbeq Structure::m_typeInfo + TypeInfo::m_type[t3], StringType, .slow
.notString:
    loadi 4[PC], t2                         # operand 1: destination
    equalityOperation(t0, t1, t0)
    storei BooleanTag, TagOffset[cfr, t2, 8]
    storei t0, PayloadOffset[cfr, t2, 8]
    dispatch(4)
.slow:
    callSlowPath(slowPath)
    dispatch(4)
end

_llint_op_stricteq:
    traceExecution()
    strictEq(macro (left, right, result) cieq left, right, result end, _llint_slow_path_stricteq)

_llint_op_nstricteq:
    traceExecution()
    strictEq(macro (left, right, result) cineq left, right, result end, _llint_slow_path_nstricteq)
# op_inc: pre-increment of an int32 in place; overflow (or a non-int value)
# goes to the slow path, which produces a double.
_llint_op_inc:
    traceExecution()
    loadi 4[PC], t0                         # operand 1: register to increment
    bineq TagOffset[cfr, t0, 8], Int32Tag, .opIncSlow
    loadi PayloadOffset[cfr, t0, 8], t1
    baddio 1, t1, .opIncSlow                # branch to slow on signed overflow
    storei t1, PayloadOffset[cfr, t0, 8]
    dispatch(2)
.opIncSlow:
    callSlowPath(_llint_slow_path_pre_inc)
    dispatch(2)

# op_dec: pre-decrement, mirror of op_inc.
_llint_op_dec:
    traceExecution()
    loadi 4[PC], t0
    bineq TagOffset[cfr, t0, 8], Int32Tag, .opDecSlow
    loadi PayloadOffset[cfr, t0, 8], t1
    bsubio 1, t1, .opDecSlow                # branch to slow on signed overflow
    storei t1, PayloadOffset[cfr, t0, 8]
    dispatch(2)
.opDecSlow:
    callSlowPath(_llint_slow_path_pre_dec)
    dispatch(2)
# op_to_number: if the value is already a number (int32, or a double — tag
# unsigned-below LowestTag) store it unchanged; otherwise convert in C++.
_llint_op_to_number:
    traceExecution()
    loadi 8[PC], t0
    loadi 4[PC], t1
    loadConstantOrVariable(t0, t2, t3)
    bieq t2, Int32Tag, .opToNumberIsInt
    biaeq t2, LowestTag, .opToNumberSlow    # non-double, non-int tag -> slow
.opToNumberIsInt:
    storei t2, TagOffset[cfr, t1, 8]
    storei t3, PayloadOffset[cfr, t1, 8]
    dispatch(3)
.opToNumberSlow:
    callSlowPath(_llint_slow_path_to_number)
    dispatch(3)

# op_negate: int32 fast path unless the payload is 0 or 0x80000000 (those
# produce -0 / overflow, which need doubles); double fast path flips the
# sign bit, which lives in the tag (high) word.
_llint_op_negate:
    traceExecution()
    loadi 8[PC], t0
    loadi 4[PC], t3
    loadConstantOrVariable(t0, t1, t2)
    bineq t1, Int32Tag, .opNegateSrcNotInt
    btiz t2, 0x7fffffff, .opNegateSlow      # payload is 0 or INT_MIN -> slow
    negi t2
    storei Int32Tag, TagOffset[cfr, t3, 8]
    storei t2, PayloadOffset[cfr, t3, 8]
    dispatch(3)
.opNegateSrcNotInt:
    bia t1, LowestTag, .opNegateSlow        # not a double either -> slow
    xori 0x80000000, t1                     # flip IEEE sign bit in the high word
    storei t1, TagOffset[cfr, t3, 8]
    storei t2, PayloadOffset[cfr, t3, 8]
    dispatch(3)
.opNegateSlow:
    callSlowPath(_llint_slow_path_negate)
    dispatch(3)
# Shared skeleton for arithmetic binary ops. Operands:
#   t3/t1 = tag/payload of the second source, t2/t0 = tag/payload of the
#   first. If both are int32s, integerOperationAndStore(int32Tag, left,
#   right, slow, index) computes and stores the result itself. Otherwise the
#   operands are converted to doubles (ft0 = first, ft1 = second),
#   doubleOperation(ft1, ft0) computes into ft0, and the raw double is
#   stored over the destination's (tag, payload) pair.
macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slowPath)
    loadi 12[PC], t2
    loadi 8[PC], t0
    loadConstantOrVariable(t2, t3, t1)
    loadConstantOrVariable2Reg(t0, t2, t0)
    bineq t2, Int32Tag, .op1NotInt
    bineq t3, Int32Tag, .op2NotInt
    loadi 4[PC], t2                         # operand 1: destination
    integerOperationAndStore(t3, t1, t0, .slow, t2)
    dispatch(5)
.op1NotInt:
    # First operand is definitely not an int, the second operand could be anything.
    bia t2, LowestTag, .slow                # first operand not a double -> slow
    bib t3, LowestTag, .op1NotIntOp2Double
    bineq t3, Int32Tag, .slow
    ci2d t1, ft1                            # second operand: int -> double
    jmp .op1NotIntReady
.op1NotIntOp2Double:
    fii2d t1, t3, ft1                       # second operand: (payload, tag) -> double
.op1NotIntReady:
    loadi 4[PC], t1
    fii2d t0, t2, ft0                       # first operand -> double
    doubleOperation(ft1, ft0)
    stored ft0, [cfr, t1, 8]
    dispatch(5)
.op2NotInt:
    # First operand is definitely an int, the second operand is definitely not.
    loadi 4[PC], t2
    bia t3, LowestTag, .slow                # second operand not a double -> slow
    ci2d t0, ft0
    fii2d t1, t3, ft1
    doubleOperation(ft1, ft0)
    stored ft0, [cfr, t2, 8]
    dispatch(5)
.slow:
    callSlowPath(slowPath)
    dispatch(5)
end

# Convenience wrapper: integerOperation(left, right, slow) leaves its result
# in `right`, which is then stored with an Int32 tag.
macro binaryOp(integerOperation, doubleOperation, slowPath)
    binaryOpCustomStore(
        macro (int32Tag, left, right, slow, index)
            integerOperation(left, right, slow)
            storei int32Tag, TagOffset[cfr, index, 8]
            storei right, PayloadOffset[cfr, index, 8]
        end,
        doubleOperation, slowPath)
end
# op_add: int path branches to slow on signed overflow; double path adds.
_llint_op_add:
    traceExecution()
    binaryOp(
        macro (left, right, slow) baddio left, right, slow end,
        macro (left, right) addd left, right end,
        _llint_slow_path_add)

# op_mul: int path branches to slow on overflow; a zero result with either
# operand negative must be -0, which is a double, so that also goes slow.
_llint_op_mul:
    traceExecution()
    binaryOpCustomStore(
        macro (int32Tag, left, right, slow, index)
            const scratch = int32Tag # We know that we can reuse the int32Tag register since it has a constant.
            move right, scratch
            bmulio left, scratch, slow
            btinz scratch, .done
            bilt left, 0, slow              # zero result, negative operand => -0
            bilt right, 0, slow
        .done:
            storei Int32Tag, TagOffset[cfr, index, 8]
            storei scratch, PayloadOffset[cfr, index, 8]
        end,
        macro (left, right) muld left, right end,
        _llint_slow_path_mul)

# op_sub: mirror of op_add.
_llint_op_sub:
    traceExecution()
    binaryOp(
        macro (left, right, slow) bsubio left, right, slow end,
        macro (left, right) subd left, right end,
        _llint_slow_path_sub)

# op_div: division is always done in double arithmetic; if the quotient
# converts back to an int32 exactly, it is stored as an int32, otherwise
# the raw double is stored.
_llint_op_div:
    traceExecution()
    binaryOpCustomStore(
        macro (int32Tag, left, right, slow, index)
            ci2d left, ft0
            ci2d right, ft1
            divd ft0, ft1                   # ft1 = ft1 / ft0
            bcd2i ft1, right, .notInt       # branch if not exactly an int32
            storei int32Tag, TagOffset[cfr, index, 8]
            storei right, PayloadOffset[cfr, index, 8]
            jmp .done
        .notInt:
            stored ft1, [cfr, index, 8]
        .done:
        end,
        macro (left, right) divd left, right end,
        _llint_slow_path_div)
# Shared skeleton for the bitwise/shift opcodes: both operands must be
# int32s; operation(left, right, slow) leaves its result in `right` (t0),
# which is stored as an int32. `advance` is the opcode length in slots.
macro bitOp(operation, slowPath, advance)
    loadi 12[PC], t2
    loadi 8[PC], t0
    loadConstantOrVariable(t2, t3, t1)      # t3/t1 = tag/payload of src2
    loadConstantOrVariable2Reg(t0, t2, t0)  # t2/t0 = tag/payload of src1
    bineq t3, Int32Tag, .slow
    bineq t2, Int32Tag, .slow
    loadi 4[PC], t2                         # operand 1: destination
    operation(t1, t0, .slow)
    storei t3, TagOffset[cfr, t2, 8]        # t3 still holds Int32Tag here
    storei t0, PayloadOffset[cfr, t2, 8]
    dispatch(advance)
.slow:
    callSlowPath(slowPath)
    dispatch(advance)
end

_llint_op_lshift:
    traceExecution()
    bitOp(
        macro (left, right, slow) lshifti left, right end,
        _llint_slow_path_lshift,
        4)

_llint_op_rshift:
    traceExecution()
    bitOp(
        macro (left, right, slow) rshifti left, right end,
        _llint_slow_path_rshift,
        4)

# op_urshift: a negative result means the unsigned value does not fit in an
# int32 (it must become a double), so it goes to the slow path.
_llint_op_urshift:
    traceExecution()
    bitOp(
        macro (left, right, slow)
            urshifti left, right
            bilt right, 0, slow
        end,
        _llint_slow_path_urshift,
        4)

_llint_op_bitand:
    traceExecution()
    bitOp(
        macro (left, right, slow) andi left, right end,
        _llint_slow_path_bitand,
        5)

_llint_op_bitxor:
    traceExecution()
    bitOp(
        macro (left, right, slow) xori left, right end,
        _llint_slow_path_bitxor,
        5)

_llint_op_bitor:
    traceExecution()
    bitOp(
        macro (left, right, slow) ori left, right end,
        _llint_slow_path_bitor,
        5)
  # check_has_instance: verify the base is a cell whose structure has the
  # ImplementsDefaultHasInstance flag; otherwise defer to the slow path.
  715. _llint_op_check_has_instance:
  716. traceExecution()
  717. loadi 12[PC], t1
  718. loadConstantOrVariablePayload(t1, CellTag, t0, .opCheckHasInstanceSlow)
  719. loadp JSCell::m_structure[t0], t0
  720. btbz Structure::m_typeInfo + TypeInfo::m_flags[t0], ImplementsDefaultHasInstance, .opCheckHasInstanceSlow
  721. dispatch(5)
  722. .opCheckHasInstanceSlow:
  723. callSlowPath(_llint_slow_path_check_has_instance)
  # dispatch(0): the slow path sets PC itself (it may branch).
  724. dispatch(0)
  725. _llint_op_instanceof:
  726. traceExecution()
  727. # Actually do the work.
  728. loadi 12[PC], t0
  729. loadi 4[PC], t3
  # Prototype operand must be a cell of ObjectType or below fails to slow.
  730. loadConstantOrVariablePayload(t0, CellTag, t1, .opInstanceofSlow)
  731. loadp JSCell::m_structure[t1], t2
  732. bbb Structure::m_typeInfo + TypeInfo::m_type[t2], ObjectType, .opInstanceofSlow
  733. loadi 8[PC], t0
  734. loadConstantOrVariablePayload(t0, CellTag, t2, .opInstanceofSlow)
  735. # Register state: t1 = prototype, t2 = value
  # Walk the value's prototype chain; t0 holds the boolean result
  # (optimistically 1, cleared to 0 if we hit the end of the chain).
  736. move 1, t0
  737. .opInstanceofLoop:
  738. loadp JSCell::m_structure[t2], t2
  739. loadi Structure::m_prototype + PayloadOffset[t2], t2
  740. bpeq t2, t1, .opInstanceofDone
  741. btinz t2, .opInstanceofLoop
  742. move 0, t0
  743. .opInstanceofDone:
  744. storei BooleanTag, TagOffset[cfr, t3, 8]
  745. storei t0, PayloadOffset[cfr, t3, 8]
  746. dispatch(4)
  747. .opInstanceofSlow:
  748. callSlowPath(_llint_slow_path_instanceof)
  749. dispatch(4)
  # Type-predicate opcodes: each stores a Boolean into the destination slot.
  # is_undefined must special-case cells whose structures masquerade as
  # undefined (document.all-style objects).
  750. _llint_op_is_undefined:
  751. traceExecution()
  752. loadi 8[PC], t1
  753. loadi 4[PC], t0
  754. loadConstantOrVariable(t1, t2, t3)
  755. storei BooleanTag, TagOffset[cfr, t0, 8]
  756. bieq t2, CellTag, .opIsUndefinedCell
  # Non-cell: result is simply (tag == UndefinedTag).
  757. cieq t2, UndefinedTag, t3
  758. storei t3, PayloadOffset[cfr, t0, 8]
  759. dispatch(3)
  760. .opIsUndefinedCell:
  761. loadp JSCell::m_structure[t3], t1
  762. btbnz Structure::m_typeInfo + TypeInfo::m_flags[t1], MasqueradesAsUndefined, .opIsUndefinedMasqueradesAsUndefined
  763. move 0, t1
  764. storei t1, PayloadOffset[cfr, t0, 8]
  765. dispatch(3)
  766. .opIsUndefinedMasqueradesAsUndefined:
  # A masquerader only reads as undefined from within its own global object.
  767. loadp CodeBlock[cfr], t3
  768. loadp CodeBlock::m_globalObject[t3], t3
  769. cpeq Structure::m_globalObject[t1], t3, t1
  770. storei t1, PayloadOffset[cfr, t0, 8]
  771. dispatch(3)
  772. _llint_op_is_boolean:
  773. traceExecution()
  774. loadi 8[PC], t1
  775. loadi 4[PC], t2
  776. loadConstantOrVariableTag(t1, t0)
  777. cieq t0, BooleanTag, t0
  778. storei BooleanTag, TagOffset[cfr, t2, 8]
  779. storei t0, PayloadOffset[cfr, t2, 8]
  780. dispatch(3)
  781. _llint_op_is_number:
  782. traceExecution()
  783. loadi 8[PC], t1
  784. loadi 4[PC], t2
  785. loadConstantOrVariableTag(t1, t0)
  786. storei BooleanTag, TagOffset[cfr, t2, 8]
  # Numbers are Int32Tag or any double tag; the tags are laid out so that
  # (tag + 1) <= LowestTag + 1 (unsigned) holds exactly for numbers.
  787. addi 1, t0
  788. cib t0, LowestTag + 1, t1
  789. storei t1, PayloadOffset[cfr, t2, 8]
  790. dispatch(3)
  791. _llint_op_is_string:
  792. traceExecution()
  793. loadi 8[PC], t1
  794. loadi 4[PC], t2
  795. loadConstantOrVariable(t1, t0, t3)
  796. storei BooleanTag, TagOffset[cfr, t2, 8]
  797. bineq t0, CellTag, .opIsStringNotCell
  798. loadp JSCell::m_structure[t3], t0
  799. cbeq Structure::m_typeInfo + TypeInfo::m_type[t0], StringType, t1
  800. storei t1, PayloadOffset[cfr, t2, 8]
  801. dispatch(3)
  802. .opIsStringNotCell:
  803. storep 0, PayloadOffset[cfr, t2, 8]
  804. dispatch(3)
  # Load a property (tag, payload) given a dynamic property offset.
  # Out-of-line offsets index *backwards* from the butterfly, hence the negi;
  # the (firstOutOfLineOffset - 2) * 8 constant rebases the negated offset.
  805. macro loadPropertyAtVariableOffsetKnownNotInline(propertyOffset, objectAndStorage, tag, payload)
  806. assert(macro (ok) bigteq propertyOffset, firstOutOfLineOffset, ok end)
  807. negi propertyOffset
  808. loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
  809. loadi TagOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], tag
  810. loadi PayloadOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], payload
  811. end
  # General form: offsets below firstOutOfLineOffset live inline in the object
  # itself; the inline branch rebases objectAndStorage so that the shared
  # load expressions below work for both cases.
  812. macro loadPropertyAtVariableOffset(propertyOffset, objectAndStorage, tag, payload)
  813. bilt propertyOffset, firstOutOfLineOffset, .isInline
  814. loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
  815. negi propertyOffset
  816. jmp .ready
  817. .isInline:
  818. addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
  819. .ready:
  820. loadi TagOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], tag
  821. loadi PayloadOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], payload
  822. end
  # Monomorphic global-resolve fast path: succeeds only if the global object's
  # structure still matches the one cached in the instruction stream.
  823. macro resolveGlobal(size, slow)
  824. # Operands are as follows:
  825. # 4[PC] Destination for the load.
  826. # 8[PC] Property identifier index in the code block.
  827. # 12[PC] Structure pointer, initialized to 0 by bytecode generator.
  828. # 16[PC] Offset in global object, initialized to 0 by bytecode generator.
  829. loadp CodeBlock[cfr], t0
  830. loadp CodeBlock::m_globalObject[t0], t0
  831. loadp JSCell::m_structure[t0], t1
  # Structure check against the inline cache; a 0-initialized cache fails here.
  832. bpneq t1, 12[PC], slow
  833. loadi 16[PC], t1
  834. loadPropertyAtVariableOffsetKnownNotInline(t1, t0, t2, t3)
  835. loadi 4[PC], t0
  836. storei t2, TagOffset[cfr, t0, 8]
  837. storei t3, PayloadOffset[cfr, t0, 8]
  # The value profile slot is the opcode's last operand.
  838. loadi (size - 1) * 4[PC], t0
  839. valueProfile(t2, t3, t0)
  840. end
  # init_global_const: store a constant-or-variable into a global variable
  # slot whose absolute address is operand 4[PC].
  841. _llint_op_init_global_const:
  842. traceExecution()
  843. loadi 8[PC], t1
  844. loadi 4[PC], t0
  845. loadConstantOrVariable(t1, t2, t3)
  846. writeBarrier(t2, t3)
  847. storei t2, TagOffset[t0]
  848. storei t3, PayloadOffset[t0]
  849. dispatch(5)
  850. _llint_op_init_global_const_check:
  851. traceExecution()
  852. loadp 12[PC], t2
  # If the watchpoint/check byte at [t2] is set, take the slow path instead
  # of writing directly. NOTE(review): exact semantics of the flag byte are
  # defined by the slow path — confirm in LLIntSlowPaths.
  853. loadi 8[PC], t1
  854. loadi 4[PC], t0
  855. btbnz [t2], .opInitGlobalConstCheckSlow
  856. loadConstantOrVariable(t1, t2, t3)
  857. writeBarrier(t2, t3)
  858. storei t2, TagOffset[t0]
  859. storei t3, PayloadOffset[t0]
  860. dispatch(5)
  861. .opInitGlobalConstCheckSlow:
  862. callSlowPath(_llint_slow_path_init_global_const_check)
  863. dispatch(5)
  864. # We only do monomorphic get_by_id caching for now, and we do not modify the
  865. # opcode. We do, however, allow for the cache to change anytime if fails, since
  866. # ping-ponging is free. At best we get lucky and the get_by_id will continue
  867. # to take fast path on the new cache. At worst we take slow path, which is what
  868. # we would have been doing anyway.
  # getPropertyStorage abstracts inline vs out-of-line storage; it hands the
  # callback a pointer to the property storage plus a scratch register.
  869. macro getById(getPropertyStorage)
  870. traceExecution()
  871. loadi 8[PC], t0
  872. loadi 16[PC], t1
  873. loadConstantOrVariablePayload(t0, CellTag, t3, .opGetByIdSlow)
  874. loadi 20[PC], t2
  875. getPropertyStorage(
  876. t3,
  877. t0,
  878. macro (propertyStorage, scratch)
  # Cache hit requires the base's structure to equal the cached one at 16[PC].
  879. bpneq JSCell::m_structure[t3], t1, .opGetByIdSlow
  880. loadi 4[PC], t1
  881. loadi TagOffset[propertyStorage, t2], scratch
  882. loadi PayloadOffset[propertyStorage, t2], t2
  883. storei scratch, TagOffset[cfr, t1, 8]
  884. storei t2, PayloadOffset[cfr, t1, 8]
  885. loadi 32[PC], t1
  886. valueProfile(scratch, t2, t1)
  887. dispatch(9)
  888. end)
  889. .opGetByIdSlow:
  890. callSlowPath(_llint_slow_path_get_by_id)
  891. dispatch(9)
  892. end
  893. _llint_op_get_by_id:
  894. getById(withInlineStorage)
  895. _llint_op_get_by_id_out_of_line:
  896. getById(withOutOfLineStorage)
  # Specialized get_by_id for .length on arrays: reads the butterfly's public
  # length. Bails if the cell is not an array with a simple indexing shape, or
  # if the length does not fit in a non-negative int32.
  897. _llint_op_get_array_length:
  898. traceExecution()
  899. loadi 8[PC], t0
  900. loadp 16[PC], t1
  901. loadConstantOrVariablePayload(t0, CellTag, t3, .opGetArrayLengthSlow)
  902. loadp JSCell::m_structure[t3], t2
  903. arrayProfile(t2, t1, t0)
  904. btiz t2, IsArray, .opGetArrayLengthSlow
  905. btiz t2, IndexingShapeMask, .opGetArrayLengthSlow
  906. loadi 4[PC], t1
  907. loadp 32[PC], t2
  908. loadp JSObject::m_butterfly[t3], t0
  909. loadi -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0], t0
  # Length >= 2^31 cannot be an int32 — slow path will box it as a double.
  910. bilt t0, 0, .opGetArrayLengthSlow
  911. valueProfile(Int32Tag, t0, t2)
  912. storep t0, PayloadOffset[cfr, t1, 8]
  913. storep Int32Tag, TagOffset[cfr, t1, 8]
  914. dispatch(9)
  915. .opGetArrayLengthSlow:
  916. callSlowPath(_llint_slow_path_get_by_id)
  917. dispatch(9)
  # arguments.length fast path: valid only while the arguments object has not
  # been created (its register slot is still empty); count excludes `this`.
  918. _llint_op_get_arguments_length:
  919. traceExecution()
  920. loadi 8[PC], t0
  921. loadi 4[PC], t1
  922. bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opGetArgumentsLengthSlow
  923. loadi ArgumentCount + PayloadOffset[cfr], t2
  924. subi 1, t2
  925. storei Int32Tag, TagOffset[cfr, t1, 8]
  926. storei t2, PayloadOffset[cfr, t1, 8]
  927. dispatch(4)
  928. .opGetArgumentsLengthSlow:
  929. callSlowPath(_llint_slow_path_get_arguments_length)
  930. dispatch(4)
  # Monomorphic put_by_id fast path (no structure transition). The shared
  # .opPutByIdSlow label below is also the bail-out target for the macros,
  # which is why it sits between the two entry points.
  931. macro putById(getPropertyStorage)
  932. traceExecution()
  933. loadi 4[PC], t3
  934. loadi 16[PC], t1
  935. loadConstantOrVariablePayload(t3, CellTag, t0, .opPutByIdSlow)
  936. loadi 12[PC], t2
  937. getPropertyStorage(
  938. t0,
  939. t3,
  940. macro (propertyStorage, scratch)
  941. bpneq JSCell::m_structure[t0], t1, .opPutByIdSlow
  942. loadi 20[PC], t1
  943. loadConstantOrVariable2Reg(t2, scratch, t2)
  944. writeBarrier(scratch, t2)
  945. storei scratch, TagOffset[propertyStorage, t1]
  946. storei t2, PayloadOffset[propertyStorage, t1]
  947. dispatch(9)
  948. end)
  949. end
  950. _llint_op_put_by_id:
  951. putById(withInlineStorage)
  952. .opPutByIdSlow:
  953. callSlowPath(_llint_slow_path_put_by_id)
  954. dispatch(9)
  955. _llint_op_put_by_id_out_of_line:
  956. putById(withOutOfLineStorage)
  # Transitioning put_by_id: checks the old structure (12/16[PC] wait — 16[PC]
  # is the old structure, 24[PC] the new one), optionally validates the
  # prototype chain, stores the value, then installs the new structure.
  957. macro putByIdTransition(additionalChecks, getPropertyStorage)
  958. traceExecution()
  959. loadi 4[PC], t3
  960. loadi 16[PC], t1
  961. loadConstantOrVariablePayload(t3, CellTag, t0, .opPutByIdSlow)
  962. loadi 12[PC], t2
  963. bpneq JSCell::m_structure[t0], t1, .opPutByIdSlow
  964. additionalChecks(t1, t3)
  965. loadi 20[PC], t1
  966. getPropertyStorage(
  967. t0,
  968. t3,
  969. macro (propertyStorage, scratch)
  970. addp t1, propertyStorage, t3
  971. loadConstantOrVariable2Reg(t2, t1, t2)
  972. writeBarrier(t1, t2)
  973. storei t1, TagOffset[t3]
  974. loadi 24[PC], t1
  975. storei t2, PayloadOffset[t3]
  # Publish the new structure only after the value is fully stored.
  976. storep t1, JSCell::m_structure[t0]
  977. dispatch(9)
  978. end)
  979. end
  980. macro noAdditionalChecks(oldStructure, scratch)
  981. end
  # Walk the cached structure chain (28[PC]) and verify each prototype's
  # structure still matches; any mismatch falls back to the slow path.
  982. macro structureChainChecks(oldStructure, scratch)
  983. const protoCell = oldStructure # Reusing the oldStructure register for the proto
  984. loadp 28[PC], scratch
  985. assert(macro (ok) btpnz scratch, ok end)
  986. loadp StructureChain::m_vector[scratch], scratch
  987. assert(macro (ok) btpnz scratch, ok end)
  988. bieq Structure::m_prototype + TagOffset[oldStructure], NullTag, .done
  989. .loop:
  990. loadi Structure::m_prototype + PayloadOffset[oldStructure], protoCell
  991. loadp JSCell::m_structure[protoCell], oldStructure
  992. bpneq oldStructure, [scratch], .opPutByIdSlow
  993. addp 4, scratch
  994. bineq Structure::m_prototype + TagOffset[oldStructure], NullTag, .loop
  995. .done:
  996. end
  997. _llint_op_put_by_id_transition_direct:
  998. putByIdTransition(noAdditionalChecks, withInlineStorage)
  999. _llint_op_put_by_id_transition_direct_out_of_line:
  1000. putByIdTransition(noAdditionalChecks, withOutOfLineStorage)
  1001. _llint_op_put_by_id_transition_normal:
  1002. putByIdTransition(structureChainChecks, withInlineStorage)
  1003. _llint_op_put_by_id_transition_normal_out_of_line:
  1004. putByIdTransition(structureChainChecks, withOutOfLineStorage)
  # get_by_val fast paths, dispatched on the base's indexing shape:
  # Int32/Contiguous (boxed slots), Double (raw doubles), and ArrayStorage
  # (vector with hole tags). Everything else goes to the slow path.
  1005. _llint_op_get_by_val:
  1006. traceExecution()
  1007. loadi 8[PC], t2
  1008. loadConstantOrVariablePayload(t2, CellTag, t0, .opGetByValSlow)
  1009. loadp JSCell::m_structure[t0], t2
  1010. loadp 16[PC], t3
  1011. arrayProfile(t2, t3, t1)
  1012. loadi 12[PC], t3
  # Subscript must already be an int32.
  1013. loadConstantOrVariablePayload(t3, Int32Tag, t1, .opGetByValSlow)
  1014. loadp JSObject::m_butterfly[t0], t3
  1015. andi IndexingShapeMask, t2
  1016. bieq t2, Int32Shape, .opGetByValIsContiguous
  1017. bineq t2, ContiguousShape, .opGetByValNotContiguous
  1018. .opGetByValIsContiguous:
  # Unsigned compare doubles as a negative-index check.
  1019. biaeq t1, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t3], .opGetByValOutOfBounds
  1020. loadi TagOffset[t3, t1, 8], t2
  1021. loadi PayloadOffset[t3, t1, 8], t1
  1022. jmp .opGetByValDone
  1023. .opGetByValNotContiguous:
  1024. bineq t2, DoubleShape, .opGetByValNotDouble
  1025. biaeq t1, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t3], .opGetByValOutOfBounds
  1026. loadd [t3, t1, 8], ft0
  # NaN payload means a hole in a double array — slow path.
  1027. bdnequn ft0, ft0, .opGetByValSlow
  1028. # FIXME: This could be massively optimized.
  1029. fd2ii ft0, t1, t2
  1030. loadi 4[PC], t0
  1031. jmp .opGetByValNotEmpty
  1032. .opGetByValNotDouble:
  # Accept ArrayStorage..SlowPutArrayStorage shapes via one unsigned range test.
  1033. subi ArrayStorageShape, t2
  1034. bia t2, SlowPutArrayStorageShape - ArrayStorageShape, .opGetByValSlow
  1035. biaeq t1, -sizeof IndexingHeader + IndexingHeader::m_vectorLength[t3], .opGetByValOutOfBounds
  1036. loadi ArrayStorage::m_vector + TagOffset[t3, t1, 8], t2
  1037. loadi ArrayStorage::m_vector + PayloadOffset[t3, t1, 8], t1
  1038. .opGetByValDone:
  1039. loadi 4[PC], t0
  # EmptyValueTag marks a hole; treat like out-of-bounds.
  1040. bieq t2, EmptyValueTag, .opGetByValOutOfBounds
  1041. .opGetByValNotEmpty:
  1042. storei t2, TagOffset[cfr, t0, 8]
  1043. storei t1, PayloadOffset[cfr, t0, 8]
  1044. loadi 20[PC], t0
  1045. valueProfile(t2, t1, t0)
  1046. dispatch(6)
  1047. .opGetByValOutOfBounds:
  1048. if VALUE_PROFILER
  # Record the OOB access so the JITs know to compile a checked path.
  1049. loadpFromInstruction(4, t0)
  1050. storeb 1, ArrayProfile::m_outOfBounds[t0]
  1051. end
  1052. .opGetByValSlow:
  1053. callSlowPath(_llint_slow_path_get_by_val)
  1054. dispatch(6)
  1055. _llint_op_get_argument_by_val:
  1056. # FIXME: At some point we should array profile this. Right now it isn't necessary
  1057. # since the DFG will never turn a get_argument_by_val into a GetByVal.
  # Fast path only while the arguments object is uncreated (slot still empty).
  # Arguments live above the frame: index i maps to ThisArgumentOffset minus
  # (i + 1) slots, computed here via addi 1 / negi.
  1058. traceExecution()
  1059. loadi 8[PC], t0
  1060. loadi 12[PC], t1
  1061. bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opGetArgumentByValSlow
  1062. loadConstantOrVariablePayload(t1, Int32Tag, t2, .opGetArgumentByValSlow)
  1063. addi 1, t2
  1064. loadi ArgumentCount + PayloadOffset[cfr], t1
  # Unsigned compare also rejects negative indices.
  1065. biaeq t2, t1, .opGetArgumentByValSlow
  1066. negi t2
  1067. loadi 4[PC], t3
  1068. loadi ThisArgumentOffset + TagOffset[cfr, t2, 8], t0
  1069. loadi ThisArgumentOffset + PayloadOffset[cfr, t2, 8], t1
  1070. loadi 20[PC], t2
  1071. storei t0, TagOffset[cfr, t3, 8]
  1072. storei t1, PayloadOffset[cfr, t3, 8]
  1073. valueProfile(t0, t1, t2)
  1074. dispatch(6)
  1075. .opGetArgumentByValSlow:
  1076. callSlowPath(_llint_slow_path_get_argument_by_val)
  1077. dispatch(6)
  # get_by_pname: fast property read inside a for-in loop, valid only while
  # the base still has the structure cached by the property-name iterator and
  # the enumeration index maps to a cacheable slot.
  1078. _llint_op_get_by_pname:
  1079. traceExecution()
  1080. loadi 12[PC], t0
  1081. loadConstantOrVariablePayload(t0, CellTag, t1, .opGetByPnameSlow)
  1082. loadi 16[PC], t0
  # The property name must be the one produced by this iteration's next_pname.
  1083. bpneq t1, PayloadOffset[cfr, t0, 8], .opGetByPnameSlow
  1084. loadi 8[PC], t0
  1085. loadConstantOrVariablePayload(t0, CellTag, t2, .opGetByPnameSlow)
  1086. loadi 20[PC], t0
  1087. loadi PayloadOffset[cfr, t0, 8], t3
  1088. loadp JSCell::m_structure[t2], t0
  1089. bpneq t0, JSPropertyNameIterator::m_cachedStructure[t3], .opGetByPnameSlow
  1090. loadi 24[PC], t0
  1091. loadi [cfr, t0, 8], t0
  # Enumeration index is 1-based in the register; bias to 0-based.
  1092. subi 1, t0
  1093. biaeq t0, JSPropertyNameIterator::m_numCacheableSlots[t3], .opGetByPnameSlow
  # Indices past the inline capacity are rebased into out-of-line offsets.
  1094. bilt t0, JSPropertyNameIterator::m_cachedStructureInlineCapacity[t3], .opGetByPnameInlineProperty
  1095. addi firstOutOfLineOffset, t0
  1096. subi JSPropertyNameIterator::m_cachedStructureInlineCapacity[t3], t0
  1097. .opGetByPnameInlineProperty:
  1098. loadPropertyAtVariableOffset(t0, t2, t1, t3)
  1099. loadi 4[PC], t0
  1100. storei t1, TagOffset[cfr, t0, 8]
  1101. storei t3, PayloadOffset[cfr, t0, 8]
  1102. dispatch(7)
  1103. .opGetByPnameSlow:
  1104. callSlowPath(_llint_slow_path_get_by_pname)
  1105. dispatch(7)
  # Store helper for put_by_val on Int32/Double/Contiguous shapes.
  # Expects: t0 = butterfly, t3 = index, t1 = scratch. In-bounds stores go
  # straight through; a store one past the public length (but within the
  # vector) grows the public length and records a may-store-to-hole profile.
  1106. macro contiguousPutByVal(storeCallback)
  1107. biaeq t3, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0], .outOfBounds
  1108. .storeResult:
  1109. loadi 12[PC], t2
  1110. storeCallback(t2, t1, t0, t3)
  1111. dispatch(5)
  1112. .outOfBounds:
  1113. biaeq t3, -sizeof IndexingHeader + IndexingHeader::m_vectorLength[t0], .opPutByValOutOfBounds
  1114. if VALUE_PROFILER
  1115. loadp 16[PC], t2
  1116. storeb 1, ArrayProfile::m_mayStoreToHole[t2]
  1117. end
  # New public length = index + 1.
  1118. addi 1, t3, t2
  1119. storei t2, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0]
  1120. jmp .storeResult
  1121. end
  # put_by_val: dispatch on indexing shape. Int32 shape only accepts int32
  # values; Double shape stores raw doubles (int32 values are converted);
  # Contiguous stores any boxed value; ArrayStorage handles holes and
  # m_numValuesInVector bookkeeping.
  1122. _llint_op_put_by_val:
  1123. traceExecution()
  1124. loadi 4[PC], t0
  1125. loadConstantOrVariablePayload(t0, CellTag, t1, .opPutByValSlow)
  1126. loadp JSCell::m_structure[t1], t2
  1127. loadp 16[PC], t3
  1128. arrayProfile(t2, t3, t0)
  1129. loadi 8[PC], t0
  1130. loadConstantOrVariablePayload(t0, Int32Tag, t3, .opPutByValSlow)
  1131. loadp JSObject::m_butterfly[t1], t0
  1132. andi IndexingShapeMask, t2
  1133. bineq t2, Int32Shape, .opPutByValNotInt32
  1134. contiguousPutByVal(
  1135. macro (operand, scratch, base, index)
  # Storing a non-int32 into an Int32Shape array must go slow (shape change).
  1136. loadConstantOrVariablePayload(operand, Int32Tag, scratch, .opPutByValSlow)
  1137. storei Int32Tag, TagOffset[base, index, 8]
  1138. storei scratch, PayloadOffset[base, index, 8]
  1139. end)
  1140. .opPutByValNotInt32:
  1141. bineq t2, DoubleShape, .opPutByValNotDouble
  1142. contiguousPutByVal(
  1143. macro (operand, scratch, base, index)
  1144. const tag = scratch
  1145. const payload = operand
  1146. loadConstantOrVariable2Reg(operand, tag, payload)
  1147. bineq tag, Int32Tag, .notInt
  1148. ci2d payload, ft0
  1149. jmp .ready
  1150. .notInt:
  1151. fii2d payload, tag, ft0
  # NaN would be indistinguishable from the hole representation — go slow.
  1152. bdnequn ft0, ft0, .opPutByValSlow
  1153. .ready:
  1154. stored ft0, [base, index, 8]
  1155. end)
  1156. .opPutByValNotDouble:
  1157. bineq t2, ContiguousShape, .opPutByValNotContiguous
  1158. contiguousPutByVal(
  1159. macro (operand, scratch, base, index)
  1160. const tag = scratch
  1161. const payload = operand
  1162. loadConstantOrVariable2Reg(operand, tag, payload)
  1163. writeBarrier(tag, payload)
  1164. storei tag, TagOffset[base, index, 8]
  1165. storei payload, PayloadOffset[base, index, 8]
  1166. end)
  1167. .opPutByValNotContiguous:
  1168. bineq t2, ArrayStorageShape, .opPutByValSlow
  1169. biaeq t3, -sizeof IndexingHeader + IndexingHeader::m_vectorLength[t0], .opPutByValOutOfBounds
  # Overwriting a hole requires extra bookkeeping below.
  1170. bieq ArrayStorage::m_vector + TagOffset[t0, t3, 8], EmptyValueTag, .opPutByValArrayStorageEmpty
  1171. .opPutByValArrayStorageStoreResult:
  1172. loadi 12[PC], t2
  1173. loadConstantOrVariable2Reg(t2, t1, t2)
  1174. writeBarrier(t1, t2)
  1175. storei t1, ArrayStorage::m_vector + TagOffset[t0, t3, 8]
  1176. storei t2, ArrayStorage::m_vector + PayloadOffset[t0, t3, 8]
  1177. dispatch(5)
  1178. .opPutByValArrayStorageEmpty:
  1179. if VALUE_PROFILER
  1180. loadp 16[PC], t1
  1181. storeb 1, ArrayProfile::m_mayStoreToHole[t1]
  1182. end
  # Filling a hole bumps the live-value count, and may extend public length.
  1183. addi 1, ArrayStorage::m_numValuesInVector[t0]
  1184. bib t3, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0], .opPutByValArrayStorageStoreResult
  1185. addi 1, t3, t1
  1186. storei t1, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0]
  1187. jmp .opPutByValArrayStorageStoreResult
  1188. .opPutByValOutOfBounds:
  1189. if VALUE_PROFILER
  1190. loadpFromInstruction(4, t0)
  1191. storeb 1, ArrayProfile::m_outOfBounds[t0]
  1192. end
  1193. .opPutByValSlow:
  1194. callSlowPath(_llint_slow_path_put_by_val)
  1195. dispatch(5)
  1196. _llint_op_jmp:
  1197. traceExecution()
  # Unconditional branch: target offset is operand 4[PC].
  1198. dispatchBranch(4[PC])
  # Shared body for jtrue/jfalse-style opcodes. The value must already be a
  # boolean; anything else (needing ToBoolean) goes to the slow path, which
  # sets PC itself (hence dispatch(0)).
  1199. macro jumpTrueOrFalse(conditionOp, slow)
  1200. loadi 4[PC], t1
  1201. loadConstantOrVariablePayload(t1, BooleanTag, t0, .slow)
  1202. conditionOp(t0, .target)
  1203. dispatch(3)
  1204. .target:
  1205. dispatchBranch(8[PC])
  1206. .slow:
  1207. callSlowPath(slow)
  1208. dispatch(0)
  1209. end
  # Shared body for jeq_null/jneq_null. Cells are classified via their
  # structure flags (MasqueradesAsUndefined); immediates are normalized so
  # NullTag and UndefinedTag compare equal (ori 1 folds the two tags together
  # — assumes their encodings differ only in bit 0; TODO confirm tag values).
  1210. macro equalNull(cellHandler, immediateHandler)
  1211. loadi 4[PC], t0
  1212. assertNotConstant(t0)
  1213. loadi TagOffset[cfr, t0, 8], t1
  1214. loadi PayloadOffset[cfr, t0, 8], t0
  1215. bineq t1, CellTag, .immediate
  1216. loadp JSCell::m_structure[t0], t2
  1217. cellHandler(t2, Structure::m_typeInfo + TypeInfo::m_flags[t2], .target)
  1218. dispatch(3)
  1219. .target:
  1220. dispatchBranch(8[PC])
  1221. .immediate:
  1222. ori 1, t1
  1223. immediateHandler(t1, .target)
  1224. dispatch(3)
  1225. end
  1226. _llint_op_jeq_null:
  1227. traceExecution()
  1228. equalNull(
  # Cell equals null only if it masquerades as undefined within this frame's
  # global object.
  1229. macro (structure, value, target)
  1230. btbz value, MasqueradesAsUndefined, .opJeqNullNotMasqueradesAsUndefined
  1231. loadp CodeBlock[cfr], t0
  1232. loadp CodeBlock::m_globalObject[t0], t0
  1233. bpeq Structure::m_globalObject[structure], t0, target
  1234. .opJeqNullNotMasqueradesAsUndefined:
  1235. end,
  1236. macro (value, target) bieq value, NullTag, target end)
  1237. _llint_op_jneq_null:
  1238. traceExecution()
  1239. equalNull(
  1240. macro (structure, value, target)
  1241. btbz value, MasqueradesAsUndefined, target
  1242. loadp CodeBlock[cfr], t0
  1243. loadp CodeBlock::m_globalObject[t0], t0
  1244. bpneq Structure::m_globalObject[structure], t0, target
  1245. end,
  1246. macro (value, target) bineq value, NullTag, target end)
  # jneq_ptr: branch unless the register holds a specific "special pointer"
  # cell (indexed per-global-object, operand 8[PC]). Falls through only on an
  # exact cell match.
  1247. _llint_op_jneq_ptr:
  1248. traceExecution()
  1249. loadi 4[PC], t0
  1250. loadi 8[PC], t1
  1251. loadp CodeBlock[cfr], t2
  1252. loadp CodeBlock::m_globalObject[t2], t2
  1253. bineq TagOffset[cfr, t0, 8], CellTag, .opJneqPtrBranch
  1254. loadp JSGlobalObject::m_specialPointers[t2, t1, 4], t1
  1255. bpeq PayloadOffset[cfr, t0, 8], t1, .opJneqPtrFallThrough
  1256. .opJneqPtrBranch:
  1257. dispatchBranch(12[PC])
  1258. .opJneqPtrFallThrough:
  1259. dispatch(4)
  # Shared body for the relational-compare branch opcodes. Handles the four
  # int32/double operand combinations inline; anything non-numeric goes to
  # the slow path (which sets PC itself — dispatch(0)).
  1260. macro compare(integerCompare, doubleCompare, slowPath)
  1261. loadi 4[PC], t2
  1262. loadi 8[PC], t3
  1263. loadConstantOrVariable(t2, t0, t1)
  1264. loadConstantOrVariable2Reg(t3, t2, t3)
  1265. bineq t0, Int32Tag, .op1NotInt
  1266. bineq t2, Int32Tag, .op2NotInt
  1267. integerCompare(t1, t3, .jumpTarget)
  1268. dispatch(4)
  1269. .op1NotInt:
  # op1 must be a double (tag <= LowestTag); otherwise not a number — slow.
  1270. bia t0, LowestTag, .slow
  1271. bib t2, LowestTag, .op1NotIntOp2Double
  1272. bineq t2, Int32Tag, .slow
  1273. ci2d t3, ft1
  1274. jmp .op1NotIntReady
  1275. .op1NotIntOp2Double:
  # fii2d reassembles a boxed double from its payload/tag halves.
  1276. fii2d t3, t2, ft1
  1277. .op1NotIntReady:
  1278. fii2d t1, t0, ft0
  1279. doubleCompare(ft0, ft1, .jumpTarget)
  1280. dispatch(4)
  1281. .op2NotInt:
  1282. ci2d t1, ft0
  1283. bia t2, LowestTag, .slow
  1284. fii2d t3, t2, ft1
  1285. doubleCompare(ft0, ft1, .jumpTarget)
  1286. dispatch(4)
  1287. .jumpTarget:
  1288. dispatchBranch(12[PC])
  1289. .slow:
  1290. callSlowPath(slowPath)
  1291. dispatch(0)
  1292. end
  # switch_imm: table switch on an int32 scrutinee. The jump table lives in
  # the CodeBlock's rare data, indexed by operand 4[PC]; a zero branch offset
  # in the table means "no case" and falls through to the default (8[PC]).
  1293. _llint_op_switch_imm:
  1294. traceExecution()
  1295. loadi 12[PC], t2
  1296. loadi 4[PC], t3
  1297. loadConstantOrVariable(t2, t1, t0)
  1298. loadp CodeBlock[cfr], t2
  1299. loadp CodeBlock::m_rareData[t2], t2
  1300. muli sizeof SimpleJumpTable, t3 # FIXME: would be nice to peephole this!
  1301. loadp CodeBlock::RareData::m_immediateSwitchJumpTables + VectorBufferOffset[t2], t2
  1302. addp t3, t2
  1303. bineq t1, Int32Tag, .opSwitchImmNotInt
  # Rebase by the table's minimum; unsigned compare bounds-checks both ends.
  1304. subi SimpleJumpTable::min[t2], t0
  1305. biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough
  1306. loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3
  1307. loadi [t3, t0, 4], t1
  1308. btiz t1, .opSwitchImmFallThrough
  1309. dispatchBranchWithOffset(t1)
  1310. .opSwitchImmNotInt:
  1311. bib t1, LowestTag, .opSwitchImmSlow # Go to slow path if it's a double.
  1312. .opSwitchImmFallThrough:
  1313. dispatchBranch(8[PC])
  1314. .opSwitchImmSlow:
  1315. callSlowPath(_llint_slow_path_switch_imm)
  1316. dispatch(0)
  # switch_char: table switch on a single-character string. Falls through for
  # non-cells, non-strings, and strings of length != 1; ropes (no resolved
  # StringImpl) go to the slow path to be flattened.
  1317. _llint_op_switch_char:
  1318. traceExecution()
  1319. loadi 12[PC], t2
  1320. loadi 4[PC], t3
  1321. loadConstantOrVariable(t2, t1, t0)
  1322. loadp CodeBlock[cfr], t2
  1323. loadp CodeBlock::m_rareData[t2], t2
  1324. muli sizeof SimpleJumpTable, t3
  1325. loadp CodeBlock::RareData::m_characterSwitchJumpTables + VectorBufferOffset[t2], t2
  1326. addp t3, t2
  1327. bineq t1, CellTag, .opSwitchCharFallThrough
  1328. loadp JSCell::m_structure[t0], t1
  1329. bbneq Structure::m_typeInfo + TypeInfo::m_type[t1], StringType, .opSwitchCharFallThrough
  1330. bineq JSString::m_length[t0], 1, .opSwitchCharFallThrough
  1331. loadp JSString::m_value[t0], t0
  1332. btpz t0, .opSwitchOnRope
  # Read the character through the 8-bit or 16-bit buffer as flagged.
  1333. loadp StringImpl::m_data8[t0], t1
  1334. btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit
  1335. loadh [t1], t0
  1336. jmp .opSwitchCharReady
  1337. .opSwitchChar8Bit:
  1338. loadb [t1], t0
  1339. .opSwitchCharReady:
  1340. subi SimpleJumpTable::min[t2], t0
  1341. biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough
  1342. loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2
  1343. loadi [t2, t0, 4], t1
  1344. btiz t1, .opSwitchCharFallThrough
  1345. dispatchBranchWithOffset(t1)
  1346. .opSwitchCharFallThrough:
  1347. dispatchBranch(8[PC])
  1348. .opSwitchOnRope:
  1349. callSlowPath(_llint_slow_path_switch_char)
  1350. dispatch(0)
  # new_func: create a function object via the slow path. When operand 12[PC]
  # is non-zero the creation is conditional: skip it if the destination
  # register is already populated (tag != EmptyValueTag).
  1351. _llint_op_new_func:
  1352. traceExecution()
  1353. btiz 12[PC], .opNewFuncUnchecked
  1354. loadi 4[PC], t1
  1355. bineq TagOffset[cfr, t1, 8], EmptyValueTag, .opNewFuncDone
  1356. .opNewFuncUnchecked:
  1357. callSlowPath(_llint_slow_path_new_func)
  1358. .opNewFuncDone:
  1359. dispatch(4)
  # Record the structure of the `this` argument into the call's ArrayProfile
  # (only when the value profiler is compiled in, and `this` is a cell).
  1360. macro arrayProfileForCall()
  1361. if VALUE_PROFILER
  1362. loadi 12[PC], t3
  1363. bineq ThisArgumentOffset + TagOffset[cfr, t3, 8], CellTag, .done
  1364. loadi ThisArgumentOffset + PayloadOffset[cfr, t3, 8], t0
  1365. loadp JSCell::m_structure[t0], t0
  1366. loadp 20[PC], t1
  1367. storep t0, ArrayProfile::m_lastSeenStructure[t1]
  1368. .done:
  1369. end
  1370. end
  # Linked-call fast path: if the callee cell matches the one cached in the
  # LLIntCallLinkInfo (16[PC]), build the new call frame inline and jump to
  # the target; otherwise go through slowPathForCall.
  1371. macro doCall(slowPath)
  1372. loadi 4[PC], t0
  1373. loadi 16[PC], t1
  1374. loadp LLIntCallLinkInfo::callee[t1], t2
  1375. loadConstantOrVariablePayload(t0, CellTag, t3, .opCallSlow)
  1376. bineq t3, t2, .opCallSlow
  1377. loadi 12[PC], t3
  # PC is advanced past this opcode before the call; note the 8 - 24 offset
  # below compensates when reading the argument-count operand.
  1378. addp 24, PC
  1379. lshifti 3, t3
  1380. addp cfr, t3 # t3 contains the new value of cfr
  1381. loadp JSFunction::m_scope[t2], t0
  1382. storei t2, Callee + PayloadOffset[t3]
  1383. storei t0, ScopeChain + PayloadOffset[t3]
  1384. loadi 8 - 24[PC], t2
  # Save the return PC in the caller frame's ArgumentCount tag slot.
  1385. storei PC, ArgumentCount + TagOffset[cfr]
  1386. storep cfr, CallerFrame[t3]
  1387. storei t2, ArgumentCount + PayloadOffset[t3]
  1388. storei CellTag, Callee + TagOffset[t3]
  1389. storei CellTag, ScopeChain + TagOffset[t3]
  1390. move t3, cfr
  1391. callTargetFunction(t1)
  1392. .opCallSlow:
  1393. slowPathForCall(6, slowPath)
  1394. end
  # tear_off_activation: detach the activation (if one was created) so local
  # variables survive the frame; no-op when the slot is still empty.
  1395. _llint_op_tear_off_activation:
  1396. traceExecution()
  1397. loadi 4[PC], t0
  1398. bieq TagOffset[cfr, t0, 8], EmptyValueTag, .opTearOffActivationNotCreated
  1399. callSlowPath(_llint_slow_path_tear_off_activation)
  1400. .opTearOffActivationNotCreated:
  1401. dispatch(2)
  1402. _llint_op_tear_off_arguments:
  1403. traceExecution()
  1404. loadi 4[PC], t0
  1405. subi 1, t0 # Get the unmodifiedArgumentsRegister
  1406. bieq TagOffset[cfr, t0, 8], EmptyValueTag, .opTearOffArgumentsNotCreated
  1407. callSlowPath(_llint_slow_path_tear_off_arguments)
  1408. .opTearOffArgumentsNotCreated:
  1409. dispatch(3)
  # ret: return value convention is t1 = tag, t0 = payload (consumed by
  # doReturn and by op_call_put_result in the caller).
  1410. _llint_op_ret:
  1411. traceExecution()
  1412. checkSwitchToJITForEpilogue()
  1413. loadi 4[PC], t2
  1414. loadConstantOrVariable(t2, t1, t0)
  1415. doReturn()
  # call_put_result: store the t1/t0 return value into the destination and
  # profile it. Runs immediately after a call returns.
  1416. _llint_op_call_put_result:
  1417. loadi 4[PC], t2
  1418. loadi 8[PC], t3
  1419. storei t1, TagOffset[cfr, t2, 8]
  1420. storei t0, PayloadOffset[cfr, t2, 8]
  1421. valueProfile(t1, t0, t3)
  1422. traceExecution() # Needs to be here because it would clobber t1, t0
  1423. dispatch(3)
  # ret_object_or_this: return the first operand if it is an object;
  # otherwise return the second operand (the `this` value) — used by
  # constructor epilogues.
  1424. _llint_op_ret_object_or_this:
  1425. traceExecution()
  1426. checkSwitchToJITForEpilogue()
  1427. loadi 4[PC], t2
  1428. loadConstantOrVariable(t2, t1, t0)
  1429. bineq t1, CellTag, .opRetObjectOrThisNotObject
  1430. loadp JSCell::m_structure[t0], t2
  1431. bbb Structure::m_typeInfo + TypeInfo::m_type[t2], ObjectType, .opRetObjectOrThisNotObject
  1432. doReturn()
  1433. .opRetObjectOrThisNotObject:
  1434. loadi 8[PC], t2
  1435. loadConstantOrVariable(t2, t1, t0)
  1436. doReturn()
  # to_primitive: immediates and strings are already primitive and are copied
  # through; any other cell needs the full ToPrimitive slow path.
  1437. _llint_op_to_primitive:
  1438. traceExecution()
  1439. loadi 8[PC], t2
  1440. loadi 4[PC], t3
  1441. loadConstantOrVariable(t2, t1, t0)
  1442. bineq t1, CellTag, .opToPrimitiveIsImm
  1443. loadp JSCell::m_structure[t0], t2
  1444. bbneq Structure::m_typeInfo + TypeInfo::m_type[t2], StringType, .opToPrimitiveSlowCase
  1445. .opToPrimitiveIsImm:
  1446. storei t1, TagOffset[cfr, t3, 8]
  1447. storei t0, PayloadOffset[cfr, t3, 8]
  1448. dispatch(3)
  1449. .opToPrimitiveSlowCase:
  1450. callSlowPath(_llint_slow_path_to_primitive)
  1451. dispatch(3)
  # next_pname: advance a for-in enumeration. Produces the next cached
  # property-name string and branches back to the loop body (24[PC]), or
  # dispatches past the loop when the index reaches the end. The fast path is
  # valid only while the base's structure and prototype chain still match
  # what the iterator cached.
  1452. _llint_op_next_pname:
  1453. traceExecution()
  1454. loadi 12[PC], t1
  1455. loadi 16[PC], t2
  1456. loadi PayloadOffset[cfr, t1, 8], t0
  1457. bieq t0, PayloadOffset[cfr, t2, 8], .opNextPnameEnd
  1458. loadi 20[PC], t2
  1459. loadi PayloadOffset[cfr, t2, 8], t2
  1460. loadp JSPropertyNameIterator::m_jsStrings[t2], t3
  1461. loadi [t3, t0, 8], t3
  1462. addi 1, t0
  1463. storei t0, PayloadOffset[cfr, t1, 8]
  1464. loadi 4[PC], t1
  1465. storei CellTag, TagOffset[cfr, t1, 8]
  1466. storei t3, PayloadOffset[cfr, t1, 8]
  1467. loadi 8[PC], t3
  1468. loadi PayloadOffset[cfr, t3, 8], t3
  1469. loadp JSCell::m_structure[t3], t1
  1470. bpneq t1, JSPropertyNameIterator::m_cachedStructure[t2], .opNextPnameSlow
  # Re-validate the cached prototype chain (null-terminated structure vector).
  1471. loadp JSPropertyNameIterator::m_cachedPrototypeChain[t2], t0
  1472. loadp StructureChain::m_vector[t0], t0
  1473. btpz [t0], .opNextPnameTarget
  1474. .opNextPnameCheckPrototypeLoop:
  1475. bieq Structure::m_prototype + TagOffset[t1], NullTag, .opNextPnameSlow
  1476. loadp Structure::m_prototype + PayloadOffset[t1], t2
  1477. loadp JSCell::m_structure[t2], t1
  1478. bpneq t1, [t0], .opNextPnameSlow
  1479. addp 4, t0
  1480. btpnz [t0], .opNextPnameCheckPrototypeLoop
  1481. .opNextPnameTarget:
  1482. dispatchBranch(24[PC])
  1483. .opNextPnameEnd:
  1484. dispatch(7)
  1485. .opNextPnameSlow:
  1486. callSlowPath(_llint_slow_path_next_pname) # This either keeps the PC where it was (causing us to loop) or sets it to target.
  1487. dispatch(0)
  1488. _llint_op_catch:
  1489. # This is where we end up from the JIT's throw trampoline (because the
  1490. # machine code return address will be set to _llint_op_catch), and from
  1491. # the interpreter's throw trampoline (see _llint_throw_trampoline).
  1492. # The JIT throwing protocol calls for the cfr to be in t0. The throwing
  1493. # code must have known that we were throwing to the interpreter, and have
  1494. # set VM::targetInterpreterPCForThrow.
  1495. move t0, cfr
  1496. loadp JITStackFrame::vm[sp], t3
  1497. loadi VM::targetInterpreterPCForThrow[t3], PC
  # Move the pending exception out of the VM and clear the VM's slot, then
  # store it into the catch opcode's destination register.
  1498. loadi VM::exception + PayloadOffset[t3], t0
  1499. loadi VM::exception + TagOffset[t3], t1
  1500. storei 0, VM::exception + PayloadOffset[t3]
  1501. storei EmptyValueTag, VM::exception + TagOffset[t3]
  1502. loadi 4[PC], t2
  1503. storei t0, PayloadOffset[cfr, t2, 8]
  1504. storei t1, TagOffset[cfr, t2, 8]
  1505. traceExecution() # This needs to be here because we don't want to clobber t0, t1, t2, t3 above.
  1506. dispatch(2)
  1507. # Gives you the scope in t0, while allowing you to optionally perform additional checks on the
  1508. # scopes as they are traversed. scopeCheck() is called with two arguments: the register
  1509. # holding the scope, and a register that can be used for scratch. Note that this does not
  1510. # use t3, so you can hold stuff in t3 if need be.
  1511. macro getDeBruijnScope(deBruijinIndexOperand, scopeCheck)
  # Start the walk from the scope chain head of the current call frame.
  1512. loadp ScopeChain + PayloadOffset[cfr], t0
  1513. loadi deBruijinIndexOperand, t2
  # An index of zero means the current scope; nothing to traverse.
  1514. btiz t2, .done
  1515. loadp CodeBlock[cfr], t1
  # Only function code may have an activation that is created lazily;
  # other code types go straight to the plain loop.
  1516. bineq CodeBlock::m_codeType[t1], FunctionCode, .loop
  1517. btbz CodeBlock::m_needsActivation[t1], .loop
  1518. loadi CodeBlock::m_activationRegister[t1], t1
  1519. # Need to conditionally skip over one scope.
  # If the activation register slot is still empty, the activation has not
  # been created yet, so it is not on the chain and must not be counted.
  1520. bieq TagOffset[cfr, t1, 8], EmptyValueTag, .noActivation
  1521. scopeCheck(t0, t1)
  1522. loadp JSScope::m_next[t0], t0
  1523. .noActivation:
  1524. subi 1, t2
  1525. btiz t2, .done
  1526. .loop:
  # Follow one JSScope::m_next link per remaining index step, letting the
  # caller-supplied scopeCheck inspect each scope as it is passed.
  1527. scopeCheck(t0, t1)
  1528. loadp JSScope::m_next[t0], t0
  1529. subi 1, t2
  1530. btinz t2, .loop
  1531. .done:
  1532. end
  1533. _llint_op_get_scoped_var:
  1534. traceExecution()
  1535. # Operands are as follows:
  1536. # 4[PC] Destination for the load.
  1537. # 8[PC] Index of register in the scope.
  1538. # 12[PC] De Bruijin index.
  1539. getDeBruijnScope(12[PC], macro (scope, scratch) end)
  1540. loadi 4[PC], t1
  1541. loadi 8[PC], t2
  # t0 now holds the resolved scope; read the variable slot (tag in t3,
  # payload in t0) out of its register storage.
  1542. loadp JSVariableObject::m_registers[t0], t0
  1543. loadi TagOffset[t0, t2, 8], t3
  1544. loadi PayloadOffset[t0, t2, 8], t0
  1545. storei t3, TagOffset[cfr, t1, 8]
  1546. storei t0, PayloadOffset[cfr, t1, 8]
  # 16[PC] is the value-profile slot; record the loaded value for the JIT.
  1547. loadi 16[PC], t1
  1548. valueProfile(t3, t0, t1)
  1549. dispatch(5)
  1550. _llint_op_put_scoped_var:
  1551. traceExecution()
  # Operands: 4[PC] index of the register in the scope, 8[PC] De Bruijn
  # index, 12[PC] source value (virtual register or constant).
  1552. getDeBruijnScope(8[PC], macro (scope, scratch) end)
  1553. loadi 12[PC], t1
  # Load the value to store: tag in t3, payload in t2.
  1554. loadConstantOrVariable(t1, t3, t2)
  1555. loadi 4[PC], t1
  # Barrier must happen before the stores make the value visible.
  1556. writeBarrier(t3, t2)
  1557. loadp JSVariableObject::m_registers[t0], t0
  1558. storei t3, TagOffset[t0, t1, 8]
  1559. storei t2, PayloadOffset[t0, t1, 8]
  1560. dispatch(4)
  1561. _llint_op_end:
  1562. traceExecution()
  1563. checkSwitchToJITForEpilogue()
  # Operand 1 (4[PC]) is the virtual register holding the return value; it
  # must be a real register, never a constant slot.
  1564. loadi 4[PC], t0
  1565. assertNotConstant(t0)
  # Return-value protocol: tag in t1, payload in t0, then return.
  1566. loadi TagOffset[cfr, t0, 8], t1
  1567. loadi PayloadOffset[cfr, t0, 8], t0
  1568. doReturn()
  1569. _llint_throw_from_slow_path_trampoline:
  1570. # When throwing from the interpreter (i.e. throwing from LLIntSlowPaths), so
  1571. # the throw target is not necessarily interpreted code, we come to here.
  1572. # This essentially emulates the JIT's throwing protocol.
  1573. loadp JITStackFrame::vm[sp], t1
  # JIT throwing protocol: target call frame in t0, then jump to the
  # machine-code PC the VM recorded for the throw.
  1574. loadp VM::callFrameForThrow[t1], t0
  1575. jmp VM::targetMachinePCForThrow[t1]
  1576. _llint_throw_during_call_trampoline:
  # Same protocol as _llint_throw_from_slow_path_trampoline, except the
  # return address of the in-flight call is first pulled off (into t2 on
  # ports that keep it on the stack) so the jump below lands cleanly.
  1577. preserveReturnAddressAfterCall(t2)
  1578. loadp JITStackFrame::vm[sp], t1
  1579. loadp VM::callFrameForThrow[t1], t0
  1580. jmp VM::targetMachinePCForThrow[t1]
  # Trampoline for calling a host (native) function. On entry the caller's
  # frame is expected in t0 (loaded below from CallerFrame[cfr]);
  # executableOffsetToFunction is the offset of the native function pointer
  # within the executable object. The per-port branches differ only in how
  # the return address, argument registers, and VM pointer are handled.
  1581. macro nativeCallTrampoline(executableOffsetToFunction)
  # Native frames carry no CodeBlock.
  1582. storep 0, CodeBlock[cfr]
  # Inherit the caller's scope chain into this frame.
  1583. loadp CallerFrame[cfr], t0
  1584. loadi ScopeChain + PayloadOffset[t0], t1
  1585. storei CellTag, ScopeChain + TagOffset[cfr]
  1586. storei t1, ScopeChain + PayloadOffset[cfr]
  1587. if X86
  1588. loadp JITStackFrame::vm + 4[sp], t3 # Additional offset for return address
  1589. storep cfr, VM::topCallFrame[t3]
  # Spill the return address (currently on top of the stack) into the
  # frame's ReturnPC slot so the callee frame looks fully formed.
  1590. peek 0, t1
  1591. storep t1, ReturnPC[cfr]
  1592. move cfr, t2 # t2 = ecx
  # Adjust sp around the call; 16 - 4 presumably keeps the stack 16-byte
  # aligned given the pushed return address — confirm against the x86 port.
  1593. subp 16 - 4, sp
  1594. loadi Callee + PayloadOffset[cfr], t1
  1595. loadp JSFunction::m_executable[t1], t1
  # Switch cfr to the caller frame before entering the host function.
  1596. move t0, cfr
  1597. call executableOffsetToFunction[t1]
  1598. addp 16 - 4, sp
  # Reload the VM pointer for the exception check below.
  1599. loadp JITStackFrame::vm + 4[sp], t3
  1600. elsif ARM or ARMv7 or ARMv7_TRADITIONAL
  1601. loadp JITStackFrame::vm[sp], t3
  1602. storep cfr, VM::topCallFrame[t3]
  # Stash the caller frame in t2; the return address lives in a register on
  # these ports, so preserve it into ReturnPC explicitly.
  1603. move t0, t2
  1604. preserveReturnAddressAfterCall(t3)
  1605. storep t3, ReturnPC[cfr]
  # Pass the exec callee frame in t0 and restore cfr to the caller frame.
  1606. move cfr, t0
  1607. loadi Callee + PayloadOffset[cfr], t1
  1608. loadp JSFunction::m_executable[t1], t1
  1609. move t2, cfr
  1610. call executableOffsetToFunction[t1]
  1611. restoreReturnAddressBeforeReturn(t3)
  1612. loadp JITStackFrame::vm[sp], t3
  1613. elsif MIPS
  # Same shape as the ARM branch, with one addition: MIPS passes the first
  # argument in a0, so the frame pointer is copied there before the call.
  1614. loadp JITStackFrame::vm[sp], t3
  1615. storep cfr, VM::topCallFrame[t3]
  1616. move t0, t2
  1617. preserveReturnAddressAfterCall(t3)
  1618. storep t3, ReturnPC[cfr]
  1619. move cfr, t0
  1620. loadi Callee + PayloadOffset[cfr], t1
  1621. loadp JSFunction::m_executable[t1], t1
  1622. move t2, cfr
  1623. move t0, a0
  1624. call executableOffsetToFunction[t1]
  1625. restoreReturnAddressBeforeReturn(t3)
  1626. loadp JITStackFrame::vm[sp], t3
  1627. elsif SH4
  # Identical sequence to the ARM branch.
  1628. loadp JITStackFrame::vm[sp], t3
  1629. storep cfr, VM::topCallFrame[t3]
  1630. move t0, t2
  1631. preserveReturnAddressAfterCall(t3)
  1632. storep t3, ReturnPC[cfr]
  1633. move cfr, t0
  1634. loadi Callee + PayloadOffset[cfr], t1
  1635. loadp JSFunction::m_executable[t1], t1
  1636. move t2, cfr
  1637. call executableOffsetToFunction[t1]
  1638. restoreReturnAddressBeforeReturn(t3)
  1639. loadp JITStackFrame::vm[sp], t3
  1640. elsif C_LOOP
  # C loop backend: the native call must go through cloopCallNative rather
  # than a machine-level call.
  1641. loadp JITStackFrame::vm[sp], t3
  1642. storep cfr, VM::topCallFrame[t3]
  1643. move t0, t2
  1644. preserveReturnAddressAfterCall(t3)
  1645. storep t3, ReturnPC[cfr]
  1646. move cfr, t0
  1647. loadi Callee + PayloadOffset[cfr], t1
  1648. loadp JSFunction::m_executable[t1], t1
  1649. move t2, cfr
  1650. cloopCallNative executableOffsetToFunction[t1]
  1651. restoreReturnAddressBeforeReturn(t3)
  1652. loadp JITStackFrame::vm[sp], t3
  1653. else
  1654. error
  1655. end
  # All branches leave the VM pointer in t3; if the host function left a
  # pending exception, take the exception path, otherwise return normally.
  1656. bineq VM::exception + TagOffset[t3], EmptyValueTag, .exception
  1657. ret
  1658. .exception:
  1659. preserveReturnAddressAfterCall(t1) # This is really only needed on X86
  # NOTE(review): the tag slot of ArgumentCount appears to hold the saved
  # bytecode offset, loaded into PC for the slow path — confirm against the
  # call opcodes that set it.
  1660. loadi ArgumentCount + TagOffset[cfr], PC
  1661. callSlowPath(_llint_throw_from_native_call)
  1662. jmp _llint_throw_from_slow_path_trampoline
  1663. end