/* break.S: Break interrupt handling (kept separate from entry.S)
 *
 * Copyright (C) 2003 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#include <linux/linkage.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/ptrace.h>
#include <asm/thread_info.h>
#include <asm/spr-regs.h>
#include <asm/errno.h>

#
# the break handler has its own stack
#
	.section	.bss..stack
	.globl	__break_user_context
	.balign	THREAD_SIZE
__break_stack:
	.space	THREAD_SIZE - FRV_FRAME0_SIZE
__break_frame_0:
	.space	FRV_FRAME0_SIZE
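# __break_frame_0 serves as the break handler's register save area (__entry_break
# points gr31 at it), and __break_continue also uses it as the top of the handler's
# own stack, which grows down through __break_stack within this THREAD_SIZE block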

#
# miscellaneous variables
#
	.section	.bss
#ifdef CONFIG_MMU
	.globl	__break_tlb_miss_real_return_info
__break_tlb_miss_real_return_info:
	.balign	8
	.space	2*4			/* saved PCSR, PSR for TLB-miss handler fixup */
#endif

__break_trace_through_exceptions:
	.space	4

#define CS2_ECS1	0xe1200000
#define CS2_USERLED	0x4

	.macro	LEDS val,reg
#	sethi.p	%hi(CS2_ECS1+CS2_USERLED),gr30
#	setlo	%lo(CS2_ECS1+CS2_USERLED),gr30
#	setlos	#~\val,\reg
#	st	\reg,@(gr30,gr0)
#	setlos	#0x5555,\reg
#	sethi.p	%hi(0xffc00100),gr30
#	setlo	%lo(0xffc00100),gr30
#	sth	\reg,@(gr30,gr0)
#	membar
	.endm
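
# NOTE: the body of LEDS is entirely commented out, so the macro currently expands
# to nothing; uncommenting it would write the inverted \val to the CS2 user-LED
# register and a marker halfword to 0xffc00100 as a progress trace for this handler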

###############################################################################
#
# entry point for Break Exceptions/Interrupts
#
###############################################################################
	.section	.text..break
	.balign	4
	.globl	__entry_break
__entry_break:
#ifdef CONFIG_MMU
	movgs	gr31,scr3
#endif
	LEDS	0x1001,gr31

	sethi.p	%hi(__break_frame_0),gr31
	setlo	%lo(__break_frame_0),gr31

	stdi	gr2,@(gr31,#REG_GR(2))
	movsg	ccr,gr3
	sti	gr3,@(gr31,#REG_CCR)

	# catch the return from a TLB-miss handler that had single-step disabled
	# traps will be enabled, so we have to do this now
#ifdef CONFIG_MMU
	movsg	bpcsr,gr3
	sethi.p	%hi(__break_tlb_miss_return_breaks_here),gr2
	setlo	%lo(__break_tlb_miss_return_breaks_here),gr2
	subcc	gr2,gr3,gr0,icc0
	beq	icc0,#2,__break_return_singlestep_tlbmiss
#endif

	# determine whether we have stepped through into an exception
	# - we need to take special action to suspend h/w single stepping if we've done
	#   that, so that the gdbstub doesn't get bogged down endlessly stepping through
	#   external interrupt handling
	movsg	bpsr,gr3
	andicc	gr3,#BPSR_BET,gr0,icc0
	bne	icc0,#2,__break_maybe_userspace	/* jump if PSR.ET was 1 */

	LEDS	0x1003,gr2

	movsg	brr,gr3
	andicc	gr3,#BRR_ST,gr0,icc0
	andicc.p	gr3,#BRR_SB,gr0,icc1
	bne	icc0,#2,__break_step		/* jump if single-step caused break */
	beq	icc1,#2,__break_continue	/* jump if BREAK didn't cause break */

	LEDS	0x1007,gr2

	# handle special breaks
	movsg	bpcsr,gr3
	sethi.p	%hi(__entry_return_singlestep_breaks_here),gr2
	setlo	%lo(__entry_return_singlestep_breaks_here),gr2
	subcc	gr2,gr3,gr0,icc0
	beq	icc0,#2,__break_return_singlestep

	bra	__break_continue

###############################################################################
#
# handle BREAK instruction in kernel-mode exception epilogue
#
###############################################################################
__break_return_singlestep:
	LEDS	0x100f,gr2
	# special break insn requests single-stepping to be turned back on
	#		HERE		RETT
	# PSR.ET	0		0
	# PSR.PS	old PSR.S	?
	# PSR.S		1		1
	# BPSR.ET	0		1	(can't have caused the original exception otherwise)
	# BPSR.BS	1		old PSR.S
	movsg	dcr,gr2
	sethi.p	%hi(DCR_SE),gr3
	setlo	%lo(DCR_SE),gr3
	or	gr2,gr3,gr2
	movgs	gr2,dcr

	movsg	psr,gr2
	andi	gr2,#PSR_PS,gr2
	slli	gr2,#11,gr2		/* PSR.PS -> BPSR.BS */
	ori	gr2,#BPSR_BET,gr2	/* 1 -> BPSR.BET */
	movgs	gr2,bpsr

	# return to the invoker of the original kernel exception
	movsg	pcsr,gr2
	movgs	gr2,bpcsr

	LEDS	0x101f,gr2

	ldi	@(gr31,#REG_CCR),gr3
	movgs	gr3,ccr
	lddi.p	@(gr31,#REG_GR(2)),gr2
	xor	gr31,gr31,gr31
	movgs	gr0,brr
#ifdef CONFIG_MMU
	movsg	scr3,gr31
#endif
	rett	#1

###############################################################################
#
# handle BREAK instruction in TLB-miss handler return path
#
###############################################################################
#ifdef CONFIG_MMU
__break_return_singlestep_tlbmiss:
	LEDS	0x1100,gr2

	sethi.p	%hi(__break_tlb_miss_real_return_info),gr3
	setlo	%lo(__break_tlb_miss_real_return_info),gr3
	lddi	@(gr3,#0),gr2
	movgs	gr2,pcsr
	movgs	gr3,psr

	bra	__break_return_singlestep
#endif

###############################################################################
#
# handle single stepping into an exception prologue from kernel mode
# - we try and catch it whilst it is still in the main vector table
# - if we catch it there, we have to jump to the fixup handler
# - there is a fixup table that has a pointer for every 16b slot in the trap
#   table
#
###############################################################################
__break_step:
	LEDS	0x2003,gr2

	# external interrupts seem to escape from the trap table before single
	# step catches up with them
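	# - compare BPCSR against each known interrupt entry point and divert to the
	#   matching step handler below (each compare is interleaved with loading the
	#   next candidate address)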
	movsg	bpcsr,gr2
	sethi.p	%hi(__entry_kernel_external_interrupt),gr3
	setlo	%lo(__entry_kernel_external_interrupt),gr3
	subcc.p	gr2,gr3,gr0,icc0
	sethi	%hi(__entry_uspace_external_interrupt),gr3
	setlo.p	%lo(__entry_uspace_external_interrupt),gr3
	beq	icc0,#2,__break_step_kernel_external_interrupt
	subcc.p	gr2,gr3,gr0,icc0
	sethi	%hi(__entry_kernel_external_interrupt_virtually_disabled),gr3
	setlo.p	%lo(__entry_kernel_external_interrupt_virtually_disabled),gr3
	beq	icc0,#2,__break_step_uspace_external_interrupt
	subcc.p	gr2,gr3,gr0,icc0
	sethi	%hi(__entry_kernel_external_interrupt_virtual_reenable),gr3
	setlo.p	%lo(__entry_kernel_external_interrupt_virtual_reenable),gr3
	beq	icc0,#2,__break_step_kernel_external_interrupt_virtually_disabled
	subcc	gr2,gr3,gr0,icc0
	beq	icc0,#2,__break_step_kernel_external_interrupt_virtual_reenable

	LEDS	0x2007,gr2

	# the two main vector tables are adjacent on one 8Kb slab
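	# - mask BPCSR down to its 8Kb page and bail out to the normal path if that
	#   page is not the one holding __trap_tables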
	movsg	bpcsr,gr2
	setlos	#0xffffe000,gr3
	and	gr2,gr3,gr2
	sethi.p	%hi(__trap_tables),gr3
	setlo	%lo(__trap_tables),gr3
	subcc	gr2,gr3,gr0,icc0
	bne	icc0,#2,__break_continue

	LEDS	0x200f,gr2

	# skip workaround if so requested by GDB
	sethi.p	%hi(__break_trace_through_exceptions),gr3
	setlo	%lo(__break_trace_through_exceptions),gr3
	ld	@(gr3,gr0),gr3
	subcc	gr3,gr0,gr0,icc0
	bne	icc0,#0,__break_continue

	LEDS	0x201f,gr2

	# access the fixup table - there's a 1:1 mapping between the slots in the trap tables and
	# the slots in the trap fixup tables allowing us to simply divide the offset into the
	# former by 4 to access the latter
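	# - for example (addresses purely illustrative): a step trapped at
	#   __trap_tables+0x12c lies in the 16-byte slot at offset 0x120; 0x12c >> 2,
	#   masked with ~3, gives 0x48, i.e. the 4-byte pointer for that slot in
	#   __trap_fixup_tables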
	sethi.p	%hi(__trap_tables),gr3
	setlo	%lo(__trap_tables),gr3
	movsg	bpcsr,gr2
	sub	gr2,gr3,gr2
	srli.p	gr2,#2,gr2
	sethi	%hi(__trap_fixup_tables),gr3
	setlo.p	%lo(__trap_fixup_tables),gr3
	andi	gr2,#~3,gr2
	ld	@(gr2,gr3),gr2
	jmpil	@(gr2,#0)

	# step through an internal exception from kernel mode
	.globl	__break_step_kernel_softprog_interrupt
__break_step_kernel_softprog_interrupt:
	sethi.p	%hi(__entry_kernel_softprog_interrupt_reentry),gr3
	setlo	%lo(__entry_kernel_softprog_interrupt_reentry),gr3
	bra	__break_return_as_kernel_prologue

	# step through an external interrupt from kernel mode
	.globl	__break_step_kernel_external_interrupt
__break_step_kernel_external_interrupt:
	# deal with virtual interrupt disablement
	beq	icc2,#0,__break_step_kernel_external_interrupt_virtually_disabled

	sethi.p	%hi(__entry_kernel_external_interrupt_reentry),gr3
	setlo	%lo(__entry_kernel_external_interrupt_reentry),gr3

__break_return_as_kernel_prologue:
	LEDS	0x203f,gr2

	movgs	gr3,bpcsr

	# do the bit we had to skip
#ifdef CONFIG_MMU
	movsg	ear0,gr2		/* EAR0 can get clobbered by gdb-stub (ICI/ICEI) */
	movgs	gr2,scr2
#endif
	or.p	sp,gr0,gr2		/* set up the stack pointer */
	subi	sp,#REG__END,sp
	sti.p	gr2,@(sp,#REG_SP)

	setlos	#REG__STATUS_STEP,gr2
	sti	gr2,@(sp,#REG__STATUS)	/* record single step status */

	# cancel single-stepping mode
	movsg	dcr,gr2
	sethi.p	%hi(~DCR_SE),gr3
	setlo	%lo(~DCR_SE),gr3
	and	gr2,gr3,gr2
	movgs	gr2,dcr

	LEDS	0x207f,gr2

	ldi	@(gr31,#REG_CCR),gr3
	movgs	gr3,ccr
	lddi.p	@(gr31,#REG_GR(2)),gr2
	xor	gr31,gr31,gr31
	movgs	gr0,brr
#ifdef CONFIG_MMU
	movsg	scr3,gr31
#endif
	rett	#1

	# we single-stepped into an interrupt handler whilst interrupts were merely virtually disabled
	# - we need to really disable interrupts, set the flag, fix up and return
__break_step_kernel_external_interrupt_virtually_disabled:
	movsg	psr,gr2
	andi	gr2,#~PSR_PIL,gr2
	ori	gr2,#PSR_PIL_14,gr2	/* debugging interrupts only */
	movgs	gr2,psr
	ldi	@(gr31,#REG_CCR),gr3
	movgs	gr3,ccr
	subcc.p	gr0,gr0,gr0,icc2	/* leave Z set, clear C */

	# exceptions must've been enabled and we must've been in supervisor mode
	setlos	BPSR_BET|BPSR_BS,gr3
	movgs	gr3,bpsr

	# return to where the interrupt happened
	movsg	pcsr,gr2
	movgs	gr2,bpcsr

	lddi.p	@(gr31,#REG_GR(2)),gr2
	xor	gr31,gr31,gr31
	movgs	gr0,brr
#ifdef CONFIG_MMU
	movsg	scr3,gr31
#endif
	rett	#1

	# we stepped through into the virtual interrupt reenablement trap
	#
	# we still want to single step, but only after fixing up, so that we get an event on the
	# instruction after the broken-into exception returns
	.globl	__break_step_kernel_external_interrupt_virtual_reenable
__break_step_kernel_external_interrupt_virtual_reenable:
	movsg	psr,gr2
	andi	gr2,#~PSR_PIL,gr2
	movgs	gr2,psr
	ldi	@(gr31,#REG_CCR),gr3
	movgs	gr3,ccr
	subicc	gr0,#1,gr0,icc2		/* clear Z, set C */

	# save the adjusted ICC2
	movsg	ccr,gr3
	sti	gr3,@(gr31,#REG_CCR)

	# exceptions must've been enabled and we must've been in supervisor mode
	setlos	BPSR_BET|BPSR_BS,gr3
	movgs	gr3,bpsr

	# return to where the trap happened
	movsg	pcsr,gr2
	movgs	gr2,bpcsr

	# and then process the single step
	bra	__break_continue

	# step through an internal exception from uspace mode
	.globl	__break_step_uspace_softprog_interrupt
__break_step_uspace_softprog_interrupt:
	sethi.p	%hi(__entry_uspace_softprog_interrupt_reentry),gr3
	setlo	%lo(__entry_uspace_softprog_interrupt_reentry),gr3
	bra	__break_return_as_uspace_prologue

	# step through an external interrupt from uspace mode
	.globl	__break_step_uspace_external_interrupt
__break_step_uspace_external_interrupt:
	sethi.p	%hi(__entry_uspace_external_interrupt_reentry),gr3
	setlo	%lo(__entry_uspace_external_interrupt_reentry),gr3

__break_return_as_uspace_prologue:
	LEDS	0x20ff,gr2

	movgs	gr3,bpcsr

	# do the bit we had to skip
	sethi.p	%hi(__kernel_frame0_ptr),gr28
	setlo	%lo(__kernel_frame0_ptr),gr28
	ldi.p	@(gr28,#0),gr28

	setlos	#REG__STATUS_STEP,gr2
	sti	gr2,@(gr28,#REG__STATUS)	/* record single step status */

	# cancel single-stepping mode
	movsg	dcr,gr2
	sethi.p	%hi(~DCR_SE),gr3
	setlo	%lo(~DCR_SE),gr3
	and	gr2,gr3,gr2
	movgs	gr2,dcr

	LEDS	0x20fe,gr2

	ldi	@(gr31,#REG_CCR),gr3
	movgs	gr3,ccr
	lddi.p	@(gr31,#REG_GR(2)),gr2
	xor	gr31,gr31,gr31
	movgs	gr0,brr
#ifdef CONFIG_MMU
	movsg	scr3,gr31
#endif
	rett	#1

#ifdef CONFIG_MMU
	# step through an ITLB-miss handler from user mode
	.globl	__break_user_insn_tlb_miss
__break_user_insn_tlb_miss:
	# we'll want to try the trap stub again
	sethi.p	%hi(__trap_user_insn_tlb_miss),gr2
	setlo	%lo(__trap_user_insn_tlb_miss),gr2
	movgs	gr2,bpcsr

__break_tlb_miss_common:
	LEDS	0x2101,gr2

	# cancel single-stepping mode
	movsg	dcr,gr2
	sethi.p	%hi(~DCR_SE),gr3
	setlo	%lo(~DCR_SE),gr3
	and	gr2,gr3,gr2
	movgs	gr2,dcr

	# we'll swap the real return address for one with a BREAK insn so that we can re-enable
	# single stepping on return
	movsg	pcsr,gr2
	sethi.p	%hi(__break_tlb_miss_real_return_info),gr3
	setlo	%lo(__break_tlb_miss_real_return_info),gr3
	sti	gr2,@(gr3,#0)

	sethi.p	%hi(__break_tlb_miss_return_break),gr2
	setlo	%lo(__break_tlb_miss_return_break),gr2
	movgs	gr2,pcsr

	# we also have to fudge PSR because the return BREAK is in kernel space and we want
	# to get a BREAK fault, not an access violation, should the return be to userspace
	movsg	psr,gr2
	sti.p	gr2,@(gr3,#4)
	ori	gr2,#PSR_PS,gr2
	movgs	gr2,psr
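	# (the PCSR/PSR pair saved above is restored by __break_return_singlestep_tlbmiss
	#  when the planted BREAK fires)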

	LEDS	0x2102,gr2

	ldi	@(gr31,#REG_CCR),gr3
	movgs	gr3,ccr
	lddi	@(gr31,#REG_GR(2)),gr2
	movsg	scr3,gr31
	movgs	gr0,brr
	rett	#1

	# step through a DTLB-miss handler from user mode
	.globl	__break_user_data_tlb_miss
__break_user_data_tlb_miss:
	# we'll want to try the trap stub again
	sethi.p	%hi(__trap_user_data_tlb_miss),gr2
	setlo	%lo(__trap_user_data_tlb_miss),gr2
	movgs	gr2,bpcsr
	bra	__break_tlb_miss_common

	# step through an ITLB-miss handler from kernel mode
	.globl	__break_kernel_insn_tlb_miss
__break_kernel_insn_tlb_miss:
	# we'll want to try the trap stub again
	sethi.p	%hi(__trap_kernel_insn_tlb_miss),gr2
	setlo	%lo(__trap_kernel_insn_tlb_miss),gr2
	movgs	gr2,bpcsr
	bra	__break_tlb_miss_common

	# step through a DTLB-miss handler from kernel mode
	.globl	__break_kernel_data_tlb_miss
__break_kernel_data_tlb_miss:
	# we'll want to try the trap stub again
	sethi.p	%hi(__trap_kernel_data_tlb_miss),gr2
	setlo	%lo(__trap_kernel_data_tlb_miss),gr2
	movgs	gr2,bpcsr
	bra	__break_tlb_miss_common
#endif

###############################################################################
#
# handle debug events originating with userspace
#
###############################################################################
__break_maybe_userspace:
	LEDS	0x3003,gr2

	setlos	#BPSR_BS,gr2
	andcc	gr3,gr2,gr0,icc0
	bne	icc0,#0,__break_continue	/* skip if PSR.S was 1 */

	movsg	brr,gr2
	andicc	gr2,#BRR_ST|BRR_SB,gr0,icc0
	beq	icc0,#0,__break_continue	/* jump if not BREAK or single-step */

	LEDS	0x3007,gr2

	# do the first part of the exception prologue here
	sethi.p	%hi(__kernel_frame0_ptr),gr28
	setlo	%lo(__kernel_frame0_ptr),gr28
	ldi	@(gr28,#0),gr28
	andi	gr28,#~7,gr28

	# set up the kernel stack pointer
	sti	sp,@(gr28,#REG_SP)
	ori	gr28,0,sp
	sti	gr0,@(gr28,#REG_GR(28))

	stdi	gr20,@(gr28,#REG_GR(20))
	stdi	gr22,@(gr28,#REG_GR(22))

	movsg	tbr,gr20
	movsg	bpcsr,gr21
	movsg	psr,gr22

	# determine the exception type and cancel single-stepping mode
	or	gr0,gr0,gr23
	movsg	dcr,gr2
	sethi.p	%hi(DCR_SE),gr3
	setlo	%lo(DCR_SE),gr3
	andcc	gr2,gr3,gr0,icc0
	beq	icc0,#0,__break_no_user_sstep	/* must have been a BREAK insn */

	not	gr3,gr3
	and	gr2,gr3,gr2
	movgs	gr2,dcr
	ori	gr23,#REG__STATUS_STEP,gr23

__break_no_user_sstep:
	LEDS	0x300f,gr2

	movsg	brr,gr2
	andi	gr2,#BRR_ST|BRR_SB,gr2
	slli	gr2,#1,gr2
	or	gr23,gr2,gr23
	sti.p	gr23,@(gr28,#REG__STATUS)	/* record single step status */

	# adjust the value acquired from TBR - this indicates the exception
	setlos	#~TBR_TT,gr2
	and.p	gr20,gr2,gr20
	setlos	#TBR_TT_BREAK,gr2
	or.p	gr20,gr2,gr20
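	# (the TT field of the saved TBR value is replaced with TBR_TT_BREAK so that the
	#  exception frame built by the prologue reports a break trap)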

	# fudge PSR.PS and BPSR.BS to return to kernel mode through the trap
	# table as trap 126
	andi	gr22,#~PSR_PS,gr22	/* PSR.PS should be 0 */
	movgs	gr22,psr

	setlos	#BPSR_BS,gr2		/* BPSR.BS should be 1 and BPSR.BET 0 */
	movgs	gr2,bpsr

	# return through remainder of the exception prologue
	# - need to load gr23 with return handler address
	sethi.p	%hi(__entry_return_from_user_exception),gr23
	setlo	%lo(__entry_return_from_user_exception),gr23
	sethi.p	%hi(__entry_common),gr3
	setlo	%lo(__entry_common),gr3
	movgs	gr3,bpcsr

	LEDS	0x301f,gr2

	ldi	@(gr31,#REG_CCR),gr3
	movgs	gr3,ccr
	lddi.p	@(gr31,#REG_GR(2)),gr2
	xor	gr31,gr31,gr31
	movgs	gr0,brr
#ifdef CONFIG_MMU
	movsg	scr3,gr31
#endif
	rett	#1

###############################################################################
#
# resume normal debug-mode entry
#
###############################################################################
__break_continue:
	LEDS	0x4003,gr2

	# set up the kernel stack pointer
	sti	sp,@(gr31,#REG_SP)
	sethi.p	%hi(__break_frame_0),sp
	setlo	%lo(__break_frame_0),sp

	# finish building the exception frame
	stdi	gr4,@(gr31,#REG_GR(4))
	stdi	gr6,@(gr31,#REG_GR(6))
	stdi	gr8,@(gr31,#REG_GR(8))
	stdi	gr10,@(gr31,#REG_GR(10))
	stdi	gr12,@(gr31,#REG_GR(12))
	stdi	gr14,@(gr31,#REG_GR(14))
	stdi	gr16,@(gr31,#REG_GR(16))
	stdi	gr18,@(gr31,#REG_GR(18))
	stdi	gr20,@(gr31,#REG_GR(20))
	stdi	gr22,@(gr31,#REG_GR(22))
	stdi	gr24,@(gr31,#REG_GR(24))
	stdi	gr26,@(gr31,#REG_GR(26))
	sti	gr0,@(gr31,#REG_GR(28))		/* NULL frame pointer */
	sti	gr29,@(gr31,#REG_GR(29))
	sti	gr30,@(gr31,#REG_GR(30))
	sti	gr8,@(gr31,#REG_ORIG_GR8)

#ifdef CONFIG_MMU
	movsg	scr3,gr19
	sti	gr19,@(gr31,#REG_GR(31))
#endif

	movsg	bpsr,gr19
	movsg	tbr,gr20
	movsg	bpcsr,gr21
	movsg	psr,gr22
	movsg	isr,gr23
	movsg	cccr,gr25
	movsg	lr,gr26
	movsg	lcr,gr27
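
	# rebuild the pre-break PSR: BPSR.BET occupies the same bit as PSR.ET, and
	# BPSR.BS sits 10 bits above PSR.S, hence the srli by #10 below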
	andi.p	gr22,#~(PSR_S|PSR_ET),gr5	/* rebuild PSR */
	andi	gr19,#PSR_ET,gr4
	or.p	gr4,gr5,gr5
	srli	gr19,#10,gr4
	andi	gr4,#PSR_S,gr4
	or.p	gr4,gr5,gr5

	setlos	#-1,gr6
	sti	gr20,@(gr31,#REG_TBR)
	sti	gr21,@(gr31,#REG_PC)
	sti	gr5,@(gr31,#REG_PSR)
	sti	gr23,@(gr31,#REG_ISR)
	sti	gr25,@(gr31,#REG_CCCR)
	stdi	gr26,@(gr31,#REG_LR)
	sti	gr6,@(gr31,#REG_SYSCALLNO)

	# store CPU-specific regs
	movsg	iacc0h,gr4
	movsg	iacc0l,gr5
	stdi	gr4,@(gr31,#REG_IACC0)

	movsg	gner0,gr4
	movsg	gner1,gr5
	stdi	gr4,@(gr31,#REG_GNER0)

	# build the debug register frame
	movsg	brr,gr4
	movgs	gr0,brr
	movsg	nmar,gr5
	movsg	dcr,gr6

	sethi.p	%hi(__debug_status),gr7
	setlo	%lo(__debug_status),gr7

	stdi	gr4,@(gr7,#DEBUG_BRR)
	sti	gr19,@(gr7,#DEBUG_BPSR)
	sti.p	gr6,@(gr7,#DEBUG_DCR)

	# trap exceptions during break handling and disable h/w breakpoints/watchpoints
	sethi	%hi(DCR_EBE),gr5
	setlo.p	%lo(DCR_EBE),gr5
	sethi	%hi(__entry_breaktrap_table),gr4
	setlo	%lo(__entry_breaktrap_table),gr4
	movgs	gr5,dcr
	movgs	gr4,tbr
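	# (DCR now holds only DCR_EBE, so the hardware break/watchpoints are off while
	#  the stub runs, and any exception taken here vectors via the break trap table)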

	# set up kernel global registers
	sethi.p	%hi(__kernel_current_task),gr5
	setlo	%lo(__kernel_current_task),gr5
	ld	@(gr5,gr0),gr29
	ldi.p	@(gr29,#4),gr15		; __current_thread_info = current->thread_info
	sethi	%hi(_gp),gr16
	setlo.p	%lo(_gp),gr16

	# make sure we (the kernel) get div-zero and misalignment exceptions
	setlos	#ISR_EDE|ISR_DTT_DIVBYZERO|ISR_EMAM_EXCEPTION,gr5
	movgs	gr5,isr

	# enter the GDB stub
	LEDS	0x4007,gr2

	or.p	gr0,gr0,fp
	call	debug_stub

	LEDS	0x403f,gr2

	# return from break
	lddi	@(gr31,#REG_IACC0),gr4
	movgs	gr4,iacc0h
	movgs	gr5,iacc0l

	lddi	@(gr31,#REG_GNER0),gr4
	movgs	gr4,gner0
	movgs	gr5,gner1

	lddi	@(gr31,#REG_LR),gr26
	lddi	@(gr31,#REG_CCR),gr24
	lddi	@(gr31,#REG_PSR),gr22
	ldi	@(gr31,#REG_PC),gr21
	ldi	@(gr31,#REG_TBR),gr20
	sethi.p	%hi(__debug_status),gr6
	setlo	%lo(__debug_status),gr6
	ldi.p	@(gr6,#DEBUG_DCR),gr6

	andi	gr22,#PSR_S,gr19	/* rebuild BPSR */
	andi.p	gr22,#PSR_ET,gr5
	slli	gr19,#10,gr19
	or	gr5,gr19,gr19

	movgs	gr6,dcr
	movgs	gr19,bpsr
	movgs	gr20,tbr
	movgs	gr21,bpcsr
	movgs	gr23,isr
	movgs	gr24,ccr
	movgs	gr25,cccr
	movgs	gr26,lr
	movgs	gr27,lcr

	LEDS	0x407f,gr2

#ifdef CONFIG_MMU
	ldi	@(gr31,#REG_GR(31)),gr2
	movgs	gr2,scr3
#endif

	ldi	@(gr31,#REG_GR(30)),gr30
	ldi	@(gr31,#REG_GR(29)),gr29
	lddi	@(gr31,#REG_GR(26)),gr26
	lddi	@(gr31,#REG_GR(24)),gr24
	lddi	@(gr31,#REG_GR(22)),gr22
	lddi	@(gr31,#REG_GR(20)),gr20
	lddi	@(gr31,#REG_GR(18)),gr18
	lddi	@(gr31,#REG_GR(16)),gr16
	lddi	@(gr31,#REG_GR(14)),gr14
	lddi	@(gr31,#REG_GR(12)),gr12
	lddi	@(gr31,#REG_GR(10)),gr10
	lddi	@(gr31,#REG_GR(8)),gr8
	lddi	@(gr31,#REG_GR(6)),gr6
	lddi	@(gr31,#REG_GR(4)),gr4
	lddi	@(gr31,#REG_GR(2)),gr2
	ldi.p	@(gr31,#REG_SP),sp

	xor	gr31,gr31,gr31
	movgs	gr0,brr
#ifdef CONFIG_MMU
	movsg	scr3,gr31
#endif
	rett	#1

###################################################################################################
#
# GDB stub "system calls"
#
###################################################################################################
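# each of these stubs is little more than a BREAK instruction placed at a well-known
# address: the debug stub is expected to identify the caller from that address,
# perform the requested service and let execution resume at the next instruction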
#ifdef CONFIG_GDBSTUB
	# void gdbstub_console_write(struct console *con, const char *p, unsigned n)
	.globl	gdbstub_console_write
gdbstub_console_write:
	break
	bralr
#endif

	# GDB stub BUG() trap
	# GR8 is the proposed signal number
	.globl	__debug_bug_trap
__debug_bug_trap:
	break
	bralr

	# transfer kernel exception to GDB for handling
	.globl	__break_hijack_kernel_event
__break_hijack_kernel_event:
	break

	.globl	__break_hijack_kernel_event_breaks_here
__break_hijack_kernel_event_breaks_here:
	nop

#ifdef CONFIG_MMU
	# handle a return from TLB-miss that requires single-step reactivation
	.globl	__break_tlb_miss_return_break
__break_tlb_miss_return_break:
	break
__break_tlb_miss_return_breaks_here:
	nop
#endif

	# guard the first .text label in the next file from confusion
	nop