spiterrs.S
	/* We need to carefully read the error status, ACK the errors,
	 * prevent recursive traps, and pass the information on to C
	 * code for logging.
	 *
	 * We pass the AFAR in as-is, and we encode the status
	 * information as described in asm-sparc64/sfafsr.h
	 */
	.type		__spitfire_access_error,#function
__spitfire_access_error:
	/* Disable ESTATE error reporting so that we do not take
	 * recursive traps and RED state the processor.
	 */
	stxa		%g0, [%g0] ASI_ESTATE_ERROR_EN
	membar		#Sync

	mov		UDBE_UE, %g1
	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR

	/* __spitfire_cee_trap branches here with AFSR in %g4 and
	 * UDBE_CE in %g1.  It only clears ESTATE_ERR_CE in the ESTATE
	 * Error Enable register.
	 */
__spitfire_cee_trap_continue:
	ldxa		[%g0] ASI_AFAR, %g5	! Get AFAR
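
	/* Fold the trap type (%tt) into the status word using the
	 * SFSTAT_* encoding from asm-sparc64/sfafsr.h noted above.
	 */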
	rdpr		%tt, %g3
	and		%g3, 0x1ff, %g3			! Paranoia
	sllx		%g3, SFSTAT_TRAP_TYPE_SHIFT, %g3
	or		%g4, %g3, %g4
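
	/* Note whether we trapped at TL > 1.  The sllx lives in the
	 * branch delay slot and always runs; only the or that sets the
	 * bit in the status word is skipped when TL <= 1.
	 */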
	rdpr		%tl, %g3
	cmp		%g3, 1
	mov		1, %g3
	bleu		%xcc, 1f
	 sllx		%g3, SFSTAT_TL_GT_ONE_SHIFT, %g3

	or		%g4, %g3, %g4

	/* Read in the UDB error register state, clearing the sticky
	 * error bits as-needed.  We only clear them if the UE bit is
	 * set.  Likewise, __spitfire_cee_trap below will only do so
	 * if the CE bit is set.
	 *
	 * NOTE: UltraSparc-I/II have high and low UDB error
	 *       registers, corresponding to the two UDB units
	 *       present on those chips.  UltraSparc-IIi only
	 *       has a single UDB, called "SDB" in the manual.
	 *       For IIi the upper UDB register always reads
	 *       as zero so for our purposes things will just
	 *       work with the checks below.
	 */
1:	ldxa		[%g0] ASI_UDBH_ERROR_R, %g3
	and		%g3, 0x3ff, %g7			! Paranoia
	sllx		%g7, SFSTAT_UDBH_SHIFT, %g7
	or		%g4, %g7, %g4
	andcc		%g3, %g1, %g3			! UDBE_UE or UDBE_CE
	be,pn		%xcc, 1f
	 nop
	stxa		%g3, [%g0] ASI_UDB_ERROR_W
	membar		#Sync
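
	/* Offset 0x18 within the UDB error ASIs selects the low UDB
	 * register; the high UDB accessed above lives at offset 0x0.
	 */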
1:	mov		0x18, %g3
	ldxa		[%g3] ASI_UDBL_ERROR_R, %g3
	and		%g3, 0x3ff, %g7			! Paranoia
	sllx		%g7, SFSTAT_UDBL_SHIFT, %g7
	or		%g4, %g7, %g4
	andcc		%g3, %g1, %g3			! UDBE_UE or UDBE_CE
	be,pn		%xcc, 1f
	 nop
	mov		0x18, %g7
	stxa		%g3, [%g7] ASI_UDB_ERROR_W
	membar		#Sync

1:	/* Ok, now that we've latched the error state, clear the
	 * sticky bits in the AFSR.
	 */
	stxa		%g4, [%g0] ASI_AFSR
	membar		#Sync
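
	/* Build the kernel trap frame.  The wrpr raising PIL sits in
	 * the branch delay slot, so it runs on both paths; we then
	 * enter via etraptl1 if we trapped at TL > 1, or via etrap_irq
	 * for a TL 1 trap.
	 */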
	rdpr		%tl, %g2
	cmp		%g2, 1
	rdpr		%pil, %g2
	bleu,pt		%xcc, 1f
	 wrpr		%g0, PIL_NORMAL_MAX, %pil

	ba,pt		%xcc, etraptl1
	 rd		%pc, %g7

	ba,pt		%xcc, 2f
	 nop

1:	ba,pt		%xcc, etrap_irq
	 rd		%pc, %g7

2:
#ifdef CONFIG_TRACE_IRQFLAGS
	call		trace_hardirqs_off
	 nop
#endif
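	/* %l4/%l5 hold the encoded status word and AFAR that etrap
	 * preserved from %g4/%g5; pass them to the C logger along with
	 * the pt_regs pointer.
	 */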
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_access_error
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_access_error,.-__spitfire_access_error

	/* This is the trap handler entry point for ECC correctable
	 * errors.  They are corrected, but we listen for the trap so
	 * that the event can be logged.
	 *
	 * Disrupting errors are either:
	 * 1) single-bit ECC errors during UDB reads to system
	 *    memory
	 * 2) data parity errors during write-back events
	 *
	 * As far as I can make out from the manual, the CEE trap is
	 * only for correctable errors during memory read accesses by
	 * the front-end of the processor.
	 *
	 * The code below is only for trap level 1 CEE events, as it
	 * is the only situation where we can safely record and log.
	 * For trap level >1 we just clear the CE bit in the AFSR and
	 * return.
	 *
	 * This is just like __spitfire_access_error above, but it
	 * specifically handles correctable errors.  If an
	 * uncorrectable error is indicated in the AFSR we will branch
	 * directly above to __spitfire_access_error to handle it
	 * instead.  Uncorrectable therefore takes priority over
	 * correctable, and the error logging C code will notice this
	 * case by inspecting the trap type.
	 */
	.type		__spitfire_cee_trap,#function
__spitfire_cee_trap:
	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR
	mov		1, %g3
	sllx		%g3, SFAFSR_UE_SHIFT, %g3
	andcc		%g4, %g3, %g0		! Check for UE
	bne,pn		%xcc, __spitfire_access_error
	 nop

	/* Ok, in this case we only have a correctable error.
	 * Indicate we only wish to capture that state in register
	 * %g1, and we only disable CE error reporting unlike UE
	 * handling which disables all errors.
	 */
	ldxa		[%g0] ASI_ESTATE_ERROR_EN, %g3
	andn		%g3, ESTATE_ERR_CE, %g3
	stxa		%g3, [%g0] ASI_ESTATE_ERROR_EN
	membar		#Sync

	/* Preserve AFSR in %g4, indicate UDB state to capture in %g1 */
	ba,pt		%xcc, __spitfire_cee_trap_continue
	 mov		UDBE_CE, %g1
	.size		__spitfire_cee_trap,.-__spitfire_cee_trap

	.type		__spitfire_data_access_exception_tl1,#function
__spitfire_data_access_exception_tl1:
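	/* wrpr with a register and an immediate XORs the two, so this
	 * toggles PSTATE from the MMU globals selected at D-MMU trap
	 * entry over to the alternate globals.
	 */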
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	mov		DMMU_SFAR, %g5
	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
	membar		#Sync
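
	/* If the trap type falls in the register window spill/fill
	 * range (0x80 - 0xff), this fault belongs to the window fixup
	 * path: hand it to winfix_dax with the trap PC in %g3 instead
	 * of reporting it through etraptl1.
	 */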
	rdpr		%tt, %g3
	cmp		%g3, 0x80		! first win spill/fill trap
	blu,pn		%xcc, 1f
	 cmp		%g3, 0xff		! last win spill/fill trap
	bgu,pn		%xcc, 1f
	 nop
	ba,pt		%xcc, winfix_dax
	 rdpr		%tpc, %g3
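
	/* Label 109 sits on the or in the delay slot, so %g7 ends up
	 * holding its address; etraptl1 resumes just past that point
	 * once the trap frame has been saved.
	 */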
1:	sethi		%hi(109f), %g7
	ba,pt		%xcc, etraptl1
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_data_access_exception_tl1
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_data_access_exception_tl1,.-__spitfire_data_access_exception_tl1

	.type		__spitfire_data_access_exception,#function
__spitfire_data_access_exception:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	mov		DMMU_SFAR, %g5
	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
	membar		#Sync
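
	/* Unlike the TL1 variant above, no winfix_dax check is needed
	 * here; go straight to etrap and report the fault.
	 */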
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_data_access_exception
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_data_access_exception,.-__spitfire_data_access_exception

	.type		__spitfire_insn_access_exception_tl1,#function
__spitfire_insn_access_exception_tl1:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etraptl1
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_insn_access_exception_tl1
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_insn_access_exception_tl1,.-__spitfire_insn_access_exception_tl1

	.type		__spitfire_insn_access_exception,#function
__spitfire_insn_access_exception:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_insn_access_exception
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_insn_access_exception,.-__spitfire_insn_access_exception