/* SPDX-License-Identifier: GPL-2.0 */
	/* We need to carefully read the error status, ACK the errors,
	 * prevent recursive traps, and pass the information on to C
	 * code for logging.
	 *
	 * We pass the AFAR in as-is, and we encode the status
	 * information as described in asm-sparc64/sfafsr.h
	 */
	.type		__spitfire_access_error,#function
__spitfire_access_error:
	/* Disable ESTATE error reporting so that we do not take
	 * recursive traps and RED state the processor.
	 */
	stxa		%g0, [%g0] ASI_ESTATE_ERROR_EN
	membar		#Sync

	mov		UDBE_UE, %g1
	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR

	/* __spitfire_cee_trap branches here with AFSR in %g4 and
	 * UDBE_CE in %g1.  It only clears ESTATE_ERR_CE in the ESTATE
	 * Error Enable register.
	 */
__spitfire_cee_trap_continue:
	ldxa		[%g0] ASI_AFAR, %g5	! Get AFAR

	rdpr		%tt, %g3
	and		%g3, 0x1ff, %g3		! Paranoia
	sllx		%g3, SFSTAT_TRAP_TYPE_SHIFT, %g3
	or		%g4, %g3, %g4
	rdpr		%tl, %g3
	cmp		%g3, 1
	mov		1, %g3
	bleu		%xcc, 1f
	 sllx		%g3, SFSTAT_TL_GT_ONE_SHIFT, %g3

	or		%g4, %g3, %g4

	/* Read in the UDB error register state, clearing the sticky
	 * error bits as needed.  We only clear them if the UE bit is
	 * set.  Likewise, __spitfire_cee_trap below will only do so
	 * if the CE bit is set.
	 *
	 * NOTE: UltraSparc-I/II have high and low UDB error
	 *       registers, corresponding to the two UDB units
	 *       present on those chips.  UltraSparc-IIi only
	 *       has a single UDB, called "SDB" in the manual.
	 *       For IIi the upper UDB register always reads
	 *       as zero so for our purposes things will just
	 *       work with the checks below.
	 */
1:	ldxa		[%g0] ASI_UDBH_ERROR_R, %g3
	and		%g3, 0x3ff, %g7		! Paranoia
	sllx		%g7, SFSTAT_UDBH_SHIFT, %g7
	or		%g4, %g7, %g4
	andcc		%g3, %g1, %g3		! UDBE_UE or UDBE_CE
	be,pn		%xcc, 1f
	 nop
	stxa		%g3, [%g0] ASI_UDB_ERROR_W
	membar		#Sync

1:	mov		0x18, %g3
	ldxa		[%g3] ASI_UDBL_ERROR_R, %g3
	and		%g3, 0x3ff, %g7		! Paranoia
	sllx		%g7, SFSTAT_UDBL_SHIFT, %g7
	or		%g4, %g7, %g4
	andcc		%g3, %g1, %g3		! UDBE_UE or UDBE_CE
	be,pn		%xcc, 1f
	 nop
	mov		0x18, %g7
	stxa		%g3, [%g7] ASI_UDB_ERROR_W
	membar		#Sync

1:	/* Ok, now that we've latched the error state, clear the
	 * sticky bits in the AFSR.
	 */
	stxa		%g4, [%g0] ASI_AFSR
	membar		#Sync

	/* If we took this trap at TL > 1 we must save state via
	 * etraptl1, otherwise etrap_irq suffices.  In either case
	 * raise PIL to PIL_NORMAL_MAX (delay slot below) so no
	 * further interrupts are taken while we log the error.
	 */
	rdpr		%tl, %g2
	cmp		%g2, 1
	rdpr		%pil, %g2
	bleu,pt		%xcc, 1f
	 wrpr		%g0, PIL_NORMAL_MAX, %pil

	ba,pt		%xcc, etraptl1
	 rd		%pc, %g7

	ba,a,pt		%xcc, 2f
	 nop

1:	ba,pt		%xcc, etrap_irq
	 rd		%pc, %g7

2:
#ifdef CONFIG_TRACE_IRQFLAGS
	call		trace_hardirqs_off
	 nop
#endif
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_access_error
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_access_error,.-__spitfire_access_error

	/* This is the trap handler entry point for ECC correctable
	 * errors.  They are corrected, but we listen for the trap so
	 * that the event can be logged.
	 *
	 * Disrupting errors are either:
	 * 1) single-bit ECC errors during UDB reads to system
	 *    memory
	 * 2) data parity errors during write-back events
	 *
	 * As far as I can make out from the manual, the CEE trap is
	 * only for correctable errors during memory read accesses by
	 * the front-end of the processor.
	 *
	 * The code below is only for trap level 1 CEE events, as it
	 * is the only situation where we can safely record and log.
	 * For trap level >1 we just clear the CE bit in the AFSR and
	 * return.
	 *
	 * This is just like __spitfire_access_error above, but it
	 * specifically handles correctable errors.  If an
	 * uncorrectable error is indicated in the AFSR we will branch
	 * directly above to __spitfire_access_error to handle it
	 * instead.  Uncorrectable therefore takes priority over
	 * correctable, and the error logging C code will notice this
	 * case by inspecting the trap type.
	 */
	.type		__spitfire_cee_trap,#function
__spitfire_cee_trap:
	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR
	mov		1, %g3
	sllx		%g3, SFAFSR_UE_SHIFT, %g3
	andcc		%g4, %g3, %g0		! Check for UE
	bne,pn		%xcc, __spitfire_access_error
	 nop

	/* Ok, in this case we only have a correctable error.
	 * Indicate we only wish to capture that state in register
	 * %g1, and we only disable CE error reporting unlike UE
	 * handling which disables all errors.
	 */
	ldxa		[%g0] ASI_ESTATE_ERROR_EN, %g3
	andn		%g3, ESTATE_ERR_CE, %g3
	stxa		%g3, [%g0] ASI_ESTATE_ERROR_EN
	membar		#Sync

	/* Preserve AFSR in %g4, indicate UDB state to capture in %g1 */
	ba,pt		%xcc, __spitfire_cee_trap_continue
	 mov		UDBE_CE, %g1
	.size		__spitfire_cee_trap,.-__spitfire_cee_trap

	.type		__spitfire_data_access_exception_tl1,#function
__spitfire_data_access_exception_tl1:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	mov		DMMU_SFAR, %g5
	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
	membar		#Sync
	rdpr		%tt, %g3
	cmp		%g3, 0x80		! first win spill/fill trap
	blu,pn		%xcc, 1f
	 cmp		%g3, 0xff		! last win spill/fill trap
	bgu,pn		%xcc, 1f
	 nop
	ba,pt		%xcc, winfix_dax
	 rdpr		%tpc, %g3
1:	sethi		%hi(109f), %g7
	ba,pt		%xcc, etraptl1
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_data_access_exception_tl1
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_data_access_exception_tl1,.-__spitfire_data_access_exception_tl1

	.type		__spitfire_data_access_exception,#function
__spitfire_data_access_exception:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	mov		DMMU_SFAR, %g5
	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_data_access_exception
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_data_access_exception,.-__spitfire_data_access_exception

	.type		__spitfire_insn_access_exception_tl1,#function
__spitfire_insn_access_exception_tl1:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etraptl1
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_insn_access_exception_tl1
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_insn_access_exception_tl1,.-__spitfire_insn_access_exception_tl1

	.type		__spitfire_insn_access_exception,#function
__spitfire_insn_access_exception:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_insn_access_exception
	 add		%sp, PTREGS_OFF, %o0
	ba,a,pt		%xcc, rtrap
	.size		__spitfire_insn_access_exception,.-__spitfire_insn_access_exception
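
	/* For reference, the calling convention used above when entering
	 * the C logging routines.  This is a sketch based only on the code
	 * in this file: it assumes etrap/etraptl1 leave the trap-time
	 * %g4/%g5 values in %l4/%l5, and the exact bit layout of the
	 * encoded status word is the one defined in asm-sparc64/sfafsr.h.
	 *
	 *   %o0: pt_regs pointer (%sp + PTREGS_OFF)
	 *   %o1: status word from %l4 -- the AFSR-based encoding built by
	 *        __spitfire_access_error/__spitfire_cee_trap, or the raw
	 *        SFSR for the access exception handlers
	 *   %o2: fault address from %l5 -- AFAR, SFAR, or TPC as noted above
	 */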