/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Some aren't implemented for some variants. Some
 * are inline in tlbflush.h
 *
 *	- tlbil_va
 *	- tlbil_pid
 *	- tlbil_all
 *	- tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Partially rewritten by Cort Dougan (cort@cs.nmt.edu)
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/mmu.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/bug.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#if defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */

#elif defined(CONFIG_44x) /* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
_GLOBAL(__tlbil_va)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10

	/*
	 * We write 16 bits of STID since 47x supports that much, we
	 * will never be passed out of bounds values on 440 (hopefully)
	 */
	rlwimi	r5,r4,0,16,31

	/* We have to run the search with interrupts disabled, otherwise
	 * an interrupt which causes a TLB miss can clobber the MMUCR
	 * between the mtspr and the tlbsx.
	 *
	 * Critical and Machine Check interrupts take care of saving
	 * and restoring MMUCR, so only normal interrupts have to be
	 * taken care of.
	 */
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	tlbsx.	r6,0,r3
	bne	10f
	sync
#ifndef CONFIG_PPC_47x
	/* On 440, there are only 64 TLB entries, so r3 < 64, which means
	 * bit 22 is clear.  Since 22 is the V bit in the TLB_PAGEID, loading
	 * this value will invalidate the TLB entry.
	 */
	tlbwe	r6,r6,PPC44x_TLB_PAGEID
#else
	oris	r7,r6,0x8000	/* specify way explicitly */
	clrrwi	r4,r3,12	/* get an EPN for the hashing with V = 0 */
	ori	r4,r4,PPC47x_TLBE_SIZE
	tlbwe	r4,r7,0		/* write it */
#endif /* !CONFIG_PPC_47x */
	isync
10:	wrtee	r10
	blr
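
/*
 * Caller-side sketch for the routine above (illustrative only, not the
 * kernel's actual inline wrappers, which live in the tlbflush/mmu
 * headers). Per the register usage above, it assumes a C prototype
 * taking the effective address in r3 and the MMU context id (PID) in
 * r4; example_flush_page() is a hypothetical helper, not a real kernel
 * function:
 *
 *	extern void __tlbil_va(unsigned long address, unsigned int pid);
 *
 *	// Hypothetical helper: __tlbil_va masks interrupts itself, so
 *	// the caller only supplies the EA and the context id.
 *	static inline void example_flush_page(unsigned int pid,
 *					      unsigned long vmaddr)
 *	{
 *		__tlbil_va(vmaddr, pid);
 *	}
 */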

_GLOBAL(_tlbil_all)
_GLOBAL(_tlbil_pid)
#ifndef CONFIG_PPC_47x
	li	r3,0
	sync

	/* Load high watermark */
	lis	r4,tlb_44x_hwater@ha
	lwz	r5,tlb_44x_hwater@l(r4)

1:	tlbwe	r3,r3,PPC44x_TLB_PAGEID
	addi	r3,r3,1
	cmpw	0,r3,r5
	ble	1b

	isync
	blr
#else
	/* 476 variant. There's no simple way to do this: we walk every
	 * set and way by hand, so callers should try to limit the amount
	 * of such full flushes.
	 */
	mfmsr	r11		/* Interrupts off */
	wrteei	0
	li	r3,-1		/* Current set */
	lis	r10,tlb_47x_boltmap@h
	ori	r10,r10,tlb_47x_boltmap@l
	lis	r7,0x8000	/* Specify way explicitly */

	b	9f		/* For each set */

1:	li	r9,4		/* Number of ways */
	li	r4,0		/* Current way */
	li	r6,0		/* Default entry value 0 */
	andi.	r0,r8,1		/* Check if way 0 is bolted */
	mtctr	r9		/* Load way counter */
	bne-	3f		/* Bolted, skip loading it */

2:	/* For each way */
	or	r5,r3,r4	/* Make way|index value */
	rlwimi	r5,r5,16,8,15	/* Copy index into position */
	tlbre	r6,r5,0		/* Read entry */
3:	addis	r4,r4,0x2000	/* Next way */
	andi.	r0,r6,PPC47x_TLB0_VALID	/* Valid entry ? */
	beq	4f		/* Nope, skip it */
	rlwimi	r7,r5,0,1,2	/* Insert way number */
	rlwinm	r6,r6,0,21,19	/* Clear V */
	tlbwe	r6,r7,0		/* Write it */
4:	bdnz	2b		/* Loop for each way */
	srwi	r8,r8,1		/* Next boltmap bit */
9:	cmpwi	cr1,r3,255	/* Last set done ? */
	addi	r3,r3,1		/* Next set */
	beq	cr1,1f		/* End of loop */
	andi.	r0,r3,0x1f	/* Need to load a new boltmap word ? */
	bne	1b		/* No, loop */
	lwz	r8,0(r10)	/* Load boltmap entry */
	addi	r10,r10,4	/* Next word */
	b	1b		/* Then loop */
1:	isync			/* Sync shadows */
	wrtee	r11
	blr
#endif /* !CONFIG_PPC_47x */

#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check though, it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
_GLOBAL(_tlbivax_bcast)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10
	rlwimi	r5,r4,0,16,31
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	isync
	PPC_TLBIVAX(0, R3)
	isync
	mbar
	tlbsync
BEGIN_FTR_SECTION
	b	1f
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
	sync
	wrtee	r10
	blr
/*
 * DD2 HW could hang if an instruction fetch happens before msync completes.
 * Touch enough instruction cache lines to ensure cache hits
 */
1:	mflr	r9
	bcl	20,31,$+4
2:	mflr	r6
	li	r7,32
	PPC_ICBT(0,R6,R7)	/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)	/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)	/* touch next cache line */
	sync
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	mtlr	r9
	wrtee	r10
	blr
#endif /* CONFIG_PPC_47x */
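
/*
 * A rough C-level model of the 476 full-flush loop above (illustrative
 * only, not real kernel code). It assumes the 476 TLB is organized as
 * 256 sets of 4 ways and that tlb_47x_boltmap carries one bit per set,
 * LSB first within each 32-bit word, saying whether way 0 of that set
 * is bolted and must be preserved; tlbre()/tlbwe() stand in for the
 * corresponding instructions:
 *
 *	for (set = 0; set < 256; set++) {
 *		int bolted = tlb_47x_boltmap[set / 32] & (1u << (set % 32));
 *
 *		for (way = 0; way < 4; way++) {
 *			u32 entry = 0;
 *
 *			if (!(way == 0 && bolted))
 *				entry = tlbre(set, way);
 *			if (entry & PPC47x_TLB0_VALID)
 *				tlbwe(set, way, entry & ~PPC47x_TLB0_VALID);
 *		}
 *	}
 */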

#elif defined(CONFIG_PPC_85xx)
/*
 * FSL BookE implementations.
 *
 *   Since feature sections are using _SECTION_ELSE we need
 * to have the larger code path before the _SECTION_ELSE
 */

/*
 * Flush MMU TLB on the local processor
 */
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
	slwi	r3,r3,16
	mfmsr	r10
	wrteei	0
	mfspr	r4,SPRN_MAS6	/* save MAS6 */
	mtspr	SPRN_MAS6,r3
	PPC_TLBILX_PID(0,R0)
	mtspr	SPRN_MAS6,r4	/* restore MAS6 */
	wrtee	r10
MMU_FTR_SECTION_ELSE
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

/*
 * Flush MMU TLB for a particular address, but only on the local processor
 * (no broadcast)
 */
_GLOBAL(__tlbil_va)
	mfmsr	r10
	wrteei	0
	slwi	r4,r4,16
	ori	r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
	tlbsx	0,r3
	mfspr	r4,SPRN_MAS1		/* check valid */
	andis.	r3,r4,MAS1_VALID@h
	beq	1f
	rlwinm	r4,r4,0,1,31
	mtspr	SPRN_MAS1,r4
	tlbwe
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
1:	wrtee	r10
	blr
#elif defined(CONFIG_PPC_BOOK3E_64)
/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
_GLOBAL(_tlbil_pid)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_pid_noind)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	ori	r4,r4,MAS6_SIND
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_all)
	PPC_TLBILX_ALL(0,R0)
	msync
	isync
	blr

_GLOBAL(_tlbil_va)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBILX_VA(0,R3)
	msync
	isync
	wrtee	r10
	blr

_GLOBAL(_tlbivax_bcast)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBIVAX(0,R3)
	mbar
	tlbsync
	sync
	wrtee	r10
	blr
#else
#error Unsupported processor type !
#endif

#if defined(CONFIG_PPC_E500)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load TLBCAM[index] entry in to the L2 CAM MMU
 * Must preserve r7, r8, r9, r10, r11, r12
 */
_GLOBAL(loadcam_entry)
	mflr	r5
	LOAD_REG_ADDR_PIC(r4, TLBCAM)
	mtlr	r5
	mulli	r5,r3,TLBCAM_SIZE
	add	r3,r5,r4
	lwz	r4,TLBCAM_MAS0(r3)
	mtspr	SPRN_MAS0,r4
	lwz	r4,TLBCAM_MAS1(r3)
	mtspr	SPRN_MAS1,r4
	PPC_LL	r4,TLBCAM_MAS2(r3)
	mtspr	SPRN_MAS2,r4
	lwz	r4,TLBCAM_MAS3(r3)
	mtspr	SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
	lwz	r4,TLBCAM_MAS7(r3)
	mtspr	SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
	isync
	tlbwe
	isync
	blr
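
/*
 * A rough C-level model of loadcam_entry() above (illustrative only,
 * not real kernel code). It assumes each TLBCAM[] slot holds
 * precomputed MAS register values, with mtspr() standing in for the
 * corresponding instruction:
 *
 *	void loadcam_entry(unsigned int index)
 *	{
 *		mtspr(SPRN_MAS0, TLBCAM[index].MAS0);
 *		mtspr(SPRN_MAS1, TLBCAM[index].MAS1);
 *		mtspr(SPRN_MAS2, TLBCAM[index].MAS2);
 *		mtspr(SPRN_MAS3, TLBCAM[index].MAS3);
 *		if (mmu_has_feature(MMU_FTR_BIG_PHYS))
 *			mtspr(SPRN_MAS7, TLBCAM[index].MAS7);
 *		asm volatile("isync; tlbwe; isync" : : : "memory");
 *	}
 */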

/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry (0 means no switch to AS1)
 */
_GLOBAL(loadcam_multi)
	mflr	r8
	/* Don't switch to AS=1 if already there */
	mfmsr	r11
	andi.	r11,r11,MSR_IS
	bne	10f
	mr.	r12, r5
	beq	10f

	/*
	 * Set up temporary TLB entry that is the same as what we're
	 * running from, but in AS=1.
	 */
	bcl	20,31,$+4
1:	mflr	r6
	tlbsx	0,r8
	mfspr	r6,SPRN_MAS1
	ori	r6,r6,MAS1_TS
	mtspr	SPRN_MAS1,r6
	mfspr	r6,SPRN_MAS0
	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	mr	r7,r5
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

	/* Switch to AS=1 */
	mfmsr	r6
	ori	r6,r6,MSR_IS|MSR_DS
	mtmsr	r6
	isync

10:
	mr	r9,r3
	add	r10,r3,r4
2:	bl	loadcam_entry
	addi	r9,r9,1
	cmpw	r9,r10
	mr	r3,r9
	blt	2b

	/* Don't return to AS=0 if we were in AS=1 at entry */
	andi.	r11,r11,MSR_IS
	bne	3f
	cmpwi	r12, 0
	beq	3f

	/* Return to AS=0 and clear the temporary entry */
	mfmsr	r6
	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
	mtmsr	r6
	isync

	li	r6,0
	mtspr	SPRN_MAS1,r6
	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	oris	r6,r6,MAS0_TLBSEL(1)@h
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

3:
	mtlr	r8
	blr
#endif
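
/*
 * Usage sketch for loadcam_multi() (illustrative only, not actual
 * caller code, and the prototype shown is an assumption based on the
 * register usage above): a caller rewriting CAM entries that may map
 * the code it is running from passes a scratch entry index in the
 * third argument so the AS=1 trampoline entry can be built there:
 *
 *	extern void loadcam_multi(int first_idx, int num, int tmp);
 *
 *	// Hypothetical example: rewrite entries 0..2, using entry 3
 *	// as the temporary AS=1 trampoline slot.
 *	loadcam_multi(0, 3, 3);
 */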