~ [ source navigation ] ~ [ diff markup ] ~ [ identifier search ] ~

TOMOYO Linux Cross Reference
Linux/arch/sparc/kernel/ktlb.S

Version: ~ [ linux-6.12-rc7 ] ~ [ linux-6.11.7 ] ~ [ linux-6.10.14 ] ~ [ linux-6.9.12 ] ~ [ linux-6.8.12 ] ~ [ linux-6.7.12 ] ~ [ linux-6.6.60 ] ~ [ linux-6.5.13 ] ~ [ linux-6.4.16 ] ~ [ linux-6.3.13 ] ~ [ linux-6.2.16 ] ~ [ linux-6.1.116 ] ~ [ linux-6.0.19 ] ~ [ linux-5.19.17 ] ~ [ linux-5.18.19 ] ~ [ linux-5.17.15 ] ~ [ linux-5.16.20 ] ~ [ linux-5.15.171 ] ~ [ linux-5.14.21 ] ~ [ linux-5.13.19 ] ~ [ linux-5.12.19 ] ~ [ linux-5.11.22 ] ~ [ linux-5.10.229 ] ~ [ linux-5.9.16 ] ~ [ linux-5.8.18 ] ~ [ linux-5.7.19 ] ~ [ linux-5.6.19 ] ~ [ linux-5.5.19 ] ~ [ linux-5.4.285 ] ~ [ linux-5.3.18 ] ~ [ linux-5.2.21 ] ~ [ linux-5.1.21 ] ~ [ linux-5.0.21 ] ~ [ linux-4.20.17 ] ~ [ linux-4.19.323 ] ~ [ linux-4.18.20 ] ~ [ linux-4.17.19 ] ~ [ linux-4.16.18 ] ~ [ linux-4.15.18 ] ~ [ linux-4.14.336 ] ~ [ linux-4.13.16 ] ~ [ linux-4.12.14 ] ~ [ linux-4.11.12 ] ~ [ linux-4.10.17 ] ~ [ linux-4.9.337 ] ~ [ linux-4.4.302 ] ~ [ linux-3.10.108 ] ~ [ linux-2.6.32.71 ] ~ [ linux-2.6.0 ] ~ [ linux-2.4.37.11 ] ~ [ unix-v6-master ] ~ [ ccs-tools-1.8.12 ] ~ [ policy-sample ] ~
Architecture: ~ [ i386 ] ~ [ alpha ] ~ [ m68k ] ~ [ mips ] ~ [ ppc ] ~ [ sparc ] ~ [ sparc64 ] ~

Diff markup

Differences between /arch/sparc/kernel/ktlb.S (Architecture sparc64) and /arch/sparc/kernel/ktlb.S (Architecture sparc) — the two listings below are content-identical; only the architecture tree differs.


  1 /* SPDX-License-Identifier: GPL-2.0 */              1 /* SPDX-License-Identifier: GPL-2.0 */
  2 /* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.       2 /* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
  3  *                                                  3  *
  4  * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>      4  * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
  5  * Copyright (C) 1996 Eddie C. Dost        (ec      5  * Copyright (C) 1996 Eddie C. Dost        (ecd@brainaid.de)
  6  * Copyright (C) 1996 Miguel de Icaza      (mi      6  * Copyright (C) 1996 Miguel de Icaza      (miguel@nuclecu.unam.mx)
  7  * Copyright (C) 1996,98,99 Jakub Jelinek  (jj      7  * Copyright (C) 1996,98,99 Jakub Jelinek  (jj@sunsite.mff.cuni.cz)
  8  */                                                 8  */
  9                                                     9 
 10 #include <linux/pgtable.h>                         10 #include <linux/pgtable.h>
 11 #include <asm/head.h>                              11 #include <asm/head.h>
 12 #include <asm/asi.h>                               12 #include <asm/asi.h>
 13 #include <asm/page.h>                              13 #include <asm/page.h>
 14 #include <asm/tsb.h>                               14 #include <asm/tsb.h>
 15                                                    15 
 16         .text                                      16         .text
 17         .align          32                         17         .align          32
 18                                                    18 
 19 kvmap_itlb:                                        19 kvmap_itlb:
 20         /* g6: TAG TARGET */                       20         /* g6: TAG TARGET */
 21         mov             TLB_TAG_ACCESS, %g4        21         mov             TLB_TAG_ACCESS, %g4
 22         ldxa            [%g4] ASI_IMMU, %g4        22         ldxa            [%g4] ASI_IMMU, %g4
 23                                                    23 
 24         /* The kernel executes in context zero     24         /* The kernel executes in context zero, therefore we do not
 25          * need to clear the context ID bits o     25          * need to clear the context ID bits out of %g4 here.
 26          */                                        26          */
 27                                                    27 
 28         /* sun4v_itlb_miss branches here with      28         /* sun4v_itlb_miss branches here with the missing virtual
 29          * address already loaded into %g4         29          * address already loaded into %g4
 30          */                                        30          */
 31 kvmap_itlb_4v:                                     31 kvmap_itlb_4v:
 32                                                    32 
 33         /* Catch kernel NULL pointer calls.  *     33         /* Catch kernel NULL pointer calls.  */
 34         sethi           %hi(PAGE_SIZE), %g5        34         sethi           %hi(PAGE_SIZE), %g5
 35         cmp             %g4, %g5                   35         cmp             %g4, %g5
 36         blu,pn          %xcc, kvmap_itlb_longp     36         blu,pn          %xcc, kvmap_itlb_longpath
 37          nop                                       37          nop
 38                                                    38 
 39         KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1     39         KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)
 40                                                    40 
 41 kvmap_itlb_tsb_miss:                               41 kvmap_itlb_tsb_miss:
 42         sethi           %hi(LOW_OBP_ADDRESS),      42         sethi           %hi(LOW_OBP_ADDRESS), %g5
 43         cmp             %g4, %g5                   43         cmp             %g4, %g5
 44         blu,pn          %xcc, kvmap_itlb_vmall     44         blu,pn          %xcc, kvmap_itlb_vmalloc_addr
 45          mov            0x1, %g5                   45          mov            0x1, %g5
 46         sllx            %g5, 32, %g5               46         sllx            %g5, 32, %g5
 47         cmp             %g4, %g5                   47         cmp             %g4, %g5
 48         blu,pn          %xcc, kvmap_itlb_obp       48         blu,pn          %xcc, kvmap_itlb_obp
 49          nop                                       49          nop
 50                                                    50 
 51 kvmap_itlb_vmalloc_addr:                           51 kvmap_itlb_vmalloc_addr:
 52         KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap     52         KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)
 53                                                    53 
 54         TSB_LOCK_TAG(%g1, %g2, %g7)                54         TSB_LOCK_TAG(%g1, %g2, %g7)
 55         TSB_WRITE(%g1, %g5, %g6)                   55         TSB_WRITE(%g1, %g5, %g6)
 56                                                    56 
 57         /* fallthrough to TLB load */              57         /* fallthrough to TLB load */
 58                                                    58 
 59 kvmap_itlb_load:                                   59 kvmap_itlb_load:
 60                                                    60 
 61 661:    stxa            %g5, [%g0] ASI_ITLB_DA     61 661:    stxa            %g5, [%g0] ASI_ITLB_DATA_IN
 62         retry                                      62         retry
 63         .section        .sun4v_2insn_patch, "a     63         .section        .sun4v_2insn_patch, "ax"
 64         .word           661b                       64         .word           661b
 65         nop                                        65         nop
 66         nop                                        66         nop
 67         .previous                                  67         .previous
 68                                                    68 
 69         /* For sun4v the ASI_ITLB_DATA_IN stor     69         /* For sun4v the ASI_ITLB_DATA_IN store and the retry
 70          * instruction get nop'd out and we ge     70          * instruction get nop'd out and we get here to branch
 71          * to the sun4v tlb load code.  The re     71          * to the sun4v tlb load code.  The registers are setup
 72          * as follows:                             72          * as follows:
 73          *                                         73          *
 74          * %g4: vaddr                              74          * %g4: vaddr
 75          * %g5: PTE                                75          * %g5: PTE
 76          * %g6: TAG                                76          * %g6: TAG
 77          *                                         77          *
 78          * The sun4v TLB load wants the PTE in     78          * The sun4v TLB load wants the PTE in %g3 so we fix that
 79          * up here.                                79          * up here.
 80          */                                        80          */
 81         ba,pt           %xcc, sun4v_itlb_load      81         ba,pt           %xcc, sun4v_itlb_load
 82          mov            %g5, %g3                   82          mov            %g5, %g3
 83                                                    83 
 84 kvmap_itlb_longpath:                               84 kvmap_itlb_longpath:
 85                                                    85 
 86 661:    rdpr    %pstate, %g5                       86 661:    rdpr    %pstate, %g5
 87         wrpr    %g5, PSTATE_AG | PSTATE_MG, %p     87         wrpr    %g5, PSTATE_AG | PSTATE_MG, %pstate
 88         .section .sun4v_2insn_patch, "ax"          88         .section .sun4v_2insn_patch, "ax"
 89         .word   661b                               89         .word   661b
 90         SET_GL(1)                                  90         SET_GL(1)
 91         nop                                        91         nop
 92         .previous                                  92         .previous
 93                                                    93 
 94         rdpr    %tpc, %g5                          94         rdpr    %tpc, %g5
 95         ba,pt   %xcc, sparc64_realfault_common     95         ba,pt   %xcc, sparc64_realfault_common
 96          mov    FAULT_CODE_ITLB, %g4               96          mov    FAULT_CODE_ITLB, %g4
 97                                                    97 
 98 kvmap_itlb_obp:                                    98 kvmap_itlb_obp:
 99         OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, k     99         OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)
100                                                   100 
101         TSB_LOCK_TAG(%g1, %g2, %g7)               101         TSB_LOCK_TAG(%g1, %g2, %g7)
102                                                   102 
103         TSB_WRITE(%g1, %g5, %g6)                  103         TSB_WRITE(%g1, %g5, %g6)
104                                                   104 
105         ba,pt           %xcc, kvmap_itlb_load     105         ba,pt           %xcc, kvmap_itlb_load
106          nop                                      106          nop
107                                                   107 
108 kvmap_dtlb_obp:                                   108 kvmap_dtlb_obp:
109         OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, k    109         OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)
110                                                   110 
111         TSB_LOCK_TAG(%g1, %g2, %g7)               111         TSB_LOCK_TAG(%g1, %g2, %g7)
112                                                   112 
113         TSB_WRITE(%g1, %g5, %g6)                  113         TSB_WRITE(%g1, %g5, %g6)
114                                                   114 
115         ba,pt           %xcc, kvmap_dtlb_load     115         ba,pt           %xcc, kvmap_dtlb_load
116          nop                                      116          nop
117                                                   117 
118 kvmap_linear_early:                               118 kvmap_linear_early:
119         sethi           %hi(kern_linear_pte_xo    119         sethi           %hi(kern_linear_pte_xor), %g7
120         ldx             [%g7 + %lo(kern_linear    120         ldx             [%g7 + %lo(kern_linear_pte_xor)], %g2
121         ba,pt           %xcc, kvmap_dtlb_tsb4m    121         ba,pt           %xcc, kvmap_dtlb_tsb4m_load
122          xor            %g2, %g4, %g5             122          xor            %g2, %g4, %g5
123                                                   123 
124         .align          32                        124         .align          32
125 kvmap_dtlb_tsb4m_load:                            125 kvmap_dtlb_tsb4m_load:
126         TSB_LOCK_TAG(%g1, %g2, %g7)               126         TSB_LOCK_TAG(%g1, %g2, %g7)
127         TSB_WRITE(%g1, %g5, %g6)                  127         TSB_WRITE(%g1, %g5, %g6)
128         ba,pt           %xcc, kvmap_dtlb_load     128         ba,pt           %xcc, kvmap_dtlb_load
129          nop                                      129          nop
130                                                   130 
131 kvmap_dtlb:                                       131 kvmap_dtlb:
132         /* %g6: TAG TARGET */                     132         /* %g6: TAG TARGET */
133         mov             TLB_TAG_ACCESS, %g4       133         mov             TLB_TAG_ACCESS, %g4
134         ldxa            [%g4] ASI_DMMU, %g4       134         ldxa            [%g4] ASI_DMMU, %g4
135                                                   135 
136         /* The kernel executes in context zero    136         /* The kernel executes in context zero, therefore we do not
137          * need to clear the context ID bits o    137          * need to clear the context ID bits out of %g4 here.
138          */                                       138          */
139                                                   139 
140         /* sun4v_dtlb_miss branches here with     140         /* sun4v_dtlb_miss branches here with the missing virtual
141          * address already loaded into %g4        141          * address already loaded into %g4
142          */                                       142          */
143 kvmap_dtlb_4v:                                    143 kvmap_dtlb_4v:
144         brgez,pn        %g4, kvmap_dtlb_nonlin    144         brgez,pn        %g4, kvmap_dtlb_nonlinear
145          nop                                      145          nop
146                                                   146 
147 #ifdef CONFIG_DEBUG_PAGEALLOC                     147 #ifdef CONFIG_DEBUG_PAGEALLOC
148         /* Index through the base page size TS    148         /* Index through the base page size TSB even for linear
149          * mappings when using page allocation    149          * mappings when using page allocation debugging.
150          */                                       150          */
151         KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1    151         KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
152 #else                                             152 #else
153         /* Correct TAG_TARGET is already in %g    153         /* Correct TAG_TARGET is already in %g6, check 4mb TSB.  */
154         KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %    154         KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
155 #endif                                            155 #endif
156         /* Linear mapping TSB lookup failed.      156         /* Linear mapping TSB lookup failed.  Fallthrough to kernel
157          * page table based lookup.               157          * page table based lookup.
158          */                                       158          */
159         .globl          kvmap_linear_patch        159         .globl          kvmap_linear_patch
160 kvmap_linear_patch:                               160 kvmap_linear_patch:
161         ba,a,pt         %xcc, kvmap_linear_ear    161         ba,a,pt         %xcc, kvmap_linear_early
162                                                   162 
163 kvmap_dtlb_vmalloc_addr:                          163 kvmap_dtlb_vmalloc_addr:
164         KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap    164         KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
165                                                   165 
166         TSB_LOCK_TAG(%g1, %g2, %g7)               166         TSB_LOCK_TAG(%g1, %g2, %g7)
167         TSB_WRITE(%g1, %g5, %g6)                  167         TSB_WRITE(%g1, %g5, %g6)
168                                                   168 
169         /* fallthrough to TLB load */             169         /* fallthrough to TLB load */
170                                                   170 
171 kvmap_dtlb_load:                                  171 kvmap_dtlb_load:
172                                                   172 
173 661:    stxa            %g5, [%g0] ASI_DTLB_DA    173 661:    stxa            %g5, [%g0] ASI_DTLB_DATA_IN     ! Reload TLB
174         retry                                     174         retry
175         .section        .sun4v_2insn_patch, "a    175         .section        .sun4v_2insn_patch, "ax"
176         .word           661b                      176         .word           661b
177         nop                                       177         nop
178         nop                                       178         nop
179         .previous                                 179         .previous
180                                                   180 
181         /* For sun4v the ASI_DTLB_DATA_IN stor    181         /* For sun4v the ASI_DTLB_DATA_IN store and the retry
182          * instruction get nop'd out and we ge    182          * instruction get nop'd out and we get here to branch
183          * to the sun4v tlb load code.  The re    183          * to the sun4v tlb load code.  The registers are setup
184          * as follows:                            184          * as follows:
185          *                                        185          *
186          * %g4: vaddr                             186          * %g4: vaddr
187          * %g5: PTE                               187          * %g5: PTE
188          * %g6: TAG                               188          * %g6: TAG
189          *                                        189          *
190          * The sun4v TLB load wants the PTE in    190          * The sun4v TLB load wants the PTE in %g3 so we fix that
191          * up here.                               191          * up here.
192          */                                       192          */
193         ba,pt           %xcc, sun4v_dtlb_load     193         ba,pt           %xcc, sun4v_dtlb_load
194          mov            %g5, %g3                  194          mov            %g5, %g3
195                                                   195 
196 #ifdef CONFIG_SPARSEMEM_VMEMMAP                   196 #ifdef CONFIG_SPARSEMEM_VMEMMAP
197 kvmap_vmemmap:                                    197 kvmap_vmemmap:
198         KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap    198         KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
199         ba,a,pt         %xcc, kvmap_dtlb_load     199         ba,a,pt         %xcc, kvmap_dtlb_load
200 #endif                                            200 #endif
201                                                   201 
202 kvmap_dtlb_nonlinear:                             202 kvmap_dtlb_nonlinear:
203         /* Catch kernel NULL pointer derefs.      203         /* Catch kernel NULL pointer derefs.  */
204         sethi           %hi(PAGE_SIZE), %g5       204         sethi           %hi(PAGE_SIZE), %g5
205         cmp             %g4, %g5                  205         cmp             %g4, %g5
206         bleu,pn         %xcc, kvmap_dtlb_longp    206         bleu,pn         %xcc, kvmap_dtlb_longpath
207          nop                                      207          nop
208                                                   208 
209 #ifdef CONFIG_SPARSEMEM_VMEMMAP                   209 #ifdef CONFIG_SPARSEMEM_VMEMMAP
210         /* Do not use the TSB for vmemmap.  */    210         /* Do not use the TSB for vmemmap.  */
211         sethi           %hi(VMEMMAP_BASE), %g5    211         sethi           %hi(VMEMMAP_BASE), %g5
212         ldx             [%g5 + %lo(VMEMMAP_BAS    212         ldx             [%g5 + %lo(VMEMMAP_BASE)], %g5
213         cmp             %g4,%g5                   213         cmp             %g4,%g5
214         bgeu,pn         %xcc, kvmap_vmemmap       214         bgeu,pn         %xcc, kvmap_vmemmap
215          nop                                      215          nop
216 #endif                                            216 #endif
217                                                   217 
218         KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1    218         KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
219                                                   219 
220 kvmap_dtlb_tsbmiss:                               220 kvmap_dtlb_tsbmiss:
221         sethi           %hi(MODULES_VADDR), %g    221         sethi           %hi(MODULES_VADDR), %g5
222         cmp             %g4, %g5                  222         cmp             %g4, %g5
223         blu,pn          %xcc, kvmap_dtlb_longp    223         blu,pn          %xcc, kvmap_dtlb_longpath
224          sethi          %hi(VMALLOC_END), %g5     224          sethi          %hi(VMALLOC_END), %g5
225         ldx             [%g5 + %lo(VMALLOC_END    225         ldx             [%g5 + %lo(VMALLOC_END)], %g5
226         cmp             %g4, %g5                  226         cmp             %g4, %g5
227         bgeu,pn         %xcc, kvmap_dtlb_longp    227         bgeu,pn         %xcc, kvmap_dtlb_longpath
228          nop                                      228          nop
229                                                   229 
230 kvmap_check_obp:                                  230 kvmap_check_obp:
231         sethi           %hi(LOW_OBP_ADDRESS),     231         sethi           %hi(LOW_OBP_ADDRESS), %g5
232         cmp             %g4, %g5                  232         cmp             %g4, %g5
233         blu,pn          %xcc, kvmap_dtlb_vmall    233         blu,pn          %xcc, kvmap_dtlb_vmalloc_addr
234          mov            0x1, %g5                  234          mov            0x1, %g5
235         sllx            %g5, 32, %g5              235         sllx            %g5, 32, %g5
236         cmp             %g4, %g5                  236         cmp             %g4, %g5
237         blu,pn          %xcc, kvmap_dtlb_obp      237         blu,pn          %xcc, kvmap_dtlb_obp
238          nop                                      238          nop
239         ba,pt           %xcc, kvmap_dtlb_vmall    239         ba,pt           %xcc, kvmap_dtlb_vmalloc_addr
240          nop                                      240          nop
241                                                   241 
242 kvmap_dtlb_longpath:                              242 kvmap_dtlb_longpath:
243                                                   243 
244 661:    rdpr    %pstate, %g5                      244 661:    rdpr    %pstate, %g5
245         wrpr    %g5, PSTATE_AG | PSTATE_MG, %p    245         wrpr    %g5, PSTATE_AG | PSTATE_MG, %pstate
246         .section .sun4v_2insn_patch, "ax"         246         .section .sun4v_2insn_patch, "ax"
247         .word   661b                              247         .word   661b
248         SET_GL(1)                                 248         SET_GL(1)
249         ldxa            [%g0] ASI_SCRATCHPAD,     249         ldxa            [%g0] ASI_SCRATCHPAD, %g5
250         .previous                                 250         .previous
251                                                   251 
252         rdpr    %tl, %g3                          252         rdpr    %tl, %g3
253         cmp     %g3, 1                            253         cmp     %g3, 1
254                                                   254 
255 661:    mov     TLB_TAG_ACCESS, %g4               255 661:    mov     TLB_TAG_ACCESS, %g4
256         ldxa    [%g4] ASI_DMMU, %g5               256         ldxa    [%g4] ASI_DMMU, %g5
257         .section .sun4v_2insn_patch, "ax"         257         .section .sun4v_2insn_patch, "ax"
258         .word   661b                              258         .word   661b
259         ldx     [%g5 + HV_FAULT_D_ADDR_OFFSET]    259         ldx     [%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
260         nop                                       260         nop
261         .previous                                 261         .previous
262                                                   262 
263         /* The kernel executes in context zero    263         /* The kernel executes in context zero, therefore we do not
264          * need to clear the context ID bits o    264          * need to clear the context ID bits out of %g5 here.
265          */                                       265          */
266                                                   266 
267         be,pt   %xcc, sparc64_realfault_common    267         be,pt   %xcc, sparc64_realfault_common
268          mov    FAULT_CODE_DTLB, %g4              268          mov    FAULT_CODE_DTLB, %g4
269         ba,pt   %xcc, winfix_trampoline           269         ba,pt   %xcc, winfix_trampoline
270          nop                                      270          nop
                                                      

~ [ source navigation ] ~ [ diff markup ] ~ [ identifier search ] ~

kernel.org | git.kernel.org | LWN.net | Project Home | SVN repository | Mail admin

Linux® is a registered trademark of Linus Torvalds in the United States and other countries.
TOMOYO® is a registered trademark of NTT DATA CORPORATION.

sflogo.php