
TOMOYO Linux Cross Reference
Linux/arch/arm/boot/compressed/head.S


Diff markup

Differences between /arch/arm/boot/compressed/head.S and /arch/mips/boot/compressed/head.S


  1 /* SPDX-License-Identifier: GPL-2.0-only */    << 
  2 /*                                                  1 /*
  3  *  linux/arch/arm/boot/compressed/head.S      !!   2  * This file is subject to the terms and conditions of the GNU General Public
  4  *                                             !!   3  * License.  See the file "COPYING" in the main directory of this archive
  5  *  Copyright (C) 1996-2002 Russell King       !!   4  * for more details.
  6  *  Copyright (C) 2004 Hyok S. Choi (MPU suppo !!   5  *
  7  */                                            !!   6  * Copyright (C) 1994, 1995 Waldorf Electronics
  8 #include <linux/linkage.h>                     !!   7  * Written by Ralf Baechle and Andreas Busse
  9 #include <asm/assembler.h>                     !!   8  * Copyright (C) 1995 - 1999 Ralf Baechle
 10 #include <asm/v7m.h>                           !!   9  * Copyright (C) 1996 Paul M. Antoine
 11                                                !!  10  * Modified for DECStation and hence R3000 support by Paul M. Antoine
 12 #include "efi-header.S"                        !!  11  * Further modifications by David S. Miller and Harald Koerfgen
 13                                                !!  12  * Copyright (C) 1999 Silicon Graphics, Inc.
 14 #ifdef __ARMEB__                               !!  13  */
 15 #define OF_DT_MAGIC 0xd00dfeed                 !!  14 
 16 #else                                          !!  15 #include <asm/asm.h>
 17 #define OF_DT_MAGIC 0xedfe0dd0                 !!  16 #include <asm/regdef.h>
 18 #endif                                         !!  17 
 19                                                !!  18         LEAF(start)
 20  AR_CLASS(      .arch   armv7-a )              !!  19         /* Save boot rom start args */
 21  M_CLASS(       .arch   armv7-m )              !!  20         move    s0, a0
 22                                                !!  21         move    s1, a1
 23 /*                                             !!  22         move    s2, a2
 24  * Debugging stuff                             !!  23         move    s3, a3
 25  *                                             !!  24 
 26  * Note that these macros must not contain any !!  25         /* Clear BSS */
 27  * 100% relocatable.  Any attempt to do so wil !!  26         PTR_LA  a0, _edata
 28  * Please select one of the following when tur !!  27         PTR_LA  a2, _end
 29  */                                            !!  28 1:      PTR_S   zero, 0(a0)
 30 #ifdef DEBUG                                   !!  29         PTR_ADDIU a0, a0, PTRSIZE
 31                                                !!  30         bne     a2, a0, 1b
 32 #if defined(CONFIG_DEBUG_ICEDCC)               !!  31 
 33                                                !!  32         PTR_LA  a0, (.heap)          /* heap address */
 34 #if defined(CONFIG_CPU_V6) || defined(CONFIG_C !!  33         PTR_LA  sp, (.stack + 8192)  /* stack address */
 35                 .macro  loadsp, rb, tmp1, tmp2 !!  34 
 36                 .endm                          !!  35         PTR_LA  t9, decompress_kernel
 37                 .macro  writeb, ch, rb, tmp    !!  36         jalr    t9
 38                 mcr     p14, 0, \ch, c0, c5, 0 !!  37 
 39                 .endm                          !!  38 2:
 40 #elif defined(CONFIG_CPU_XSCALE)               !!  39         move    a0, s0
 41                 .macro  loadsp, rb, tmp1, tmp2 !!  40         move    a1, s1
 42                 .endm                          !!  41         move    a2, s2
 43                 .macro  writeb, ch, rb, tmp    !!  42         move    a3, s3
 44                 mcr     p14, 0, \ch, c8, c0, 0 !!  43         PTR_LI  t9, KERNEL_ENTRY
 45                 .endm                          !!  44         jalr    t9
 46 #else                                          << 
 47                 .macro  loadsp, rb, tmp1, tmp2 << 
 48                 .endm                          << 
 49                 .macro  writeb, ch, rb, tmp    << 
 50                 mcr     p14, 0, \ch, c1, c0, 0 << 
 51                 .endm                          << 
 52 #endif                                         << 
 53                                                << 
 54 #else                                          << 
 55                                                << 
 56 #include CONFIG_DEBUG_LL_INCLUDE               << 
 57                                                << 
 58                 .macro  writeb, ch, rb, tmp    << 
 59 #ifdef CONFIG_DEBUG_UART_FLOW_CONTROL          << 
 60                 waituartcts \tmp, \rb          << 
 61 #endif                                         << 
 62                 waituarttxrdy \tmp, \rb        << 
 63                 senduart \ch, \rb              << 
 64                 busyuart \tmp, \rb             << 
 65                 .endm                          << 
 66                                                << 
 67 #if defined(CONFIG_ARCH_SA1100)                << 
 68                 .macro  loadsp, rb, tmp1, tmp2 << 
 69                 mov     \rb, #0x80000000       << 
 70                 add     \rb, \rb, #0x00010000  << 
 71                 .endm                          << 
 72 #else                                          << 
 73                 .macro  loadsp, rb, tmp1, tmp2 << 
 74                 addruart \rb, \tmp1, \tmp2     << 
 75                 .endm                          << 
 76 #endif                                         << 
 77 #endif                                         << 
 78 #endif                                         << 
 79                                                << 
 80                 .macro  kputc,val              << 
 81                 mov     r0, \val               << 
 82                 bl      putc                   << 
 83                 .endm                          << 
 84                                                << 
 85                 .macro  kphex,val,len          << 
 86                 mov     r0, \val               << 
 87                 mov     r1, #\len              << 
 88                 bl      phex                   << 
 89                 .endm                          << 
 90                                                << 
 91                 /*                             << 
 92                  * Debug kernel copy by printi << 
 93                  */                            << 
 94                 .macro dbgkc, begin, end, cbeg << 
 95 #ifdef DEBUG                                   << 
 96                 kputc   #'C'                   << 
 97                 kputc   #':'                   << 
 98                 kputc   #'0'                   << 
 99                 kputc   #'x'                   << 
100                 kphex   \begin, 8       /* Sta << 
101                 kputc   #'-'                   << 
102                 kputc   #'0'                   << 
103                 kputc   #'x'                   << 
104                 kphex   \end, 8         /* End << 
105                 kputc   #'-'                   << 
106                 kputc   #'>'                   << 
107                 kputc   #'0'                   << 
108                 kputc   #'x'                   << 
109                 kphex   \cbegin, 8      /* Sta << 
110                 kputc   #'-'                   << 
111                 kputc   #'0'                   << 
112                 kputc   #'x'                   << 
113                 kphex   \cend, 8        /* End << 
114                 kputc   #'\n'                  << 
115 #endif                                         << 
116                 .endm                          << 
117                                                << 
118                 /*                             << 
119                  * Debug print of the final ap << 
120                  */                            << 
121                 .macro dbgadtb, begin, size    << 
122 #ifdef DEBUG                                   << 
123                 kputc   #'D'                   << 
124                 kputc   #'T'                   << 
125                 kputc   #'B'                   << 
126                 kputc   #':'                   << 
127                 kputc   #'0'                   << 
128                 kputc   #'x'                   << 
129                 kphex   \begin, 8       /* Sta << 
130                 kputc   #' '                   << 
131                 kputc   #'('                   << 
132                 kputc   #'0'                   << 
133                 kputc   #'x'                   << 
134                 kphex   \size, 8        /* Siz << 
135                 kputc   #')'                   << 
136                 kputc   #'\n'                  << 
137 #endif                                         << 
138                 .endm                          << 
139                                                << 
140                 .macro  enable_cp15_barriers,  << 
141                 mrc     p15, 0, \reg, c1, c0,  << 
142                 tst     \reg, #(1 << 5)        << 
143                 bne     .L_\@                  << 
144                 orr     \reg, \reg, #(1 << 5)  << 
145                 mcr     p15, 0, \reg, c1, c0,  << 
146  ARM(           .inst   0xf57ff06f             << 
147  THUMB(         isb                            << 
148 .L_\@:                                         << 
149                 .endm                          << 
150                                                << 
151                 /*                             << 
152                  * The kernel build system app << 
153                  * decompressed kernel at the  << 
154                  * in little-endian form.      << 
155                  */                            << 
156                 .macro  get_inflated_image_siz << 
157                 adr     \res, .Linflated_image << 
158                 ldr     \tmp1, [\res]          << 
159                 add     \tmp1, \tmp1, \res     << 
160                                                << 
161                 ldrb    \res, [\tmp1]          << 
162                 ldrb    \tmp2, [\tmp1, #1]     << 
163                 orr     \res, \res, \tmp2, lsl << 
164                 ldrb    \tmp2, [\tmp1, #2]     << 
165                 ldrb    \tmp1, [\tmp1, #3]     << 
166                 orr     \res, \res, \tmp2, lsl << 
167                 orr     \res, \res, \tmp1, lsl << 
168                 .endm                          << 
169                                                << 
170                 .macro  be32tocpu, val, tmp    << 
171 #ifndef __ARMEB__                              << 
172                 /* convert to little endian */ << 
173                 rev_l   \val, \tmp             << 
174 #endif                                         << 
175                 .endm                          << 
176                                                << 
177                 .section ".start", "ax"        << 
178 /*                                             << 
179  * sort out different calling conventions      << 
180  */                                            << 
181                 .align                         << 
182                 /*                             << 
183                  * Always enter in ARM state f << 
184                  * As of today (2014) that's e << 
185                  * classes.                    << 
186                  */                            << 
187  AR_CLASS(      .arm    )                      << 
188 start:                                         << 
189                 .type   start,#function        << 
190                 /*                             << 
191                  * These 7 nops along with the << 
192                  * !THUMB2 form 8 nops that ma << 
193                  * on legacy ARM systems that  << 
194                  * binary format. The boot loa << 
195                  * jump 32 bytes into the imag << 
196                  * with these 8 nops filling e << 
197                  * work as expected on these l << 
198                  * 7 of the nops as it turns o << 
199                  * were patching the initial i << 
200                  * had started to exploit this << 
201                  */                            << 
202                 __initial_nops                 << 
203                 .rept   5                      << 
204                 __nop                          << 
205                 .endr                          << 
206 #ifndef CONFIG_THUMB2_KERNEL                   << 
207                 __nop                          << 
208 #else                                          << 
209  AR_CLASS(      sub     pc, pc, #3      )      << 
210   M_CLASS(      nop.w                   )      << 
211                 .thumb                         << 
212 #endif                                         << 
213                 W(b)    1f                     << 
214                                                << 
215                 .word   _magic_sig      @ Magi << 
216                 .word   _magic_start    @ abso << 
217                 .word   _magic_end      @ zIma << 
218                 .word   0x04030201      @ endi << 
219                 .word   0x45454545      @ anot << 
220                 .word   _magic_table    @ addi << 
221                                                << 
222                 __EFI_HEADER                   << 
223 1:                                             << 
224  ARM_BE8(       setend  be              )      << 
225  AR_CLASS(      mrs     r9, cpsr        )      << 
226 #ifdef CONFIG_ARM_VIRT_EXT                     << 
227                 bl      __hyp_stub_install     << 
228 #endif                                         << 
229                 mov     r7, r1                 << 
230                 mov     r8, r2                 << 
231                                                << 
232 #ifndef CONFIG_CPU_V7M                         << 
233                 /*                             << 
234                  * Booting from Angel - need t << 
235                  * FIQs/IRQs (numeric definiti << 
236                  * We only do this if we were  << 
237                  */                            << 
238                 mrs     r2, cpsr               << 
239                 tst     r2, #3                 << 
240                 bne     not_angel              << 
241                 mov     r0, #0x17              << 
242  ARM(           swi     0x123456        )      << 
243  THUMB(         svc     0xab            )      << 
244 not_angel:                                     << 
245                 safe_svcmode_maskall r0        << 
246                 msr     spsr_cxsf, r9          << 
247                                                << 
248 #endif                                         << 
249                 /*                             << 
250                  * Note that some cache flushi << 
251                  * be needed here - is there a << 
252                  */                            << 
253                                                << 
254                 /*                             << 
255                  * some architecture specific  << 
256                  * by the linker here, but it  << 
257                  */                            << 
258                                                << 
259                 .text                          << 
260                                                << 
261 #ifdef CONFIG_AUTO_ZRELADDR                    << 
262                 /*                             << 
263                  * Find the start of physical  << 
264                  * without the MMU on, we are  << 
265                  * We just need to get rid of  << 
266                  * address.                    << 
267                  *                             << 
268                  * This alignment is a balance << 
269                  * different platforms - we ha << 
270                  * platforms which align the s << 
271                  * to 128MB to use this featur << 
272                  * to be placed within the fir << 
273                  * platforms.  Increasing the  << 
274                  * stricter alignment requirem << 
275                  * memory, but relaxing it mea << 
276                  * are already placing their z << 
277                  * of this range.              << 
278                  */                            << 
279                 mov     r0, pc                 << 
280                 and     r0, r0, #0xf8000000    << 
281 #ifdef CONFIG_USE_OF                           << 
282                 adr     r1, LC1                << 
283 #ifdef CONFIG_ARM_APPENDED_DTB                 << 
284                 /*                             << 
285                  * Look for an appended DTB.   << 
286                  * validate the calculated sta << 
287                  * memory nodes may need to be << 
288                  * an offset from the same sta << 
289                  */                            << 
290                 ldr     r2, [r1, #4]    @ get  << 
291                 add     r2, r2, r1      @ relo << 
292                 ldr     r2, [r2]        @ get  << 
293                 ldr     r3, =OF_DT_MAGIC       << 
294                 cmp     r2, r3          @ do w << 
295                 beq     1f              @ if y << 
296 #endif /* CONFIG_ARM_APPENDED_DTB */           << 
297                                                << 
298                 /*                             << 
299                  * Make sure we have some stac << 
300                  * No GOT fixup has occurred y << 
301                  * about to call uses any glob << 
302                  */                            << 
303                 ldr     sp, [r1]        @ get  << 
304                 add     sp, sp, r1      @ appl << 
305                                                << 
306                 /* Validate calculated start a << 
307                 mov     r1, r8                 << 
308                 bl      fdt_check_mem_start    << 
309 1:                                             << 
310 #endif /* CONFIG_USE_OF */                     << 
311                 /* Determine final kernel imag << 
312                 add     r4, r0, #TEXT_OFFSET   << 
313 #else                                          << 
314                 ldr     r4, =zreladdr          << 
315 #endif                                         << 
316                                                << 
317                 /*                             << 
318                  * Set up a page table only if << 
319                  * That means r4 < pc || r4 -  << 
320                  * Given that r4 > &_end is mo << 
321                  * additional 1MB of room for  << 
322                  */                            << 
323                 mov     r0, pc                 << 
324                 cmp     r0, r4                 << 
325                 ldrcc   r0, .Lheadroom         << 
326                 addcc   r0, r0, pc             << 
327                 cmpcc   r4, r0                 << 
328                 orrcc   r4, r4, #1             << 
329                 blcs    cache_on               << 
330                                                << 
331 restart:        adr     r0, LC1                << 
332                 ldr     sp, [r0]               << 
333                 ldr     r6, [r0, #4]           << 
334                 add     sp, sp, r0             << 
335                 add     r6, r6, r0             << 
336                                                << 
337                 get_inflated_image_size r9, r1 << 
338                                                << 
339 #ifndef CONFIG_ZBOOT_ROM                       << 
340                 /* malloc space is above the r << 
341                 add     r10, sp, #MALLOC_SIZE  << 
342 #else                                          << 
343                 /*                             << 
344                  * With ZBOOT_ROM the bss/stac << 
345                  * but someone could still run << 
346                  * in which case our reference << 
347                  */                            << 
348                 mov     r10, r6                << 
349 #endif                                         << 
350                                                << 
351                 mov     r5, #0                 << 
352 #ifdef CONFIG_ARM_APPENDED_DTB                 << 
353 /*                                             << 
354  *   r4  = final kernel address (possibly with << 
355  *   r5  = appended dtb size (still unknown)   << 
356  *   r6  = _edata                              << 
357  *   r7  = architecture ID                     << 
358  *   r8  = atags/device tree pointer           << 
359  *   r9  = size of decompressed image          << 
360  *   r10 = end of this image, including  bss/s << 
361  *   sp  = stack pointer                       << 
362  *                                             << 
363  * if there are device trees (dtb) appended to << 
364  * dtb data will get relocated along with the  << 
365  */                                            << 
366                                                << 
367                 ldr     lr, [r6, #0]           << 
368                 ldr     r1, =OF_DT_MAGIC       << 
369                 cmp     lr, r1                 << 
370                 bne     dtb_check_done         << 
371                                                << 
372 #ifdef CONFIG_ARM_ATAG_DTB_COMPAT              << 
373                 /*                             << 
374                  * OK... Let's do some funky b << 
375                  * If we do have a DTB appende << 
376                  * an ATAG list around, we wan << 
377                  * and folded into the former  << 
378                  * yet, but none of the code w << 
379                  * global variable.            << 
380                 */                             << 
381                                                << 
382                 /* Get the initial DTB size */ << 
383                 ldr     r5, [r6, #4]           << 
384                 be32tocpu r5, r1               << 
385                 dbgadtb r6, r5                 << 
386                 /* 50% DTB growth should be go << 
387                 add     r5, r5, r5, lsr #1     << 
388                 /* preserve 64-bit alignment * << 
389                 add     r5, r5, #7             << 
390                 bic     r5, r5, #7             << 
391                 /* clamp to 32KB min and 1MB m << 
392                 cmp     r5, #(1 << 15)         << 
393                 movlo   r5, #(1 << 15)         << 
394                 cmp     r5, #(1 << 20)         << 
395                 movhi   r5, #(1 << 20)         << 
396                 /* temporarily relocate the st << 
397                 add     sp, sp, r5             << 
398                                                << 
399                 mov     r0, r8                 << 
400                 mov     r1, r6                 << 
401                 mov     r2, r5                 << 
402                 bl      atags_to_fdt           << 
403                                                << 
404                 /*                             << 
405                  * If returned value is 1, the << 
406                  * pointed by r8.  Try the typ << 
407                  * of RAM and hope for the bes << 
408                  */                            << 
409                 cmp     r0, #1                 << 
410                 sub     r0, r4, #TEXT_OFFSET   << 
411                 bic     r0, r0, #1             << 
412                 add     r0, r0, #0x100         << 
413                 mov     r1, r6                 << 
414                 mov     r2, r5                 << 
415                 bleq    atags_to_fdt           << 
416                                                << 
417                 sub     sp, sp, r5             << 
418 #endif                                         << 
419                                                << 
420                 mov     r8, r6                 << 
421                                                << 
422                 /*                             << 
423                  * Make sure that the DTB does << 
424                  * kernel's .bss area. To do s << 
425                  * kernel size to compensate i << 
426                  * than the relocated code.    << 
427                  */                            << 
428                 ldr     r5, =_kernel_bss_size  << 
429                 adr     r1, wont_overwrite     << 
430                 sub     r1, r6, r1             << 
431                 subs    r1, r5, r1             << 
432                 addhi   r9, r9, r1             << 
433                                                << 
434                 /* Get the current DTB size */ << 
435                 ldr     r5, [r6, #4]           << 
436                 be32tocpu r5, r1               << 
437                                                << 
438                 /* preserve 64-bit alignment * << 
439                 add     r5, r5, #7             << 
440                 bic     r5, r5, #7             << 
441                                                << 
442                 /* relocate some pointers past << 
443                 add     r6, r6, r5             << 
444                 add     r10, r10, r5           << 
445                 add     sp, sp, r5             << 
446 dtb_check_done:                                << 
447 #endif                                         << 
448                                                << 
449 /*                                             << 
450  * Check to see if we will overwrite ourselves << 
451  *   r4  = final kernel address (possibly with << 
452  *   r9  = size of decompressed image          << 
453  *   r10 = end of this image, including  bss/s << 
454  * We basically want:                          << 
455  *   r4 - 16k page directory >= r10 -> OK      << 
456  *   r4 + image length <= address of wont_over << 
457  * Note: the possible LSB in r4 is harmless he << 
458  */                                            << 
459                 add     r10, r10, #16384       << 
460                 cmp     r4, r10                << 
461                 bhs     wont_overwrite         << 
462                 add     r10, r4, r9            << 
463                 adr     r9, wont_overwrite     << 
464                 cmp     r10, r9                << 
465                 bls     wont_overwrite         << 
466                                                << 
467 /*                                             << 
468  * Relocate ourselves past the end of the deco << 
469  *   r6  = _edata                              << 
470  *   r10 = end of the decompressed kernel      << 
471  * Because we always copy ahead, we need to do << 
472  * backward in case the source and destination << 
473  */                                            << 
474                 /*                             << 
475                  * Bump to the next 256-byte b << 
476                  * the relocation code added.  << 
477                  * ourself when the offset is  << 
478                  */                            << 
479                 add     r10, r10, #((reloc_cod << 
480                 bic     r10, r10, #255         << 
481                                                << 
482                 /* Get start of code we want t << 
483                 adr     r5, restart            << 
484                 bic     r5, r5, #31            << 
485                                                << 
486 /* Relocate the hyp vector base if necessary * << 
487 #ifdef CONFIG_ARM_VIRT_EXT                     << 
488                 mrs     r0, spsr               << 
489                 and     r0, r0, #MODE_MASK     << 
490                 cmp     r0, #HYP_MODE          << 
491                 bne     1f                     << 
492                                                << 
493                 /*                             << 
494                  * Compute the address of the  << 
495                  * Call __hyp_set_vectors with << 
496                  * can HVC again after the cop << 
497                  */                            << 
498                 adr_l   r0, __hyp_stub_vectors << 
499                 sub     r0, r0, r5             << 
500                 add     r0, r0, r10            << 
501                 bl      __hyp_set_vectors      << 
502 1:                                             << 
503 #endif                                         << 
504                                                << 
505                 sub     r9, r6, r5             << 
506                 add     r9, r9, #31            << 
507                 bic     r9, r9, #31            << 
508                 add     r6, r9, r5             << 
509                 add     r9, r9, r10            << 
510                                                << 
511 #ifdef DEBUG                                   << 
512                 sub     r10, r6, r5            << 
513                 sub     r10, r9, r10           << 
514                 /*                             << 
515                  * We are about to copy the ke << 
516                  * The boundaries of the new m << 
517                  * r10 and r9, whilst r5 and r << 
518                  * of the memory we are going  << 
519                  * Calling dbgkc will help wit << 
520                  * information.                << 
521                  */                            << 
522                 dbgkc   r5, r6, r10, r9        << 
523 #endif                                         << 
524                                                << 
525 1:              ldmdb   r6!, {r0 - r3, r10 - r << 
526                 cmp     r6, r5                 << 
527                 stmdb   r9!, {r0 - r3, r10 - r << 
528                 bhi     1b                     << 
529                                                << 
530                 /* Preserve offset to relocate << 
531                 sub     r6, r9, r6             << 
532                                                << 
533                 mov     r0, r9                 << 
534                 add     r1, sp, r6             << 
535                 bl      cache_clean_flush      << 
536                                                << 
537                 badr    r0, restart            << 
538                 add     r0, r0, r6             << 
539                 mov     pc, r0                 << 
540                                                << 
541 wont_overwrite:                                << 
542                 adr     r0, LC0                << 
543                 ldmia   r0, {r1, r2, r3, r11,  << 
544                 sub     r0, r0, r1             << 
545                                                << 
546 /*                                             << 
547  * If delta is zero, we are running at the add << 
548  *   r0  = delta                               << 
549  *   r2  = BSS start                           << 
550  *   r3  = BSS end                             << 
551  *   r4  = kernel execution address (possibly  << 
552  *   r5  = appended dtb size (0 if not present << 
553  *   r7  = architecture ID                     << 
554  *   r8  = atags pointer                       << 
555  *   r11 = GOT start                           << 
556  *   r12 = GOT end                             << 
557  *   sp  = stack pointer                       << 
558  */                                            << 
559                 orrs    r1, r0, r5             << 
560                 beq     not_relocated          << 
561                                                << 
562                 add     r11, r11, r0           << 
563                 add     r12, r12, r0           << 
564                                                << 
565 #ifndef CONFIG_ZBOOT_ROM                       << 
566                 /*                             << 
567                  * If we're running fully PIC  << 
568                  * we need to fix up pointers  << 
569                  * Note that the stack pointer << 
570                  */                            << 
571                 add     r2, r2, r0             << 
572                 add     r3, r3, r0             << 
573                                                << 
574                 /*                             << 
575                  * Relocate all entries in the << 
576                  * Bump bss entries to _edata  << 
577                  */                            << 
578 1:              ldr     r1, [r11, #0]          << 
579                 add     r1, r1, r0             << 
580                 cmp     r1, r2                 << 
581                 cmphs   r3, r1                 << 
582                 addhi   r1, r1, r5             << 
583                 str     r1, [r11], #4          << 
584                 cmp     r11, r12               << 
585                 blo     1b                     << 
586                                                << 
587                 /* bump our bss pointers too * << 
588                 add     r2, r2, r5             << 
589                 add     r3, r3, r5             << 
590                                                << 
591 #else                                          << 
592                                                << 
593                 /*                             << 
594                  * Relocate entries in the GOT << 
595                  * the entries that are outsid << 
596                  */                            << 
597 1:              ldr     r1, [r11, #0]          << 
598                 cmp     r1, r2                 << 
599                 cmphs   r3, r1                 << 
600                 addlo   r1, r1, r0             << 
601                 str     r1, [r11], #4          << 
602                 cmp     r11, r12               << 
603                 blo     1b                     << 
604 #endif                                         << 
605                                                << 
606 not_relocated:  mov     r0, #0                 << 
607 1:              str     r0, [r2], #4           << 
608                 str     r0, [r2], #4           << 
609                 str     r0, [r2], #4           << 
610                 str     r0, [r2], #4           << 
611                 cmp     r2, r3                 << 
612                 blo     1b                     << 
613                                                << 
614                 /*                             << 
615                  * Did we skip the cache setup << 
616                  * That is indicated by the LS << 
617                  * Do it now if so.            << 
618                  */                            << 
619                 tst     r4, #1                 << 
620                 bic     r4, r4, #1             << 
621                 blne    cache_on               << 
622                                                << 
623 /*                                             << 
624  * The C runtime environment should now be set << 
625  * Set up some pointers, and start decompressi << 
626  *   r4  = kernel execution address            << 
627  *   r7  = architecture ID                     << 
628  *   r8  = atags pointer                       << 
629  */                                            << 
630                 mov     r0, r4                 << 
631                 mov     r1, sp                 << 
632                 add     r2, sp, #MALLOC_SIZE   << 
633                 mov     r3, r7                 << 
634                 bl      decompress_kernel      << 
635                                                << 
636                 get_inflated_image_size r1, r2 << 
637                                                << 
638                 mov     r0, r4                 << 
639                 add     r1, r1, r0             << 
640                 bl      cache_clean_flush      << 
641                 bl      cache_off              << 
642                                                << 
643 #ifdef CONFIG_ARM_VIRT_EXT                     << 
644                 mrs     r0, spsr               << 
645                 and     r0, r0, #MODE_MASK     << 
646                 cmp     r0, #HYP_MODE          << 
647                 bne     __enter_kernel         << 
648                                                << 
649                 adr_l   r0, __hyp_reentry_vect << 
650                 bl      __hyp_set_vectors      << 
651                 __HVC(0)                       << 
652                                                << 
653                 b       .                      << 
654 #else                                          << 
655                 b       __enter_kernel         << 
656 #endif                                         << 
657                                                << 
658                 .align  2                      << 
659                 .type   LC0, #object           << 
660 LC0:            .word   LC0                    << 
661                 .word   __bss_start            << 
662                 .word   _end                   << 
663                 .word   _got_start             << 
664                 .word   _got_end               << 
665                 .size   LC0, . - LC0           << 
666                                                << 
667                 .type   LC1, #object           << 
668 LC1:            .word   .L_user_stack_end - LC << 
669                 .word   _edata - LC1           << 
670                 .size   LC1, . - LC1           << 
671                                                << 
672 .Lheadroom:                                    << 
673                 .word   _end - restart + 16384 << 
674                                                << 
675 .Linflated_image_size_offset:                  << 
676                 .long   (input_data_end - 4) - << 
677                                                << 
678 #ifdef CONFIG_ARCH_RPC                         << 
679                 .globl  params                 << 
680 params:         ldr     r0, =0x10000100        << 
681                 mov     pc, lr                 << 
682                 .ltorg                         << 
683                 .align                         << 
684 #endif                                         << 
685                                                << 
686 /*                                             << 
687  * dcache_line_size - get the minimum D-cache  << 
688  * on ARMv7.                                   << 
689  */                                            << 
690                 .macro  dcache_line_size, reg, << 
691 #ifdef CONFIG_CPU_V7M                          << 
692                 movw    \tmp, #:lower16:BASEAD << 
693                 movt    \tmp, #:upper16:BASEAD << 
694                 ldr     \tmp, [\tmp]           << 
695 #else                                          << 
696                 mrc     p15, 0, \tmp, c0, c0,  << 
697 #endif                                         << 
698                 lsr     \tmp, \tmp, #16        << 
699                 and     \tmp, \tmp, #0xf       << 
700                 mov     \reg, #4               << 
701                 mov     \reg, \reg, lsl \tmp   << 
702                 .endm                          << 
703                                                << 
704 /*                                             << 
705  * Turn on the cache.  We need to setup some p << 
706  * can have both the I and D caches on.        << 
707  *                                             << 
708  * We place the page tables 16k down from the  << 
709  * and we hope that nothing else is using it.  << 
710  * will go pop!                                << 
711  *                                             << 
712  * On entry,                                   << 
713  *  r4 = kernel execution address              << 
714  *  r7 = architecture number                   << 
715  *  r8 = atags pointer                         << 
716  * On exit,                                    << 
717  *  r0, r1, r2, r3, r9, r10, r12 corrupted     << 
718  * This routine must preserve:                 << 
719  *  r4, r7, r8                                 << 
720  */                                            << 
721                 .align  5                      << 
722 cache_on:       mov     r3, #8                 << 
723                 b       call_cache_fn          << 
724                                                << 
725 /*                                             << 
726  * Initialize the highest priority protection  << 
727  * to cover all 32bit address and cacheable an << 
728  */                                            << 
729 __armv4_mpu_cache_on:                          << 
730                 mov     r0, #0x3f              << 
731                 mcr     p15, 0, r0, c6, c7, 0  << 
732                 mcr     p15, 0, r0, c6, c7, 1  << 
733                                                << 
734                 mov     r0, #0x80              << 
735                 mcr     p15, 0, r0, c2, c0, 0  << 
736                 mcr     p15, 0, r0, c2, c0, 1  << 
737                 mcr     p15, 0, r0, c3, c0, 0  << 
738                                                << 
739                 mov     r0, #0xc000            << 
740                 mcr     p15, 0, r0, c5, c0, 1  << 
741                 mcr     p15, 0, r0, c5, c0, 0  << 
742                                                << 
743                 mov     r0, #0                 << 
744                 mcr     p15, 0, r0, c7, c10, 4 << 
745                 mcr     p15, 0, r0, c7, c5, 0  << 
746                 mcr     p15, 0, r0, c7, c6, 0  << 
747                 mrc     p15, 0, r0, c1, c0, 0  << 
748                                                << 
749                 orr     r0, r0, #0x002d        << 
750                 orr     r0, r0, #0x1000        << 
751                                                << 
752                 mcr     p15, 0, r0, c1, c0, 0  << 
753                                                << 
754                 mov     r0, #0                 << 
755                 mcr     p15, 0, r0, c7, c5, 0  << 
756                 mcr     p15, 0, r0, c7, c6, 0  << 
757                 mov     pc, lr                 << 
758                                                << 
759 __armv3_mpu_cache_on:                          << 
760                 mov     r0, #0x3f              << 
761                 mcr     p15, 0, r0, c6, c7, 0  << 
762                                                << 
763                 mov     r0, #0x80              << 
764                 mcr     p15, 0, r0, c2, c0, 0  << 
765                 mcr     p15, 0, r0, c3, c0, 0  << 
766                                                << 
767                 mov     r0, #0xc000            << 
768                 mcr     p15, 0, r0, c5, c0, 0  << 
769                                                << 
770                 mov     r0, #0                 << 
771                 mcr     p15, 0, r0, c7, c0, 0  << 
772                 /*                             << 
773                  * ?? ARMv3 MMU does not allow << 
774                  * does this really work on AR << 
775                  */                            << 
776                 mrc     p15, 0, r0, c1, c0, 0  << 
777                                                << 
778                 orr     r0, r0, #0x000d        << 
779                 /* ?? this overwrites the valu << 
780                 mov     r0, #0                 << 
781                 mcr     p15, 0, r0, c1, c0, 0  << 
782                                                << 
783                 /* ?? invalidate for the secon << 
784                 mcr     p15, 0, r0, c7, c0, 0  << 
785                 mov     pc, lr                 << 
786                                                << 
787 #ifdef CONFIG_CPU_DCACHE_WRITETHROUGH          << 
788 #define CB_BITS 0x08                           << 
789 #else                                          << 
790 #define CB_BITS 0x0c                           << 
791 #endif                                         << 
792                                                << 
793 __setup_mmu:    sub     r3, r4, #16384         << 
794                 bic     r3, r3, #0xff          << 
795                 bic     r3, r3, #0x3f00        << 
796 /*                                             << 
797  * Initialise the page tables, turning on the  << 
798  * bits for the RAM area only.                 << 
799  */                                            << 
800                 mov     r0, r3                 << 
801                 mov     r9, r0, lsr #18        << 
802                 mov     r9, r9, lsl #18        << 
803                 add     r10, r9, #0x10000000   << 
804                 mov     r1, #0x12              << 
805                 orr     r1, r1, #3 << 10       << 
806                 add     r2, r3, #16384         << 
807 1:              cmp     r1, r9                 << 
808                 cmphs   r10, r1                << 
809                 bic     r1, r1, #0x1c          << 
810                 orrlo   r1, r1, #0x10          << 
811                 orrhs   r1, r1, r6             << 
812                 str     r1, [r0], #4           << 
813                 add     r1, r1, #1048576       << 
814                 teq     r0, r2                 << 
815                 bne     1b                     << 
816 /*                                             << 
817  * If ever we are running from Flash, then we  << 
818  * to be enabled also for our execution instan << 
819  * so there is no map overlap problem for up t << 
820  * If the execution is in RAM then we would on << 
821  */                                            << 
822                 orr     r1, r6, #0x04          << 
823                 orr     r1, r1, #3 << 10       << 
824                 mov     r2, pc                 << 
825                 mov     r2, r2, lsr #20        << 
826                 orr     r1, r1, r2, lsl #20    << 
827                 add     r0, r3, r2, lsl #2     << 
828                 str     r1, [r0], #4           << 
829                 add     r1, r1, #1048576       << 
830                 str     r1, [r0]               << 
831                 mov     pc, lr                 << 
832 ENDPROC(__setup_mmu)                           << 
833                                                << 
834 @ Enable unaligned access on v6, to allow bett << 
835 @ for the decompressor C code:                 << 
836 __armv6_mmu_cache_on:                          << 
837                 mrc     p15, 0, r0, c1, c0, 0  << 
838                 bic     r0, r0, #2             << 
839                 orr     r0, r0, #1 << 22       << 
840                 mcr     p15, 0, r0, c1, c0, 0  << 
841                 b       __armv4_mmu_cache_on   << 
842                                                << 
843 __arm926ejs_mmu_cache_on:                      << 
844 #ifdef CONFIG_CPU_DCACHE_WRITETHROUGH          << 
845                 mov     r0, #4                 << 
846                 mcr     p15, 7, r0, c15, c0, 0 << 
847 #endif                                         << 
848                                                << 
849 __armv4_mmu_cache_on:                          << 
850                 mov     r12, lr                << 
851 #ifdef CONFIG_MMU                              << 
852                 mov     r6, #CB_BITS | 0x12    << 
853                 bl      __setup_mmu            << 
854                 mov     r0, #0                 << 
855                 mcr     p15, 0, r0, c7, c10, 4 << 
856                 mcr     p15, 0, r0, c8, c7, 0  << 
857                 mrc     p15, 0, r0, c1, c0, 0  << 
858                 orr     r0, r0, #0x5000        << 
859                 orr     r0, r0, #0x0030        << 
860  ARM_BE8(       orr     r0, r0, #1 << 25 )     << 
861                 bl      __common_mmu_cache_on  << 
862                 mov     r0, #0                 << 
863                 mcr     p15, 0, r0, c8, c7, 0  << 
864 #endif                                         << 
865                 mov     pc, r12                << 
866                                                << 
867 __armv7_mmu_cache_on:                          << 
868                 enable_cp15_barriers    r11    << 
869                 mov     r12, lr                << 
870 #ifdef CONFIG_MMU                              << 
871                 mrc     p15, 0, r11, c0, c1, 4 << 
872                 tst     r11, #0xf              << 
873                 movne   r6, #CB_BITS | 0x02    << 
874                 blne    __setup_mmu            << 
875                 mov     r0, #0                 << 
876                 mcr     p15, 0, r0, c7, c10, 4 << 
877                 tst     r11, #0xf              << 
878                 mcrne   p15, 0, r0, c8, c7, 0  << 
879 #endif                                         << 
880                 mrc     p15, 0, r0, c1, c0, 0  << 
881                 bic     r0, r0, #1 << 28       << 
882                 orr     r0, r0, #0x5000        << 
883                 orr     r0, r0, #0x003c        << 
884                 bic     r0, r0, #2             << 
885                 orr     r0, r0, #1 << 22       << 
886                                                << 
887 #ifdef CONFIG_MMU                              << 
888  ARM_BE8(       orr     r0, r0, #1 << 25 )     << 
889                 mrcne   p15, 0, r6, c2, c0, 2  << 
890                 orrne   r0, r0, #1             << 
891                 movne   r1, #0xfffffffd        << 
892                 bic     r6, r6, #1 << 31       << 
893                 bic     r6, r6, #(7 << 0) | (1 << 
894                 mcrne   p15, 0, r3, c2, c0, 0  << 
895                 mcrne   p15, 0, r1, c3, c0, 0  << 
896                 mcrne   p15, 0, r6, c2, c0, 2  << 
897 #endif                                         << 
898                 mcr     p15, 0, r0, c7, c5, 4  << 
899                 mcr     p15, 0, r0, c1, c0, 0  << 
900                 mrc     p15, 0, r0, c1, c0, 0  << 
901                 mov     r0, #0                 << 
902                 mcr     p15, 0, r0, c7, c5, 4  << 
903                 mov     pc, r12                << 
904                                                << 
905 __fa526_cache_on:                              << 
906                 mov     r12, lr                << 
907                 mov     r6, #CB_BITS | 0x12    << 
908                 bl      __setup_mmu            << 
909                 mov     r0, #0                 << 
910                 mcr     p15, 0, r0, c7, c7, 0  << 
911                 mcr     p15, 0, r0, c7, c10, 4 << 
912                 mcr     p15, 0, r0, c8, c7, 0  << 
913                 mrc     p15, 0, r0, c1, c0, 0  << 
914                 orr     r0, r0, #0x1000        << 
915                 bl      __common_mmu_cache_on  << 
916                 mov     r0, #0                 << 
917                 mcr     p15, 0, r0, c8, c7, 0  << 
918                 mov     pc, r12                << 
919                                                << 
920 __common_mmu_cache_on:                         << 
921 #ifndef CONFIG_THUMB2_KERNEL                   << 
922 #ifndef DEBUG                                  << 
923                 orr     r0, r0, #0x000d        << 
924 #endif                                         << 
925                 mov     r1, #-1                << 
926                 mcr     p15, 0, r3, c2, c0, 0  << 
927                 mcr     p15, 0, r1, c3, c0, 0  << 
928                 b       1f                     << 
929                 .align  5                      << 
930 1:              mcr     p15, 0, r0, c1, c0, 0  << 
931                 mrc     p15, 0, r0, c1, c0, 0  << 
932                 sub     pc, lr, r0, lsr #32    << 
933 #endif                                         << 
934                                                << 
935 #define PROC_ENTRY_SIZE (4*5)                  << 
936                                                << 
937 /*                                             << 
938  * Here follow the relocatable cache support f << 
939  * various processors.  This is a generic hook << 
940  * entry and jumping to an instruction at the  << 
941  * from the start of the block.  Please note t << 
942  * independent code.                           << 
943  *                                             << 
944  *  r1  = corrupted                            << 
945  *  r2  = corrupted                            << 
946  *  r3  = block offset                         << 
947  *  r9  = corrupted                            << 
948  *  r12 = corrupted                            << 
949  */                                            << 
950                                                << 
951 call_cache_fn:  adr     r12, proc_types        << 
952 #ifdef CONFIG_CPU_CP15                         << 
953                 mrc     p15, 0, r9, c0, c0     << 
954 #elif defined(CONFIG_CPU_V7M)                  << 
955                 /*                             << 
956                  * On v7-M the processor id is << 
957                  * register, but as cache hand << 
958                  * v7-M (if existant at all) w << 
959                  * If V7M_SCB_CPUID were used  << 
960                  * __armv7_mmu_cache_{on,off,f << 
961                  * use cp15 registers that are << 
962                  */                            << 
963                 bx      lr                     << 
964 #else                                          << 
965                 ldr     r9, =CONFIG_PROCESSOR_ << 
966 #endif                                         << 
967 1:              ldr     r1, [r12, #0]          << 
968                 ldr     r2, [r12, #4]          << 
969                 eor     r1, r1, r9             << 
970                 tst     r1, r2                 << 
971  ARM(           addeq   pc, r12, r3            << 
972  THUMB(         addeq   r12, r3                << 
973  THUMB(         moveq   pc, r12                << 
974                 add     r12, r12, #PROC_ENTRY_ << 
975                 b       1b                     << 
976                                                << 
/*
 * Table for cache operations.  This is basically:
 *   - CPU ID match
 *   - CPU ID mask
 *   - 'cache on' method instruction
 *   - 'cache off' method instruction
 *   - 'cache flush' method instruction
 *
 * We match an entry using: ((real_id ^ match) & mask) == 0
 *
 * Writethrough caches generally only need 'on' and 'off'
 * methods.  Writeback caches _must_ have the flush method
 * defined.
 */
		.align	2
		.type	proc_types,#object
proc_types:
		.word	0x41000000		@ old ARM ID
		.word	0xff00f000
		mov	pc, lr			@ cache on
 THUMB(		nop				)
		mov	pc, lr			@ cache off
 THUMB(		nop				)
		mov	pc, lr			@ cache flush
 THUMB(		nop				)

		.word	0x41007000		@ ARM7/710
		.word	0xfff8fe00
		mov	pc, lr			@ cache on
 THUMB(		nop				)
		mov	pc, lr			@ cache off
 THUMB(		nop				)
		mov	pc, lr			@ cache flush
 THUMB(		nop				)

		.word	0x41807200		@ ARM720T (writethrough)
		.word	0xffffff00
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		mov	pc, lr			@ cache flush (writethrough)
 THUMB(		nop				)

		.word	0x41007400		@ ARM74x
		.word	0xff00ff00
		W(b)	__armv3_mpu_cache_on
		W(b)	__armv3_mpu_cache_off
		W(b)	__armv3_mpu_cache_flush

		.word	0x41009400		@ ARM94x
		.word	0xff00ff00
		W(b)	__armv4_mpu_cache_on
		W(b)	__armv4_mpu_cache_off
		W(b)	__armv4_mpu_cache_flush

		.word	0x41069260		@ ARM926EJ-S (v5TEJ)
		.word	0xff0ffff0
		W(b)	__arm926ejs_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x00007000		@ ARM7 IDs
		.word	0x0000f000
		mov	pc, lr			@ cache on
 THUMB(		nop				)
		mov	pc, lr			@ cache off
 THUMB(		nop				)
		mov	pc, lr			@ cache flush
 THUMB(		nop				)

		@ Everything from here on will be the new ID system.

		.word	0x4401a100		@ sa110 / sa1100
		.word	0xffffffe0
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x6901b110		@ sa1110
		.word	0xfffffff0
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x56056900
		.word	0xffffff00		@ PXA9xx
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x56158000		@ PXA168
		.word	0xfffff000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x56050000		@ Feroceon
		.word	0xff0f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

#ifdef CONFIG_CPU_FEROCEON_OLD_ID
		/* this conflicts with the standard ARMv5TE entry */
		.long	0x41009260		@ Old Feroceon
		.long	0xff00fff0
		b	__armv4_mmu_cache_on
		b	__armv4_mmu_cache_off
		b	__armv5tej_mmu_cache_flush
#endif

		.word	0x66015261		@ FA526
		.word	0xff01fff1
		W(b)	__fa526_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__fa526_cache_flush

		@ These match on the architecture ID

		.word	0x00020000		@ ARMv4T
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x00050000		@ ARMv5TE
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x00060000		@ ARMv5TEJ
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x0007b000		@ ARMv6
		.word	0x000ff000
		W(b)	__armv6_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv6_mmu_cache_flush

		.word	0x000f0000		@ new CPU Id
		.word	0x000f0000
		W(b)	__armv7_mmu_cache_on
		W(b)	__armv7_mmu_cache_off
		W(b)	__armv7_mmu_cache_flush

		.word	0			@ unrecognised type
		.word	0
		mov	pc, lr			@ cache on
 THUMB(		nop				)
		mov	pc, lr			@ cache off
 THUMB(		nop				)
		mov	pc, lr			@ cache flush
 THUMB(		nop				)

		.size	proc_types, . - proc_types

		/*
		 * If you get a "non-constant expression in ".if" statement"
		 * error from the assembler on this line, check that you have
		 * not accidentally written a "b" instruction where you should
		 * have written W(b).
		 */
		.if (. - proc_types) % PROC_ENTRY_SIZE != 0
		.error "The size of one or more proc_types entries is wrong."
		.endif
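
/*
 * Illustrative sketch (not part of the build): in C terms each
 * proc_types entry is five 32-bit words, and call_cache_fn walks the
 * table until ((real_id ^ match) & mask) == 0, then branches to the
 * slot selected by the offset passed in r3 (8, 12 and 16 for the
 * 'on', 'off' and 'flush' methods).  The struct and function names
 * below are invented for illustration only.
 *
 *	#include <stdint.h>
 *
 *	struct proc_type {
 *		uint32_t match;		// CPU ID value to match
 *		uint32_t mask;		// which ID bits matter
 *		uint32_t on;		// 'cache on' branch instruction
 *		uint32_t off;		// 'cache off' branch instruction
 *		uint32_t flush;		// 'cache flush' branch instruction
 *	};
 *
 *	static const struct proc_type *find_proc(const struct proc_type *t,
 *						 uint32_t real_id)
 *	{
 *		for (;; t++)
 *			if (((real_id ^ t->match) & t->mask) == 0)
 *				return t;
 *	}
 *
 * The terminator entry has match == 0 and mask == 0, so the loop
 * always ends on a valid entry.
 */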

/*
 * Turn off the Cache and MMU.  ARMv3 does not support
 * reading the control register, but ARMv4 does.
 *
 * On exit,
 *  r0, r1, r2, r3, r9, r12 corrupted
 * This routine must preserve:
 *  r4, r7, r8
 */
		.align	5
cache_off:	mov	r3, #12			@ cache_off function
		b	call_cache_fn

__armv4_mpu_cache_off:
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d
		mcr	p15, 0, r0, c1, c0	@ turn MPU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c7, c6, 0	@ flush D-Cache
		mcr	p15, 0, r0, c7, c5, 0	@ flush I-Cache
		mov	pc, lr

__armv3_mpu_cache_off:
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d
		mcr	p15, 0, r0, c1, c0, 0	@ turn MPU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
		mov	pc, lr

__armv4_mmu_cache_off:
#ifdef CONFIG_MMU
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d
		mcr	p15, 0, r0, c1, c0	@ turn MMU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c7	@ invalidate whole cache v4
		mcr	p15, 0, r0, c8, c7	@ invalidate whole TLB v4
#endif
		mov	pc, lr

__armv7_mmu_cache_off:
		mrc	p15, 0, r0, c1, c0
#ifdef CONFIG_MMU
		bic	r0, r0, #0x0005		@ turn MMU and D-cache off
#else
		bic	r0, r0, #0x0004		@ turn D-cache off
#endif
		mcr	p15, 0, r0, c1, c0
		mov	r0, #0
#ifdef CONFIG_MMU
		mcr	p15, 0, r0, c8, c7, 0	@ invalidate whole TLB
#endif
		mcr	p15, 0, r0, c7, c5, 6	@ invalidate BTC
		mcr	p15, 0, r0, c7, c10, 4	@ DSB
		mcr	p15, 0, r0, c7, c5, 4	@ ISB
		mov	pc, lr

/*
 * Clean and flush the cache to maintain consistency.
 *
 * On entry,
 *  r0 = start address
 *  r1 = end address (exclusive)
 * On exit,
 *  r1, r2, r3, r9, r10, r11, r12 corrupted
 * This routine must preserve:
 *  r4, r6, r7, r8
 */
		.align	5
cache_clean_flush:
		mov	r3, #16			@ cache_clean_flush function
		mov	r11, r1
		b	call_cache_fn

__armv4_mpu_cache_flush:
		tst	r4, #1
		movne	pc, lr
		mov	r2, #1
		mov	r3, #0
		mcr	p15, 0, ip, c7, c6, 0	@ invalidate D cache
		mov	r1, #7 << 5		@ 8 segments
1:		orr	r3, r1, #63 << 26	@ 64 entries
2:		mcr	p15, 0, r3, c7, c14, 2	@ clean & invalidate D index
		subs	r3, r3, #1 << 26
		bcs	2b			@ entries 63 to 0
		subs	r1, r1, #1 << 5
		bcs	1b			@ segments 7 to 0

		teq	r2, #0
		mcrne	p15, 0, ip, c7, c5, 0	@ invalidate I cache
		mcr	p15, 0, ip, c7, c10, 4	@ drain WB
		mov	pc, lr

__fa526_cache_flush:
		tst	r4, #1
		movne	pc, lr
		mov	r1, #0
		mcr	p15, 0, r1, c7, c14, 0	@ clean and invalidate D cache
		mcr	p15, 0, r1, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr

__armv6_mmu_cache_flush:
		mov	r1, #0
		tst	r4, #1
		mcreq	p15, 0, r1, c7, c14, 0	@ clean+invalidate D
		mcr	p15, 0, r1, c7, c5, 0	@ invalidate I+BTB
		mcreq	p15, 0, r1, c7, c15, 0	@ clean+invalidate unified
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr

__armv7_mmu_cache_flush:
		enable_cp15_barriers	r10
		tst	r4, #1
		bne	iflush
		mrc	p15, 0, r10, c0, c1, 5	@ read ID_MMFR1
		tst	r10, #0xf << 16		@ hierarchical cache (ARMv7)
		mov	r10, #0
		beq	hierarchical
		mcr	p15, 0, r10, c7, c14, 0	@ clean+invalidate D
		b	iflush
hierarchical:
		dcache_line_size r1, r2		@ r1 := dcache min line size
		sub	r2, r1, #1		@ r2 := line size mask
		bic	r0, r0, r2		@ round down start to line size
		sub	r11, r11, #1		@ end address is exclusive
		bic	r11, r11, r2		@ round down end to line size
0:		cmp	r0, r11			@ finished?
		bgt	iflush
		mcr	p15, 0, r0, c7, c14, 1	@ Dcache clean/invalidate by VA
		add	r0, r0, r1
		b	0b
iflush:
		mcr	p15, 0, r10, c7, c10, 4	@ DSB
		mcr	p15, 0, r10, c7, c5, 0	@ invalidate I+BTB
		mcr	p15, 0, r10, c7, c10, 4	@ DSB
		mcr	p15, 0, r10, c7, c5, 4	@ ISB
		mov	pc, lr
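
/*
 * Illustrative sketch (not part of the build) of the 'hierarchical'
 * range loop above, assuming line_size is the minimum D-cache line
 * size returned by dcache_line_size: the start and the (exclusive)
 * end are rounded down to a line boundary and every line in between
 * is cleaned and invalidated by virtual address.  The helper name is
 * invented; it stands in for the c7, c14, 1 operation.
 *
 *	typedef unsigned long addr_t;
 *
 *	void clean_inval_dcache_line(addr_t va);	// hypothetical
 *
 *	void clean_inval_range(addr_t start, addr_t end, addr_t line_size)
 *	{
 *		addr_t mask = line_size - 1;
 *
 *		start &= ~mask;			// round down to line start
 *		end = (end - 1) & ~mask;	// last line actually touched
 *		for (addr_t va = start; va <= end; va += line_size)
 *			clean_inval_dcache_line(va);
 *	}
 */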

__armv5tej_mmu_cache_flush:
		tst	r4, #1
		movne	pc, lr
1:		mrc	p15, 0, APSR_nzcv, c7, c14, 3	@ test,clean,invalidate D
		bne	1b
		mcr	p15, 0, r0, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r0, c7, c10, 4	@ drain WB
		mov	pc, lr

__armv4_mmu_cache_flush:
		tst	r4, #1
		movne	pc, lr
		mov	r2, #64*1024		@ default: 32K dcache size (*2)
		mov	r11, #32		@ default: 32 byte line size
		mrc	p15, 0, r3, c0, c0, 1	@ read cache type
		teq	r3, r9			@ cache ID register present?
		beq	no_cache_id
		mov	r1, r3, lsr #18
		and	r1, r1, #7
		mov	r2, #1024
		mov	r2, r2, lsl r1		@ base dcache size *2
		tst	r3, #1 << 14		@ test M bit
		addne	r2, r2, r2, lsr #1	@ +1/2 size if M == 1
		mov	r3, r3, lsr #12
		and	r3, r3, #3
		mov	r11, #8
		mov	r11, r11, lsl r3	@ cache line size in bytes
no_cache_id:
		mov	r1, pc
		bic	r1, r1, #63		@ align to longest cache line
		add	r2, r1, r2
1:
 ARM(		ldr	r3, [r1], r11		) @ s/w flush D cache
 THUMB(		ldr	r3, [r1]		) @ s/w flush D cache
 THUMB(		add	r1, r1, r11		)
		teq	r1, r2
		bne	1b

		mcr	p15, 0, r1, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r1, c7, c6, 0	@ flush D cache
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr
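
/*
 * Illustrative sketch (not part of the build): the size computation
 * above in C.  It decodes the same cache type register fields the
 * assembly uses - D-cache size in bits [20:18], the 'M' bit at
 * bit 14 and the line length in bits [13:12] - and keeps the same
 * defaults when no cache type register is present.
 *
 *	#include <stdint.h>
 *
 *	static void armv4_dcache_geometry(uint32_t ctype, int have_ctype,
 *					  uint32_t *loop_bytes,
 *					  uint32_t *line_bytes)
 *	{
 *		*loop_bytes = 64 * 1024;	// default: 32K dcache (*2)
 *		*line_bytes = 32;		// default: 32 byte lines
 *		if (!have_ctype)
 *			return;
 *		*loop_bytes = 1024u << ((ctype >> 18) & 7);
 *		if (ctype & (1 << 14))			// M bit set
 *			*loop_bytes += *loop_bytes >> 1;	// +1/2 size
 *		*line_bytes = 8u << ((ctype >> 12) & 3);
 *	}
 */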

__armv3_mmu_cache_flush:
__armv3_mpu_cache_flush:
		tst	r4, #1
		movne	pc, lr
		mov	r1, #0
		mcr	p15, 0, r1, c7, c0, 0	@ invalidate whole cache v3
		mov	pc, lr

/*
 * Various debugging routines for printing hex characters and
 * memory, which again must be relocatable.
 */
#ifdef DEBUG
		.align	2
		.type	phexbuf,#object
phexbuf:	.space	12
		.size	phexbuf, . - phexbuf

@ phex corrupts {r0, r1, r2, r3}
phex:		adr	r3, phexbuf
		mov	r2, #0
		strb	r2, [r3, r1]
1:		subs	r1, r1, #1
		movmi	r0, r3
		bmi	puts
		and	r2, r0, #15
		mov	r0, r0, lsr #4
		cmp	r2, #10
		addge	r2, r2, #7
		add	r2, r2, #'0'
		strb	r2, [r3, r1]
		b	1b

@ puts corrupts {r0, r1, r2, r3}
puts:		loadsp	r3, r2, r1
1:		ldrb	r2, [r0], #1
		teq	r2, #0
		moveq	pc, lr
2:		writeb	r2, r3, r1
		mov	r1, #0x00020000
3:		subs	r1, r1, #1
		bne	3b
		teq	r2, #'\n'
		moveq	r2, #'\r'
		beq	2b
		teq	r0, #0
		bne	1b
		mov	pc, lr
@ putc corrupts {r0, r1, r2, r3}
putc:
		mov	r2, r0
		loadsp	r3, r1, r0
		mov	r0, #0
		b	2b

@ memdump corrupts {r0, r1, r2, r3, r10, r11, r12, lr}
memdump:	mov	r12, r0
		mov	r10, lr
		mov	r11, #0
2:		mov	r0, r11, lsl #2
		add	r0, r0, r12
		mov	r1, #8
		bl	phex
		mov	r0, #':'
		bl	putc
1:		mov	r0, #' '
		bl	putc
		ldr	r0, [r12, r11, lsl #2]
		mov	r1, #8
		bl	phex
		and	r0, r11, #7
		teq	r0, #3
		moveq	r0, #' '
		bleq	putc
		and	r0, r11, #7
		add	r11, r11, #1
		teq	r0, #7
		bne	1b
		mov	r0, #'\n'
		bl	putc
		cmp	r11, #64
		blt	2b
		mov	pc, r10
#endif
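
/*
 * Illustrative sketch (not part of the build): phex above fills its
 * buffer from the end with the low nibble of the value on each pass,
 * adding 7 to step from '9' to 'A' for digits above 9, then hands the
 * string to puts.  A C rendering of that conversion:
 *
 *	static void phex(unsigned long val, int digits, char *buf)
 *	{
 *		buf[digits] = '\0';
 *		while (digits--) {
 *			unsigned int nibble = val & 15;
 *
 *			if (nibble >= 10)
 *				nibble += 7;	// skip from '9' to 'A'
 *			buf[digits] = '0' + nibble;
 *			val >>= 4;
 *		}
 *	}
 */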

		.ltorg

#ifdef CONFIG_ARM_VIRT_EXT
.align 5
__hyp_reentry_vectors:
		W(b)	.			@ reset
		W(b)	.			@ undef
#ifdef CONFIG_EFI_STUB
		W(b)	__enter_kernel_from_hyp	@ hvc from HYP
#else
		W(b)	.			@ svc
#endif
		W(b)	.			@ pabort
		W(b)	.			@ dabort
		W(b)	__enter_kernel		@ hyp
		W(b)	.			@ irq
		W(b)	.			@ fiq
#endif /* CONFIG_ARM_VIRT_EXT */

__enter_kernel:
		mov	r0, #0			@ must be 0
		mov	r1, r7			@ restore architecture number
		mov	r2, r8			@ restore atags pointer
 ARM(		mov	pc, r4		)	@ call kernel
 M_CLASS(	add	r4, r4, #1	)	@ enter in Thumb mode for M class
 THUMB(		bx	r4		)	@ entry point is always ARM for A/R classes
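
/*
 * Illustrative sketch (not part of the build): the register state set
 * up above follows the ARM boot interface - r0 must be 0, r1 carries
 * the machine type number and r2 the physical address of the ATAGs or
 * DTB.  Expressed as a C function pointer call (the typedef and
 * function names are invented for illustration):
 *
 *	typedef void (*kernel_entry_fn)(unsigned long zero,
 *					unsigned long machine_type,
 *					unsigned long atags_or_dtb);
 *
 *	static void enter_kernel(kernel_entry_fn entry,
 *				 unsigned long machine_type,
 *				 unsigned long atags_or_dtb)
 *	{
 *		entry(0, machine_type, atags_or_dtb);	// never returns
 *	}
 */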

reloc_code_end:

#ifdef CONFIG_EFI_STUB
__enter_kernel_from_hyp:
		mrc	p15, 4, r0, c1, c0, 0	@ read HSCTLR
		bic	r0, r0, #0x5		@ disable MMU and caches
		mcr	p15, 4, r0, c1, c0, 0	@ write HSCTLR
		isb
		b	__enter_kernel

ENTRY(efi_enter_kernel)
		mov	r4, r0			@ preserve image base
		mov	r8, r1			@ preserve DT pointer

		adr_l	r0, call_cache_fn
		adr	r1, 0f
		bl	cache_clean_flush	@ clean [call_cache_fn, 0f)

#ifdef CONFIG_ARM_VIRT_EXT
		@
		@ The EFI spec does not support booting on ARM in HYP mode,
		@ since it mandates that the MMU and caches are on, with a
		@ 32-bit addressable DRAM mapping.
		@
		@ While the EDK2 reference implementation adheres to this,
		@ U-Boot might decide to enter the EFI stub in HYP mode
		@ anyway, with the MMU and caches either on or off.
		@
		mrs	r0, cpsr		@ get the current mode
		msr	spsr_cxsf, r0		@ record boot mode
		and	r0, r0, #MODE_MASK	@ are we running in HYP mode?
		cmp	r0, #HYP_MODE
		bne	.Lefi_svc

		mrc	p15, 4, r1, c1, c0, 0	@ read HSCTLR
		tst	r1, #0x1		@ MMU enabled at HYP?
		beq	1f

		@
		@ When running in HYP mode with the caches on, we're better
		@ off just carrying on using the cached 1:1 mapping that the
		@ firmware provided. Set up the HYP vectors so HVC instructions
		@ issued from HYP mode take us back into this code, which
		@ will disable the MMU before jumping to the kernel proper.
		@
 ARM(		bic	r1, r1, #(1 << 30)	) @ clear HSCTLR.TE
 THUMB(		orr	r1, r1, #(1 << 30)	) @ set HSCTLR.TE
		mcr	p15, 4, r1, c1, c0, 0
		adr	r0, __hyp_reentry_vectors
		mcr	p15, 4, r0, c12, c0, 0	@ set HYP vector base (HVBAR)
		isb
		b	.Lefi_hyp

		@
		@ When running in HYP mode with the caches off, we need to drop
		@ into SVC mode now, and let the decompressor set up its cached
		@ 1:1 mapping as usual.
		@
1:		mov	r9, r4			@ preserve image base
		bl	__hyp_stub_install	@ install HYP stub vectors
		safe_svcmode_maskall	r1	@ drop to SVC mode
		msr	spsr_cxsf, r0		@ record boot mode
		orr	r4, r9, #1		@ restore image base, set MMU-off flag
		b	.Lefi_hyp
.Lefi_svc:
#endif
		mrc	p15, 0, r0, c1, c0, 0	@ read SCTLR
		tst	r0, #0x1		@ MMU enabled?
		orreq	r4, r4, #1		@ MMU off: set flag bit in r4

.Lefi_hyp:
		mov	r0, r8			@ DT start
		add	r1, r8, r2		@ DT end
		bl	cache_clean_flush

		adr	r0, 0f			@ switch to our runtime stack
		ldr	sp, [r0]
		add	sp, sp, r0

		mov	r5, #0
		mov	r7, #0xFFFFFFFF		@ machine type is unknown
		b	wont_overwrite
ENDPROC(efi_enter_kernel)
0:		.long	.L_user_stack_end - .
#endif
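
/*
 * Illustrative sketch (not part of the build): the stack switch in
 * efi_enter_kernel has to work before any absolute addresses are
 * usable, so the word at local label 0: above stores the link-time
 * offset from itself to .L_user_stack_end.  Adding the runtime
 * address of that word to its contents yields the runtime address of
 * the stack end wherever the image was loaded.  The symbol name in
 * this C rendering is invented for illustration:
 *
 *	extern char stack_offset_word[];	// the '0:' word above
 *
 *	static unsigned long runtime_stack_end(void)
 *	{
 *		// stored at link time as .L_user_stack_end - 0b
 *		unsigned long offset = *(unsigned long *)stack_offset_word;
 *
 *		return (unsigned long)stack_offset_word + offset;
 *	}
 */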

		.align
		.section ".stack", "aw", %nobits
.L_user_stack:	.space	4096
.L_user_stack_end:
                                                      
