1 // SPDX-License-Identifier: GPL-2.0-only 1 2 /* 3 * This file defines C prototypes for the low- 4 * and populates a vtable for each selected AR 5 */ 6 7 #include <linux/types.h> 8 #include <asm/cacheflush.h> 9 10 #ifdef CONFIG_CPU_CACHE_V4 11 void v4_flush_icache_all(void); 12 void v4_flush_kern_cache_all(void); 13 void v4_flush_user_cache_all(void); 14 void v4_flush_user_cache_range(unsigned long, 15 void v4_coherent_kern_range(unsigned long, uns 16 int v4_coherent_user_range(unsigned long, unsi 17 void v4_flush_kern_dcache_area(void *, size_t) 18 void v4_dma_map_area(const void *, size_t, int 19 void v4_dma_unmap_area(const void *, size_t, i 20 void v4_dma_flush_range(const void *, const vo 21 22 struct cpu_cache_fns v4_cache_fns __initconst 23 .flush_icache_all = v4_flush_icache_al 24 .flush_kern_all = v4_flush_kern_cache_ 25 .flush_kern_louis = v4_flush_kern_cach 26 .flush_user_all = v4_flush_user_cache_ 27 .flush_user_range = v4_flush_user_cach 28 .coherent_kern_range = v4_coherent_ker 29 .coherent_user_range = v4_coherent_use 30 .flush_kern_dcache_area = v4_flush_ker 31 .dma_map_area = v4_dma_map_area, 32 .dma_unmap_area = v4_dma_unmap_area, 33 .dma_flush_range = v4_dma_flush_range, 34 }; 35 #endif 36 37 /* V4 write-back cache "V4WB" */ 38 #ifdef CONFIG_CPU_CACHE_V4WB 39 void v4wb_flush_icache_all(void); 40 void v4wb_flush_kern_cache_all(void); 41 void v4wb_flush_user_cache_all(void); 42 void v4wb_flush_user_cache_range(unsigned long 43 void v4wb_coherent_kern_range(unsigned long, u 44 int v4wb_coherent_user_range(unsigned long, un 45 void v4wb_flush_kern_dcache_area(void *, size_ 46 void v4wb_dma_map_area(const void *, size_t, i 47 void v4wb_dma_unmap_area(const void *, size_t, 48 void v4wb_dma_flush_range(const void *, const 49 50 struct cpu_cache_fns v4wb_cache_fns __initcons 51 .flush_icache_all = v4wb_flush_icache_ 52 .flush_kern_all = v4wb_flush_kern_cach 53 .flush_kern_louis = v4wb_flush_kern_ca 54 .flush_user_all = v4wb_flush_user_cach 55 
.flush_user_range = v4wb_flush_user_ca 56 .coherent_kern_range = v4wb_coherent_k 57 .coherent_user_range = v4wb_coherent_u 58 .flush_kern_dcache_area = v4wb_flush_k 59 .dma_map_area = v4wb_dma_map_area, 60 .dma_unmap_area = v4wb_dma_unmap_area, 61 .dma_flush_range = v4wb_dma_flush_rang 62 }; 63 #endif 64 65 /* V4 write-through cache "V4WT" */ 66 #ifdef CONFIG_CPU_CACHE_V4WT 67 void v4wt_flush_icache_all(void); 68 void v4wt_flush_kern_cache_all(void); 69 void v4wt_flush_user_cache_all(void); 70 void v4wt_flush_user_cache_range(unsigned long 71 void v4wt_coherent_kern_range(unsigned long, u 72 int v4wt_coherent_user_range(unsigned long, un 73 void v4wt_flush_kern_dcache_area(void *, size_ 74 void v4wt_dma_map_area(const void *, size_t, i 75 void v4wt_dma_unmap_area(const void *, size_t, 76 void v4wt_dma_flush_range(const void *, const 77 78 struct cpu_cache_fns v4wt_cache_fns __initcons 79 .flush_icache_all = v4wt_flush_icache_ 80 .flush_kern_all = v4wt_flush_kern_cach 81 .flush_kern_louis = v4wt_flush_kern_ca 82 .flush_user_all = v4wt_flush_user_cach 83 .flush_user_range = v4wt_flush_user_ca 84 .coherent_kern_range = v4wt_coherent_k 85 .coherent_user_range = v4wt_coherent_u 86 .flush_kern_dcache_area = v4wt_flush_k 87 .dma_map_area = v4wt_dma_map_area, 88 .dma_unmap_area = v4wt_dma_unmap_area, 89 .dma_flush_range = v4wt_dma_flush_rang 90 }; 91 #endif 92 93 /* Faraday FA526 cache */ 94 #ifdef CONFIG_CPU_CACHE_FA 95 void fa_flush_icache_all(void); 96 void fa_flush_kern_cache_all(void); 97 void fa_flush_user_cache_all(void); 98 void fa_flush_user_cache_range(unsigned long, 99 void fa_coherent_kern_range(unsigned long, uns 100 int fa_coherent_user_range(unsigned long, unsi 101 void fa_flush_kern_dcache_area(void *, size_t) 102 void fa_dma_map_area(const void *, size_t, int 103 void fa_dma_unmap_area(const void *, size_t, i 104 void fa_dma_flush_range(const void *, const vo 105 106 struct cpu_cache_fns fa_cache_fns __initconst 107 .flush_icache_all = fa_flush_icache_al 
108 .flush_kern_all = fa_flush_kern_cache_ 109 .flush_kern_louis = fa_flush_kern_cach 110 .flush_user_all = fa_flush_user_cache_ 111 .flush_user_range = fa_flush_user_cach 112 .coherent_kern_range = fa_coherent_ker 113 .coherent_user_range = fa_coherent_use 114 .flush_kern_dcache_area = fa_flush_ker 115 .dma_map_area = fa_dma_map_area, 116 .dma_unmap_area = fa_dma_unmap_area, 117 .dma_flush_range = fa_dma_flush_range, 118 }; 119 #endif 120 121 #ifdef CONFIG_CPU_CACHE_V6 122 void v6_flush_icache_all(void); 123 void v6_flush_kern_cache_all(void); 124 void v6_flush_user_cache_all(void); 125 void v6_flush_user_cache_range(unsigned long, 126 void v6_coherent_kern_range(unsigned long, uns 127 int v6_coherent_user_range(unsigned long, unsi 128 void v6_flush_kern_dcache_area(void *, size_t) 129 void v6_dma_map_area(const void *, size_t, int 130 void v6_dma_unmap_area(const void *, size_t, i 131 void v6_dma_flush_range(const void *, const vo 132 133 struct cpu_cache_fns v6_cache_fns __initconst 134 .flush_icache_all = v6_flush_icache_al 135 .flush_kern_all = v6_flush_kern_cache_ 136 .flush_kern_louis = v6_flush_kern_cach 137 .flush_user_all = v6_flush_user_cache_ 138 .flush_user_range = v6_flush_user_cach 139 .coherent_kern_range = v6_coherent_ker 140 .coherent_user_range = v6_coherent_use 141 .flush_kern_dcache_area = v6_flush_ker 142 .dma_map_area = v6_dma_map_area, 143 .dma_unmap_area = v6_dma_unmap_area, 144 .dma_flush_range = v6_dma_flush_range, 145 }; 146 #endif 147 148 #ifdef CONFIG_CPU_CACHE_V7 149 void v7_flush_icache_all(void); 150 void v7_flush_kern_cache_all(void); 151 void v7_flush_kern_cache_louis(void); 152 void v7_flush_user_cache_all(void); 153 void v7_flush_user_cache_range(unsigned long, 154 void v7_coherent_kern_range(unsigned long, uns 155 int v7_coherent_user_range(unsigned long, unsi 156 void v7_flush_kern_dcache_area(void *, size_t) 157 void v7_dma_map_area(const void *, size_t, int 158 void v7_dma_unmap_area(const void *, size_t, i 159 void 
v7_dma_flush_range(const void *, const vo 160 161 struct cpu_cache_fns v7_cache_fns __initconst 162 .flush_icache_all = v7_flush_icache_al 163 .flush_kern_all = v7_flush_kern_cache_ 164 .flush_kern_louis = v7_flush_kern_cach 165 .flush_user_all = v7_flush_user_cache_ 166 .flush_user_range = v7_flush_user_cach 167 .coherent_kern_range = v7_coherent_ker 168 .coherent_user_range = v7_coherent_use 169 .flush_kern_dcache_area = v7_flush_ker 170 .dma_map_area = v7_dma_map_area, 171 .dma_unmap_area = v7_dma_unmap_area, 172 .dma_flush_range = v7_dma_flush_range, 173 }; 174 175 /* Special quirky cache flush function for Bro 176 void b15_flush_kern_cache_all(void); 177 178 struct cpu_cache_fns b15_cache_fns __initconst 179 .flush_icache_all = v7_flush_icache_al 180 #ifdef CONFIG_CACHE_B15_RAC 181 .flush_kern_all = b15_flush_kern_cache 182 #else 183 .flush_kern_all = v7_flush_kern_cache_ 184 #endif 185 .flush_kern_louis = v7_flush_kern_cach 186 .flush_user_all = v7_flush_user_cache_ 187 .flush_user_range = v7_flush_user_cach 188 .coherent_kern_range = v7_coherent_ker 189 .coherent_user_range = v7_coherent_use 190 .flush_kern_dcache_area = v7_flush_ker 191 .dma_map_area = v7_dma_map_area, 192 .dma_unmap_area = v7_dma_unmap_area, 193 .dma_flush_range = v7_dma_flush_range, 194 }; 195 #endif 196 197 /* The NOP cache is just a set of dummy stubs 198 #ifdef CONFIG_CPU_CACHE_NOP 199 void nop_flush_icache_all(void); 200 void nop_flush_kern_cache_all(void); 201 void nop_flush_user_cache_all(void); 202 void nop_flush_user_cache_range(unsigned long 203 void nop_coherent_kern_range(unsigned long sta 204 int nop_coherent_user_range(unsigned long, uns 205 void nop_flush_kern_dcache_area(void *kaddr, s 206 void nop_dma_map_area(const void *start, size_ 207 void nop_dma_unmap_area(const void *start, siz 208 void nop_dma_flush_range(const void *start, co 209 210 struct cpu_cache_fns nop_cache_fns __initconst 211 .flush_icache_all = nop_flush_icache_a 212 .flush_kern_all = nop_flush_kern_cache 
213 .flush_kern_louis = nop_flush_kern_cac 214 .flush_user_all = nop_flush_user_cache 215 .flush_user_range = nop_flush_user_cac 216 .coherent_kern_range = nop_coherent_ke 217 .coherent_user_range = nop_coherent_us 218 .flush_kern_dcache_area = nop_flush_ke 219 .dma_map_area = nop_dma_map_area, 220 .dma_unmap_area = nop_dma_unmap_area, 221 .dma_flush_range = nop_dma_flush_range 222 }; 223 #endif 224 225 #ifdef CONFIG_CPU_CACHE_V7M 226 void v7m_flush_icache_all(void); 227 void v7m_flush_kern_cache_all(void); 228 void v7m_flush_user_cache_all(void); 229 void v7m_flush_user_cache_range(unsigned long, 230 void v7m_coherent_kern_range(unsigned long, un 231 int v7m_coherent_user_range(unsigned long, uns 232 void v7m_flush_kern_dcache_area(void *, size_t 233 void v7m_dma_map_area(const void *, size_t, in 234 void v7m_dma_unmap_area(const void *, size_t, 235 void v7m_dma_flush_range(const void *, const v 236 237 struct cpu_cache_fns v7m_cache_fns __initconst 238 .flush_icache_all = v7m_flush_icache_a 239 .flush_kern_all = v7m_flush_kern_cache 240 .flush_kern_louis = v7m_flush_kern_cac 241 .flush_user_all = v7m_flush_user_cache 242 .flush_user_range = v7m_flush_user_cac 243 .coherent_kern_range = v7m_coherent_ke 244 .coherent_user_range = v7m_coherent_us 245 .flush_kern_dcache_area = v7m_flush_ke 246 .dma_map_area = v7m_dma_map_area, 247 .dma_unmap_area = v7m_dma_unmap_area, 248 .dma_flush_range = v7m_dma_flush_range 249 }; 250 #endif 251 252 #ifdef CONFIG_CPU_ARM1020 253 void arm1020_flush_icache_all(void); 254 void arm1020_flush_kern_cache_all(void); 255 void arm1020_flush_user_cache_all(void); 256 void arm1020_flush_user_cache_range(unsigned l 257 void arm1020_coherent_kern_range(unsigned long 258 int arm1020_coherent_user_range(unsigned long, 259 void arm1020_flush_kern_dcache_area(void *, si 260 void arm1020_dma_map_area(const void *, size_t 261 void arm1020_dma_unmap_area(const void *, size 262 void arm1020_dma_flush_range(const void *, con 263 264 struct 
cpu_cache_fns arm1020_cache_fns __initc 265 .flush_icache_all = arm1020_flush_icac 266 .flush_kern_all = arm1020_flush_kern_c 267 .flush_kern_louis = arm1020_flush_kern 268 .flush_user_all = arm1020_flush_user_c 269 .flush_user_range = arm1020_flush_user 270 .coherent_kern_range = arm1020_coheren 271 .coherent_user_range = arm1020_coheren 272 .flush_kern_dcache_area = arm1020_flus 273 .dma_map_area = arm1020_dma_map_area, 274 .dma_unmap_area = arm1020_dma_unmap_ar 275 .dma_flush_range = arm1020_dma_flush_r 276 }; 277 #endif 278 279 #ifdef CONFIG_CPU_ARM1020E 280 void arm1020e_flush_icache_all(void); 281 void arm1020e_flush_kern_cache_all(void); 282 void arm1020e_flush_user_cache_all(void); 283 void arm1020e_flush_user_cache_range(unsigned 284 void arm1020e_coherent_kern_range(unsigned lon 285 int arm1020e_coherent_user_range(unsigned long 286 void arm1020e_flush_kern_dcache_area(void *, s 287 void arm1020e_dma_map_area(const void *, size_ 288 void arm1020e_dma_unmap_area(const void *, siz 289 void arm1020e_dma_flush_range(const void *, co 290 291 struct cpu_cache_fns arm1020e_cache_fns __init 292 .flush_icache_all = arm1020e_flush_ica 293 .flush_kern_all = arm1020e_flush_kern_ 294 .flush_kern_louis = arm1020e_flush_ker 295 .flush_user_all = arm1020e_flush_user_ 296 .flush_user_range = arm1020e_flush_use 297 .coherent_kern_range = arm1020e_cohere 298 .coherent_user_range = arm1020e_cohere 299 .flush_kern_dcache_area = arm1020e_flu 300 .dma_map_area = arm1020e_dma_map_area, 301 .dma_unmap_area = arm1020e_dma_unmap_a 302 .dma_flush_range = arm1020e_dma_flush_ 303 }; 304 #endif 305 306 #ifdef CONFIG_CPU_ARM1022 307 void arm1022_flush_icache_all(void); 308 void arm1022_flush_kern_cache_all(void); 309 void arm1022_flush_user_cache_all(void); 310 void arm1022_flush_user_cache_range(unsigned l 311 void arm1022_coherent_kern_range(unsigned long 312 int arm1022_coherent_user_range(unsigned long, 313 void arm1022_flush_kern_dcache_area(void *, si 314 void 
arm1022_dma_map_area(const void *, size_t 315 void arm1022_dma_unmap_area(const void *, size 316 void arm1022_dma_flush_range(const void *, con 317 318 struct cpu_cache_fns arm1022_cache_fns __initc 319 .flush_icache_all = arm1022_flush_icac 320 .flush_kern_all = arm1022_flush_kern_c 321 .flush_kern_louis = arm1022_flush_kern 322 .flush_user_all = arm1022_flush_user_c 323 .flush_user_range = arm1022_flush_user 324 .coherent_kern_range = arm1022_coheren 325 .coherent_user_range = arm1022_coheren 326 .flush_kern_dcache_area = arm1022_flus 327 .dma_map_area = arm1022_dma_map_area, 328 .dma_unmap_area = arm1022_dma_unmap_ar 329 .dma_flush_range = arm1022_dma_flush_r 330 }; 331 #endif 332 333 #ifdef CONFIG_CPU_ARM1026 334 void arm1026_flush_icache_all(void); 335 void arm1026_flush_kern_cache_all(void); 336 void arm1026_flush_user_cache_all(void); 337 void arm1026_flush_user_cache_range(unsigned l 338 void arm1026_coherent_kern_range(unsigned long 339 int arm1026_coherent_user_range(unsigned long, 340 void arm1026_flush_kern_dcache_area(void *, si 341 void arm1026_dma_map_area(const void *, size_t 342 void arm1026_dma_unmap_area(const void *, size 343 void arm1026_dma_flush_range(const void *, con 344 345 struct cpu_cache_fns arm1026_cache_fns __initc 346 .flush_icache_all = arm1026_flush_icac 347 .flush_kern_all = arm1026_flush_kern_c 348 .flush_kern_louis = arm1026_flush_kern 349 .flush_user_all = arm1026_flush_user_c 350 .flush_user_range = arm1026_flush_user 351 .coherent_kern_range = arm1026_coheren 352 .coherent_user_range = arm1026_coheren 353 .flush_kern_dcache_area = arm1026_flus 354 .dma_map_area = arm1026_dma_map_area, 355 .dma_unmap_area = arm1026_dma_unmap_ar 356 .dma_flush_range = arm1026_dma_flush_r 357 }; 358 #endif 359 360 #if defined(CONFIG_CPU_ARM920T) && !defined(CO 361 void arm920_flush_icache_all(void); 362 void arm920_flush_kern_cache_all(void); 363 void arm920_flush_user_cache_all(void); 364 void arm920_flush_user_cache_range(unsigned lo 365 void 
arm920_coherent_kern_range(unsigned long, 366 int arm920_coherent_user_range(unsigned long, 367 void arm920_flush_kern_dcache_area(void *, siz 368 void arm920_dma_map_area(const void *, size_t, 369 void arm920_dma_unmap_area(const void *, size_ 370 void arm920_dma_flush_range(const void *, cons 371 372 struct cpu_cache_fns arm920_cache_fns __initco 373 .flush_icache_all = arm920_flush_icach 374 .flush_kern_all = arm920_flush_kern_ca 375 .flush_kern_louis = arm920_flush_kern_ 376 .flush_user_all = arm920_flush_user_ca 377 .flush_user_range = arm920_flush_user_ 378 .coherent_kern_range = arm920_coherent 379 .coherent_user_range = arm920_coherent 380 .flush_kern_dcache_area = arm920_flush 381 .dma_map_area = arm920_dma_map_area, 382 .dma_unmap_area = arm920_dma_unmap_are 383 .dma_flush_range = arm920_dma_flush_ra 384 }; 385 #endif 386 387 #if defined(CONFIG_CPU_ARM922T) && !defined(CO 388 void arm922_flush_icache_all(void); 389 void arm922_flush_kern_cache_all(void); 390 void arm922_flush_user_cache_all(void); 391 void arm922_flush_user_cache_range(unsigned lo 392 void arm922_coherent_kern_range(unsigned long, 393 int arm922_coherent_user_range(unsigned long, 394 void arm922_flush_kern_dcache_area(void *, siz 395 void arm922_dma_map_area(const void *, size_t, 396 void arm922_dma_unmap_area(const void *, size_ 397 void arm922_dma_flush_range(const void *, cons 398 399 struct cpu_cache_fns arm922_cache_fns __initco 400 .flush_icache_all = arm922_flush_icach 401 .flush_kern_all = arm922_flush_kern_ca 402 .flush_kern_louis = arm922_flush_kern_ 403 .flush_user_all = arm922_flush_user_ca 404 .flush_user_range = arm922_flush_user_ 405 .coherent_kern_range = arm922_coherent 406 .coherent_user_range = arm922_coherent 407 .flush_kern_dcache_area = arm922_flush 408 .dma_map_area = arm922_dma_map_area, 409 .dma_unmap_area = arm922_dma_unmap_are 410 .dma_flush_range = arm922_dma_flush_ra 411 }; 412 #endif 413 414 #ifdef CONFIG_CPU_ARM925T 415 void arm925_flush_icache_all(void); 
416 void arm925_flush_kern_cache_all(void); 417 void arm925_flush_user_cache_all(void); 418 void arm925_flush_user_cache_range(unsigned lo 419 void arm925_coherent_kern_range(unsigned long, 420 int arm925_coherent_user_range(unsigned long, 421 void arm925_flush_kern_dcache_area(void *, siz 422 void arm925_dma_map_area(const void *, size_t, 423 void arm925_dma_unmap_area(const void *, size_ 424 void arm925_dma_flush_range(const void *, cons 425 426 struct cpu_cache_fns arm925_cache_fns __initco 427 .flush_icache_all = arm925_flush_icach 428 .flush_kern_all = arm925_flush_kern_ca 429 .flush_kern_louis = arm925_flush_kern_ 430 .flush_user_all = arm925_flush_user_ca 431 .flush_user_range = arm925_flush_user_ 432 .coherent_kern_range = arm925_coherent 433 .coherent_user_range = arm925_coherent 434 .flush_kern_dcache_area = arm925_flush 435 .dma_map_area = arm925_dma_map_area, 436 .dma_unmap_area = arm925_dma_unmap_are 437 .dma_flush_range = arm925_dma_flush_ra 438 }; 439 #endif 440 441 #ifdef CONFIG_CPU_ARM926T 442 void arm926_flush_icache_all(void); 443 void arm926_flush_kern_cache_all(void); 444 void arm926_flush_user_cache_all(void); 445 void arm926_flush_user_cache_range(unsigned lo 446 void arm926_coherent_kern_range(unsigned long, 447 int arm926_coherent_user_range(unsigned long, 448 void arm926_flush_kern_dcache_area(void *, siz 449 void arm926_dma_map_area(const void *, size_t, 450 void arm926_dma_unmap_area(const void *, size_ 451 void arm926_dma_flush_range(const void *, cons 452 453 struct cpu_cache_fns arm926_cache_fns __initco 454 .flush_icache_all = arm926_flush_icach 455 .flush_kern_all = arm926_flush_kern_ca 456 .flush_kern_louis = arm926_flush_kern_ 457 .flush_user_all = arm926_flush_user_ca 458 .flush_user_range = arm926_flush_user_ 459 .coherent_kern_range = arm926_coherent 460 .coherent_user_range = arm926_coherent 461 .flush_kern_dcache_area = arm926_flush 462 .dma_map_area = arm926_dma_map_area, 463 .dma_unmap_area = arm926_dma_unmap_are 464 
.dma_flush_range = arm926_dma_flush_ra 465 }; 466 #endif 467 468 #ifdef CONFIG_CPU_ARM940T 469 void arm940_flush_icache_all(void); 470 void arm940_flush_kern_cache_all(void); 471 void arm940_flush_user_cache_all(void); 472 void arm940_flush_user_cache_range(unsigned lo 473 void arm940_coherent_kern_range(unsigned long, 474 int arm940_coherent_user_range(unsigned long, 475 void arm940_flush_kern_dcache_area(void *, siz 476 void arm940_dma_map_area(const void *, size_t, 477 void arm940_dma_unmap_area(const void *, size_ 478 void arm940_dma_flush_range(const void *, cons 479 480 struct cpu_cache_fns arm940_cache_fns __initco 481 .flush_icache_all = arm940_flush_icach 482 .flush_kern_all = arm940_flush_kern_ca 483 .flush_kern_louis = arm940_flush_kern_ 484 .flush_user_all = arm940_flush_user_ca 485 .flush_user_range = arm940_flush_user_ 486 .coherent_kern_range = arm940_coherent 487 .coherent_user_range = arm940_coherent 488 .flush_kern_dcache_area = arm940_flush 489 .dma_map_area = arm940_dma_map_area, 490 .dma_unmap_area = arm940_dma_unmap_are 491 .dma_flush_range = arm940_dma_flush_ra 492 }; 493 #endif 494 495 #ifdef CONFIG_CPU_ARM946E 496 void arm946_flush_icache_all(void); 497 void arm946_flush_kern_cache_all(void); 498 void arm946_flush_user_cache_all(void); 499 void arm946_flush_user_cache_range(unsigned lo 500 void arm946_coherent_kern_range(unsigned long, 501 int arm946_coherent_user_range(unsigned long, 502 void arm946_flush_kern_dcache_area(void *, siz 503 void arm946_dma_map_area(const void *, size_t, 504 void arm946_dma_unmap_area(const void *, size_ 505 void arm946_dma_flush_range(const void *, cons 506 507 struct cpu_cache_fns arm946_cache_fns __initco 508 .flush_icache_all = arm946_flush_icach 509 .flush_kern_all = arm946_flush_kern_ca 510 .flush_kern_louis = arm946_flush_kern_ 511 .flush_user_all = arm946_flush_user_ca 512 .flush_user_range = arm946_flush_user_ 513 .coherent_kern_range = arm946_coherent 514 .coherent_user_range = arm946_coherent 515 
.flush_kern_dcache_area = arm946_flush 516 .dma_map_area = arm946_dma_map_area, 517 .dma_unmap_area = arm946_dma_unmap_are 518 .dma_flush_range = arm946_dma_flush_ra 519 }; 520 #endif 521 522 #ifdef CONFIG_CPU_XSCALE 523 void xscale_flush_icache_all(void); 524 void xscale_flush_kern_cache_all(void); 525 void xscale_flush_user_cache_all(void); 526 void xscale_flush_user_cache_range(unsigned lo 527 void xscale_coherent_kern_range(unsigned long, 528 int xscale_coherent_user_range(unsigned long, 529 void xscale_flush_kern_dcache_area(void *, siz 530 void xscale_dma_map_area(const void *, size_t, 531 void xscale_dma_unmap_area(const void *, size_ 532 void xscale_dma_flush_range(const void *, cons 533 534 struct cpu_cache_fns xscale_cache_fns __initco 535 .flush_icache_all = xscale_flush_icach 536 .flush_kern_all = xscale_flush_kern_ca 537 .flush_kern_louis = xscale_flush_kern_ 538 .flush_user_all = xscale_flush_user_ca 539 .flush_user_range = xscale_flush_user_ 540 .coherent_kern_range = xscale_coherent 541 .coherent_user_range = xscale_coherent 542 .flush_kern_dcache_area = xscale_flush 543 .dma_map_area = xscale_dma_map_area, 544 .dma_unmap_area = xscale_dma_unmap_are 545 .dma_flush_range = xscale_dma_flush_ra 546 }; 547 548 /* The 80200 A0 and A1 need a special quirk fo 549 void xscale_80200_A0_A1_dma_map_area(const voi 550 551 struct cpu_cache_fns xscale_80200_A0_A1_cache_ 552 .flush_icache_all = xscale_flush_icach 553 .flush_kern_all = xscale_flush_kern_ca 554 .flush_kern_louis = xscale_flush_kern_ 555 .flush_user_all = xscale_flush_user_ca 556 .flush_user_range = xscale_flush_user_ 557 .coherent_kern_range = xscale_coherent 558 .coherent_user_range = xscale_coherent 559 .flush_kern_dcache_area = xscale_flush 560 .dma_map_area = xscale_80200_A0_A1_dma 561 .dma_unmap_area = xscale_dma_unmap_are 562 .dma_flush_range = xscale_dma_flush_ra 563 }; 564 #endif 565 566 #ifdef CONFIG_CPU_XSC3 567 void xsc3_flush_icache_all(void); 568 void xsc3_flush_kern_cache_all(void); 
569 void xsc3_flush_user_cache_all(void); 570 void xsc3_flush_user_cache_range(unsigned long 571 void xsc3_coherent_kern_range(unsigned long, u 572 int xsc3_coherent_user_range(unsigned long, un 573 void xsc3_flush_kern_dcache_area(void *, size_ 574 void xsc3_dma_map_area(const void *, size_t, i 575 void xsc3_dma_unmap_area(const void *, size_t, 576 void xsc3_dma_flush_range(const void *, const 577 578 struct cpu_cache_fns xsc3_cache_fns __initcons 579 .flush_icache_all = xsc3_flush_icache_ 580 .flush_kern_all = xsc3_flush_kern_cach 581 .flush_kern_louis = xsc3_flush_kern_ca 582 .flush_user_all = xsc3_flush_user_cach 583 .flush_user_range = xsc3_flush_user_ca 584 .coherent_kern_range = xsc3_coherent_k 585 .coherent_user_range = xsc3_coherent_u 586 .flush_kern_dcache_area = xsc3_flush_k 587 .dma_map_area = xsc3_dma_map_area, 588 .dma_unmap_area = xsc3_dma_unmap_area, 589 .dma_flush_range = xsc3_dma_flush_rang 590 }; 591 #endif 592 593 #ifdef CONFIG_CPU_MOHAWK 594 void mohawk_flush_icache_all(void); 595 void mohawk_flush_kern_cache_all(void); 596 void mohawk_flush_user_cache_all(void); 597 void mohawk_flush_user_cache_range(unsigned lo 598 void mohawk_coherent_kern_range(unsigned long, 599 int mohawk_coherent_user_range(unsigned long, 600 void mohawk_flush_kern_dcache_area(void *, siz 601 void mohawk_dma_map_area(const void *, size_t, 602 void mohawk_dma_unmap_area(const void *, size_ 603 void mohawk_dma_flush_range(const void *, cons 604 605 struct cpu_cache_fns mohawk_cache_fns __initco 606 .flush_icache_all = mohawk_flush_icach 607 .flush_kern_all = mohawk_flush_kern_ca 608 .flush_kern_louis = mohawk_flush_kern_ 609 .flush_user_all = mohawk_flush_user_ca 610 .flush_user_range = mohawk_flush_user_ 611 .coherent_kern_range = mohawk_coherent 612 .coherent_user_range = mohawk_coherent 613 .flush_kern_dcache_area = mohawk_flush 614 .dma_map_area = mohawk_dma_map_area, 615 .dma_unmap_area = mohawk_dma_unmap_are 616 .dma_flush_range = mohawk_dma_flush_ra 617 }; 618 
#endif 619 620 #ifdef CONFIG_CPU_FEROCEON 621 void feroceon_flush_icache_all(void); 622 void feroceon_flush_kern_cache_all(void); 623 void feroceon_flush_user_cache_all(void); 624 void feroceon_flush_user_cache_range(unsigned 625 void feroceon_coherent_kern_range(unsigned lon 626 int feroceon_coherent_user_range(unsigned long 627 void feroceon_flush_kern_dcache_area(void *, s 628 void feroceon_dma_map_area(const void *, size_ 629 void feroceon_dma_unmap_area(const void *, siz 630 void feroceon_dma_flush_range(const void *, co 631 632 struct cpu_cache_fns feroceon_cache_fns __init 633 .flush_icache_all = feroceon_flush_ica 634 .flush_kern_all = feroceon_flush_kern_ 635 .flush_kern_louis = feroceon_flush_ker 636 .flush_user_all = feroceon_flush_user_ 637 .flush_user_range = feroceon_flush_use 638 .coherent_kern_range = feroceon_cohere 639 .coherent_user_range = feroceon_cohere 640 .flush_kern_dcache_area = feroceon_flu 641 .dma_map_area = feroceon_dma_map_area, 642 .dma_unmap_area = feroceon_dma_unmap_a 643 .dma_flush_range = feroceon_dma_flush_ 644 }; 645 646 void feroceon_range_flush_kern_dcache_area(voi 647 void feroceon_range_dma_map_area(const void *, 648 void feroceon_range_dma_flush_range(const void 649 650 struct cpu_cache_fns feroceon_range_cache_fns 651 .flush_icache_all = feroceon_flush_ica 652 .flush_kern_all = feroceon_flush_kern_ 653 .flush_kern_louis = feroceon_flush_ker 654 .flush_user_all = feroceon_flush_user_ 655 .flush_user_range = feroceon_flush_use 656 .coherent_kern_range = feroceon_cohere 657 .coherent_user_range = feroceon_cohere 658 .flush_kern_dcache_area = feroceon_ran 659 .dma_map_area = feroceon_range_dma_map 660 .dma_unmap_area = feroceon_dma_unmap_a 661 .dma_flush_range = feroceon_range_dma_ 662 }; 663 #endif 664
/*
 * Linux® is a registered trademark of Linus Torvalds in the United States and other countries.
 * TOMOYO® is a registered trademark of NTT DATA CORPORATION.
 */