/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * MIPS SIMD Architecture (MSA) context handling code for KVM.
 *
 * Copyright (C) 2015 Imagination Technologies Ltd.
 */

#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/asmmacro.h>
#include <asm/regdef.h>

	.set	noreorder
	.set	noat

/*
 * void __kvm_save_msa(struct kvm_vcpu_arch *a0)
 *
 * Save the full 128-bit MSA vector context (w0..w31) into the vcpu
 * architecture struct pointed to by a0.  st_d (from asmmacro.h) stores the
 * whole vector register as doubleword elements.
 */
LEAF(__kvm_save_msa)
	st_d	0,  VCPU_FPR0,  a0
	st_d	1,  VCPU_FPR1,  a0
	st_d	2,  VCPU_FPR2,  a0
	st_d	3,  VCPU_FPR3,  a0
	st_d	4,  VCPU_FPR4,  a0
	st_d	5,  VCPU_FPR5,  a0
	st_d	6,  VCPU_FPR6,  a0
	st_d	7,  VCPU_FPR7,  a0
	st_d	8,  VCPU_FPR8,  a0
	st_d	9,  VCPU_FPR9,  a0
	st_d	10, VCPU_FPR10, a0
	st_d	11, VCPU_FPR11, a0
	st_d	12, VCPU_FPR12, a0
	st_d	13, VCPU_FPR13, a0
	st_d	14, VCPU_FPR14, a0
	st_d	15, VCPU_FPR15, a0
	st_d	16, VCPU_FPR16, a0
	st_d	17, VCPU_FPR17, a0
	st_d	18, VCPU_FPR18, a0
	st_d	19, VCPU_FPR19, a0
	st_d	20, VCPU_FPR20, a0
	st_d	21, VCPU_FPR21, a0
	st_d	22, VCPU_FPR22, a0
	st_d	23, VCPU_FPR23, a0
	st_d	24, VCPU_FPR24, a0
	st_d	25, VCPU_FPR25, a0
	st_d	26, VCPU_FPR26, a0
	st_d	27, VCPU_FPR27, a0
	st_d	28, VCPU_FPR28, a0
	st_d	29, VCPU_FPR29, a0
	st_d	30, VCPU_FPR30, a0
	st_d	31, VCPU_FPR31, a0
	jr	ra
	 nop			/* branch delay slot (.set noreorder) */
	END(__kvm_save_msa)

/*
 * void __kvm_restore_msa(struct kvm_vcpu_arch *a0)
 *
 * Restore the full 128-bit MSA vector context (w0..w31) from the vcpu
 * architecture struct pointed to by a0.
 */
LEAF(__kvm_restore_msa)
	ld_d	0,  VCPU_FPR0,  a0
	ld_d	1,  VCPU_FPR1,  a0
	ld_d	2,  VCPU_FPR2,  a0
	ld_d	3,  VCPU_FPR3,  a0
	ld_d	4,  VCPU_FPR4,  a0
	ld_d	5,  VCPU_FPR5,  a0
	ld_d	6,  VCPU_FPR6,  a0
	ld_d	7,  VCPU_FPR7,  a0
	ld_d	8,  VCPU_FPR8,  a0
	ld_d	9,  VCPU_FPR9,  a0
	ld_d	10, VCPU_FPR10, a0
	ld_d	11, VCPU_FPR11, a0
	ld_d	12, VCPU_FPR12, a0
	ld_d	13, VCPU_FPR13, a0
	ld_d	14, VCPU_FPR14, a0
	ld_d	15, VCPU_FPR15, a0
	ld_d	16, VCPU_FPR16, a0
	ld_d	17, VCPU_FPR17, a0
	ld_d	18, VCPU_FPR18, a0
	ld_d	19, VCPU_FPR19, a0
	ld_d	20, VCPU_FPR20, a0
	ld_d	21, VCPU_FPR21, a0
	ld_d	22, VCPU_FPR22, a0
	ld_d	23, VCPU_FPR23, a0
	ld_d	24, VCPU_FPR24, a0
	ld_d	25, VCPU_FPR25, a0
	ld_d	26, VCPU_FPR26, a0
	ld_d	27, VCPU_FPR27, a0
	ld_d	28, VCPU_FPR28, a0
	ld_d	29, VCPU_FPR29, a0
	ld_d	30, VCPU_FPR30, a0
	ld_d	31, VCPU_FPR31, a0
	jr	ra
	 nop			/* branch delay slot (.set noreorder) */
	END(__kvm_restore_msa)

/*
 * kvm_restore_msa_upper wr, off, base
 *
 * Restore the upper 64 bits of MSA vector register \wr from memory at
 * \off(\base), without disturbing the lower 64 bits (the scalar FPU half).
 *
 * FIX(review): the macro was declared "wr, of" but the body references
 * \off and \base; declare both, with \base defaulting to a0 (the vcpu
 * arch pointer used by every caller in this file).
 *
 * On 64-bit a single doubleword insert suffices; on 32-bit the two words
 * are inserted separately, with the word order depending on endianness so
 * that elements 2 and 3 (the upper half) end up correct either way.
 */
	.macro	kvm_restore_msa_upper	wr, off, base=a0
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	ld	$1, \off(\base)
	insert_d \wr, 1
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	lw	$1, \off(\base)
	insert_w \wr, 2
	lw	$1, (\off+4)(\base)
	insert_w \wr, 3
#else /* CONFIG_CPU_BIG_ENDIAN */
	lw	$1, (\off+4)(\base)
	insert_w \wr, 2
	lw	$1, \off(\base)
	insert_w \wr, 3
#endif
	.set	pop
	.endm

/*
 * void __kvm_restore_msa_upper(struct kvm_vcpu_arch *a0)
 *
 * Restore only the upper halves of w0..w31 (the lower halves are the
 * scalar FP registers and are handled by the FPU context code).
 *
 * FIX(review): the upper 64 bits of each 128-bit vector register live at
 * VCPU_FPRn + 8; the "+8" offsets were missing from every invocation.
 */
LEAF(__kvm_restore_msa_upper)
	kvm_restore_msa_upper	0,  VCPU_FPR0 +8
	kvm_restore_msa_upper	1,  VCPU_FPR1 +8
	kvm_restore_msa_upper	2,  VCPU_FPR2 +8
	kvm_restore_msa_upper	3,  VCPU_FPR3 +8
	kvm_restore_msa_upper	4,  VCPU_FPR4 +8
	kvm_restore_msa_upper	5,  VCPU_FPR5 +8
	kvm_restore_msa_upper	6,  VCPU_FPR6 +8
	kvm_restore_msa_upper	7,  VCPU_FPR7 +8
	kvm_restore_msa_upper	8,  VCPU_FPR8 +8
	kvm_restore_msa_upper	9,  VCPU_FPR9 +8
	kvm_restore_msa_upper	10, VCPU_FPR10 +8
	kvm_restore_msa_upper	11, VCPU_FPR11 +8
	kvm_restore_msa_upper	12, VCPU_FPR12 +8
	kvm_restore_msa_upper	13, VCPU_FPR13 +8
	kvm_restore_msa_upper	14, VCPU_FPR14 +8
	kvm_restore_msa_upper	15, VCPU_FPR15 +8
	kvm_restore_msa_upper	16, VCPU_FPR16 +8
	kvm_restore_msa_upper	17, VCPU_FPR17 +8
	kvm_restore_msa_upper	18, VCPU_FPR18 +8
	kvm_restore_msa_upper	19, VCPU_FPR19 +8
	kvm_restore_msa_upper	20, VCPU_FPR20 +8
	kvm_restore_msa_upper	21, VCPU_FPR21 +8
	kvm_restore_msa_upper	22, VCPU_FPR22 +8
	kvm_restore_msa_upper	23, VCPU_FPR23 +8
	kvm_restore_msa_upper	24, VCPU_FPR24 +8
	kvm_restore_msa_upper	25, VCPU_FPR25 +8
	kvm_restore_msa_upper	26, VCPU_FPR26 +8
	kvm_restore_msa_upper	27, VCPU_FPR27 +8
	kvm_restore_msa_upper	28, VCPU_FPR28 +8
	kvm_restore_msa_upper	29, VCPU_FPR29 +8
	kvm_restore_msa_upper	30, VCPU_FPR30 +8
	kvm_restore_msa_upper	31, VCPU_FPR31 +8
	jr	ra
	 nop			/* branch delay slot (.set noreorder) */
	END(__kvm_restore_msa_upper)

/*
 * void __kvm_restore_msacsr(struct kvm_vcpu_arch *a0)
 *
 * Restore the guest's MSA control/status register from the vcpu arch
 * struct.
 */
LEAF(__kvm_restore_msacsr)
	lw	t0, VCPU_MSA_CSR(a0)
	/*
	 * The ctcmsa must stay at this offset in __kvm_restore_msacsr.
	 * See kvm_mips_csr_die_notify() which handles t0 containing a value
	 * which triggers an MSA FP Exception, which must be stepped over and
	 * ignored since the set cause bits must remain there for the guest.
	 */
	_ctcmsa	MSA_CSR, t0
	jr	ra
	 nop			/* branch delay slot (.set noreorder) */
	END(__kvm_restore_msacsr)
Linux® is a registered trademark of Linus Torvalds in the United States and other countries.
TOMOYO® is a registered trademark of NTT DATA CORPORATION.