1 /* SPDX-License-Identifier: GPL-2.0-only */ 2 /* 3 * Scalar AES core transform 4 * 5 * Copyright (C) 2017 Linaro Ltd <ard.biesheuve 6 */ 7 8 #include <linux/linkage.h> 9 #include <asm/assembler.h> 10 #include <asm/cache.h> 11 12 .text 13 14 rk .req x0 15 out .req x1 16 in .req x2 17 rounds .req x3 18 tt .req x2 19 20 .macro __pair1, sz, op, reg0, 21 .ifc \op\shift, b0 22 ubfiz \reg0, \in0, #2, #8 23 ubfiz \reg1, \in1e, #2, #8 24 .else 25 ubfx \reg0, \in0, #\shift, 26 ubfx \reg1, \in1e, #\shift, 27 .endif 28 29 /* 30 * AArch64 cannot do byte size indexed 31 * 32-bit quantities, i.e., 'ldrb w12, 32 * valid instruction. So perform the s 33 * high bytes (the low byte is shifted 34 * than ubfx above) 35 */ 36 .ifnc \op, b 37 ldr \reg0, [tt, \reg0, uxt 38 ldr \reg1, [tt, \reg1, uxt 39 .else 40 .if \shift > 0 41 lsl \reg0, \reg0, #2 42 lsl \reg1, \reg1, #2 43 .endif 44 ldrb \reg0, [tt, \reg0, uxt 45 ldrb \reg1, [tt, \reg1, uxt 46 .endif 47 .endm 48 49 .macro __pair0, sz, op, reg0, 50 ubfx \reg0, \in0, #\shift, 51 ubfx \reg1, \in1d, #\shift, 52 ldr\op \reg0, [tt, \reg0, uxt 53 ldr\op \reg1, [tt, \reg1, uxt 54 .endm 55 56 .macro __hround, out0, out1, 57 ldp \out0, \out1, [rk], #8 58 59 __pair\enc \sz, \op, w12, w13, \i 60 __pair\enc \sz, \op, w14, w15, \i 61 __pair\enc \sz, \op, w16, w17, \i 62 __pair\enc \sz, \op, \t0, \t1, \i 63 64 eor \out0, \out0, w12 65 eor \out1, \out1, w13 66 eor \out0, \out0, w14, ror 67 eor \out1, \out1, w15, ror 68 eor \out0, \out0, w16, ror 69 eor \out1, \out1, w17, ror 70 eor \out0, \out0, \t0, ror 71 eor \out1, \out1, \t1, ror 72 .endm 73 74 .macro fround, out0, out1, ou 75 __hround \out0, \out1, \in0, \i 76 __hround \out2, \out3, \in2, \i 77 .endm 78 79 .macro iround, out0, out1, ou 80 __hround \out0, \out1, \in0, \i 81 __hround \out2, \out3, \in2, \i 82 .endm 83 84 .macro do_crypt, round, ttab, 85 ldp w4, w5, [in] 86 ldp w6, w7, [in, #8] 87 ldp w8, w9, [rk], #16 88 ldp w10, w11, [rk, #-8] 89 90 CPU_BE( rev w4, w4 ) 91 CPU_BE( rev w5, w5 ) 92 CPU_BE( rev w6, w6 ) 93 CPU_BE( rev w7, w7 ) 94 95 eor w4, w4, w8 96 eor w5, w5, w9 97 eor w6, w6, w10 98 eor w7, w7, w11 99 100 adr_l tt, \ttab 101 102 tbnz rounds, #1, 1f 103 104 0: \round w8, w9, w10, w11, w4, 105 \round w4, w5, w6, w7, w8, w9 106 107 1: subs rounds, rounds, #4 108 \round w8, w9, w10, w11, w4, 109 b.ls 3f 110 2: \round w4, w5, w6, w7, w8, w9 111 b 0b 112 3: adr_l tt, \ltab 113 \round w4, w5, w6, w7, w8, w9 114 115 CPU_BE( rev w4, w4 ) 116 CPU_BE( rev w5, w5 ) 117 CPU_BE( rev w6, w6 ) 118 CPU_BE( rev w7, w7 ) 119 120 stp w4, w5, [out] 121 stp w6, w7, [out, #8] 122 ret 123 .endm 124 125 SYM_FUNC_START(__aes_arm64_encrypt) 126 do_crypt fround, crypto_ft_tab, 127 SYM_FUNC_END(__aes_arm64_encrypt) 128 129 .align 5 130 SYM_FUNC_START(__aes_arm64_decrypt) 131 do_crypt iround, crypto_it_tab, 132 SYM_FUNC_END(__aes_arm64_decrypt)
Linux® is a registered trademark of Linus Torvalds in the United States and other countries.
TOMOYO® is a registered trademark of NTT DATA CORPORATION.