TOMOYO Linux Cross Reference
Linux/arch/riscv/crypto/aes-riscv64-zvkned.S

Version: linux-6.12-rc7


/* SPDX-License-Identifier: Apache-2.0 OR BSD-2-Clause */
//
// This file is dual-licensed, meaning that you can use it under your
// choice of either of the following two licenses:
//
// Copyright 2023 The OpenSSL Project Authors. All Rights Reserved.
//
// Licensed under the Apache License 2.0 (the "License"). You can obtain
// a copy in the file LICENSE in the source distribution or at
// https://www.openssl.org/source/license.html
//
// or
//
// Copyright (c) 2023, Christoph Müllner <christoph.muellner@vrull.eu>
// Copyright (c) 2023, Phoebe Chen <phoebe.chen@sifive.com>
// Copyright (c) 2023, Jerry Shih <jerry.shih@sifive.com>
// Copyright 2024 Google LLC
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
//    notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
//    notice, this list of conditions and the following disclaimer in the
//    documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The generated code of this file depends on the following RISC-V extensions:
// - RV64I
// - RISC-V Vector ('V') with VLEN >= 128
// - RISC-V Vector AES block cipher extension ('Zvkned')

#include <linux/linkage.h>

.text
.option arch, +zvkned

#include "aes-macros.S"

#define KEYP            a0
#define INP             a1
#define OUTP            a2
#define LEN             a3
#define IVP             a4

.macro  __aes_crypt_zvkned      enc, keylen
        vle32.v         v16, (INP)
        aes_crypt       v16, \enc, \keylen
        vse32.v         v16, (OUTP)
        ret
.endm

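// All of the functions below share the same key-length dispatch pattern:
// aes_begin (from aes-macros.S) loads the expanded round keys and branches to
// the first label for AES-128 keys and to the second for AES-192 keys,
// falling through for AES-256.  Each per-keylen expansion ends in its own
// ret, so exactly one of the three expansions runs.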
.macro  aes_crypt_zvkned        enc
        aes_begin       KEYP, 128f, 192f
        __aes_crypt_zvkned      \enc, 256
128:
        __aes_crypt_zvkned      \enc, 128
192:
        __aes_crypt_zvkned      \enc, 192
.endm

// void aes_encrypt_zvkned(const struct crypto_aes_ctx *key,
//                         const u8 in[16], u8 out[16]);
SYM_FUNC_START(aes_encrypt_zvkned)
        aes_crypt_zvkned        1
SYM_FUNC_END(aes_encrypt_zvkned)

// Same prototype and calling convention as the encryption function
SYM_FUNC_START(aes_decrypt_zvkned)
        aes_crypt_zvkned        0
SYM_FUNC_END(aes_decrypt_zvkned)

.macro  __aes_ecb_crypt enc, keylen
        srli            t0, LEN, 2
        // t0 is the remaining length in 32-bit words.  It's a multiple of 4.
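        // Strip-mining loop: each iteration lets vsetvli choose how many of
        // those words fit in an LMUL=8 register group and processes that
        // many.  ECB blocks are independent, so any number of blocks can be
        // handled in parallel.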
1:
        vsetvli         t1, t0, e32, m8, ta, ma
        sub             t0, t0, t1      // Subtract number of words processed
        slli            t1, t1, 2       // Words to bytes
        vle32.v         v16, (INP)
        aes_crypt       v16, \enc, \keylen
        vse32.v         v16, (OUTP)
        add             INP, INP, t1
        add             OUTP, OUTP, t1
        bnez            t0, 1b

        ret
.endm

.macro  aes_ecb_crypt   enc
        aes_begin       KEYP, 128f, 192f
        __aes_ecb_crypt \enc, 256
128:
        __aes_ecb_crypt \enc, 128
192:
        __aes_ecb_crypt \enc, 192
.endm

// void aes_ecb_encrypt_zvkned(const struct crypto_aes_ctx *key,
//                             const u8 *in, u8 *out, size_t len);
//
// |len| must be nonzero and a multiple of 16 (AES_BLOCK_SIZE).
SYM_FUNC_START(aes_ecb_encrypt_zvkned)
        aes_ecb_crypt   1
SYM_FUNC_END(aes_ecb_encrypt_zvkned)

// Same prototype and calling convention as the encryption function
SYM_FUNC_START(aes_ecb_decrypt_zvkned)
        aes_ecb_crypt   0
SYM_FUNC_END(aes_ecb_decrypt_zvkned)

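// CBC encryption is inherently serial: each ciphertext block feeds into the
// next one via
//      C[1] = Encrypt(P[1] ^ IV)
//      C[i] = Encrypt(P[i] ^ C[i-1])
// so the loop below processes one 16-byte block per iteration.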
.macro  aes_cbc_encrypt keylen
        vle32.v         v16, (IVP)      // Load IV
1:
        vle32.v         v17, (INP)      // Load plaintext block
        vxor.vv         v16, v16, v17   // XOR with IV or prev ciphertext block
        aes_encrypt     v16, \keylen    // Encrypt
        vse32.v         v16, (OUTP)     // Store ciphertext block
        addi            INP, INP, 16
        addi            OUTP, OUTP, 16
        addi            LEN, LEN, -16
        bnez            LEN, 1b

        vse32.v         v16, (IVP)      // Store next IV
        ret
.endm

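// CBC decryption computes P[i] = Decrypt(C[i]) ^ C[i-1] (with C[0] = IV).
// The AES decryptions are independent of each other; only the final XOR needs
// the previous ciphertext block, which is just the ciphertext shifted by one
// block (set up below with vslideup/vslidedown).  The loop can therefore
// decrypt up to a full LMUL=4 register group of blocks per iteration.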
.macro  aes_cbc_decrypt keylen
        srli            LEN, LEN, 2     // Convert LEN from bytes to words
        vle32.v         v16, (IVP)      // Load IV
1:
        vsetvli         t0, LEN, e32, m4, ta, ma
        vle32.v         v20, (INP)      // Load ciphertext blocks
        vslideup.vi     v16, v20, 4     // Setup prev ciphertext blocks
        addi            t1, t0, -4
        vslidedown.vx   v24, v20, t1    // Save last ciphertext block
        aes_decrypt     v20, \keylen    // Decrypt the blocks
        vxor.vv         v20, v20, v16   // XOR with prev ciphertext blocks
        vse32.v         v20, (OUTP)     // Store plaintext blocks
        vmv.v.v         v16, v24        // Next "IV" is the last ciphertext block
        slli            t1, t0, 2       // Words to bytes
        add             INP, INP, t1
        add             OUTP, OUTP, t1
        sub             LEN, LEN, t0
        bnez            LEN, 1b

        vsetivli        zero, 4, e32, m1, ta, ma
        vse32.v         v16, (IVP)      // Store next IV
        ret
.endm

// void aes_cbc_encrypt_zvkned(const struct crypto_aes_ctx *key,
//                             const u8 *in, u8 *out, size_t len, u8 iv[16]);
//
// |len| must be nonzero and a multiple of 16 (AES_BLOCK_SIZE).
SYM_FUNC_START(aes_cbc_encrypt_zvkned)
        aes_begin       KEYP, 128f, 192f
        aes_cbc_encrypt 256
128:
        aes_cbc_encrypt 128
192:
        aes_cbc_encrypt 192
SYM_FUNC_END(aes_cbc_encrypt_zvkned)

// Same prototype and calling convention as the encryption function
SYM_FUNC_START(aes_cbc_decrypt_zvkned)
        aes_begin       KEYP, 128f, 192f
        aes_cbc_decrypt 256
128:
        aes_cbc_decrypt 128
192:
        aes_cbc_decrypt 192
SYM_FUNC_END(aes_cbc_decrypt_zvkned)

.macro  aes_cbc_cts_encrypt     keylen

        // CBC-encrypt all blocks except the last.  But don't store the
        // second-to-last block to the output buffer yet, since it will be
        // handled specially in the ciphertext stealing step.  Exception: if the
        // message is single-block, still encrypt the last (and only) block.
        li              t0, 16
        j               2f
1:
        vse32.v         v16, (OUTP)     // Store ciphertext block
        addi            OUTP, OUTP, 16
2:
        vle32.v         v17, (INP)      // Load plaintext block
        vxor.vv         v16, v16, v17   // XOR with IV or prev ciphertext block
        aes_encrypt     v16, \keylen    // Encrypt
        addi            INP, INP, 16
        addi            LEN, LEN, -16
        bgt             LEN, t0, 1b     // Repeat if more than one block remains

        // Special case: if the message is a single block, just store it.
        beqz            LEN, .Lcts_encrypt_done\@

        // Encrypt the last two blocks using ciphertext stealing as follows:
        //      C[n-1] = Encrypt(Encrypt(P[n-1] ^ C[n-2]) ^ P[n])
        //      C[n] = Encrypt(P[n-1] ^ C[n-2])[0..LEN]
        //
        // C[i] denotes the i'th ciphertext block, and likewise P[i] the i'th
        // plaintext block.  Block n, the last block, may be partial; its length
        // is 1 <= LEN <= 16.  If there are only 2 blocks, C[n-2] means the IV.
        //
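        // For illustration: a 30-byte message has one full block P[1] and a
        // 14-byte final block P[2], so n = 2, C[n-2] is the IV, and LEN = 14
        // at this point.  Then C[1] = Encrypt(Encrypt(P[1] ^ IV) ^ P[2])
        // (16 bytes; the 14-byte P[2] only affects the first 14 bytes of the
        // XOR) and C[2] = Encrypt(P[1] ^ IV)[0..14] (14 bytes), so the
        // ciphertext C[1] || C[2] has the same 30-byte length as the message.
        //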
        // v16 already contains Encrypt(P[n-1] ^ C[n-2]).
        // INP points to P[n].  OUTP points to where C[n-1] should go.
        // To support in-place encryption, load P[n] before storing C[n].
        addi            t0, OUTP, 16    // Get pointer to where C[n] should go
        vsetvli         zero, LEN, e8, m1, tu, ma
        vle8.v          v17, (INP)      // Load P[n]
        vse8.v          v16, (t0)       // Store C[n]
        vxor.vv         v16, v16, v17   // v16 = Encrypt(P[n-1] ^ C[n-2]) ^ P[n]
        vsetivli        zero, 4, e32, m1, ta, ma
        aes_encrypt     v16, \keylen
.Lcts_encrypt_done\@:
        vse32.v         v16, (OUTP)     // Store C[n-1] (or C[n] in the single-block case)
        ret
.endm

#define LEN32           t4 // Length of remaining full blocks, in 32-bit words
#define LEN_MOD16       t5 // Length of message in bytes mod 16

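// CTS decryption inverts the above: every full ciphertext block is
// CBC-decrypted in the vectorized loop below, and only the last one or two
// output blocks are then patched up to undo the ciphertext stealing, so no
// AES decryption ever has to be redone.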
.macro  aes_cbc_cts_decrypt     keylen
        andi            LEN32, LEN, ~15
        srli            LEN32, LEN32, 2
        andi            LEN_MOD16, LEN, 15

        // Save C[n-2] in v28 so that it's available later during the ciphertext
        // stealing step.  If there are fewer than 3 blocks, C[n-2] means the
        // IV, otherwise it means the third-to-last ciphertext block.
        vmv.v.v         v28, v16        // IV
        add             t0, LEN, -33
        bltz            t0, .Lcts_decrypt_loop\@
        andi            t0, t0, ~15
        add             t0, t0, INP
        vle32.v         v28, (t0)

        // CBC-decrypt all full blocks.  For the last full block, or the last 2
        // full blocks if the message is block-aligned, this doesn't write the
        // correct output blocks (unless the message is a single block),
        // because it XORs the wrong values with the raw AES plaintexts.  But we
        // fix this after this loop without redoing the AES decryptions.  This
        // approach allows more of the AES decryptions to be parallelized.
.Lcts_decrypt_loop\@:
        vsetvli         t0, LEN32, e32, m4, ta, ma
        addi            t1, t0, -4
        vle32.v         v20, (INP)      // Load next set of ciphertext blocks
        vmv.v.v         v24, v16        // Get IV or last ciphertext block of prev set
        vslideup.vi     v24, v20, 4     // Setup prev ciphertext blocks
        vslidedown.vx   v16, v20, t1    // Save last ciphertext block of this set
        aes_decrypt     v20, \keylen    // Decrypt this set of blocks
        vxor.vv         v24, v24, v20   // XOR prev ciphertext blocks with decrypted blocks
        vse32.v         v24, (OUTP)     // Store this set of plaintext blocks
        sub             LEN32, LEN32, t0
        slli            t0, t0, 2       // Words to bytes
        add             INP, INP, t0
        add             OUTP, OUTP, t0
        bnez            LEN32, .Lcts_decrypt_loop\@

        vsetivli        zero, 4, e32, m4, ta, ma
        vslidedown.vx   v20, v20, t1    // Extract raw AES decryption of the last full block
        addi            t0, OUTP, -16   // Get pointer to the last full output block
        bnez            LEN_MOD16, .Lcts_decrypt_non_block_aligned\@

        // Special case: if the message is a single block, it is already done.
        li              t1, 16
        beq             LEN, t1, .Lcts_decrypt_done\@

        // Block-aligned message.  Just fix up the last 2 blocks.  We need:
        //
        //      P[n-1] = Decrypt(C[n]) ^ C[n-2]
        //      P[n] = Decrypt(C[n-1]) ^ C[n]
        //
        // We have C[n] in v16, Decrypt(C[n]) in v20, and C[n-2] in v28.
        // Together with Decrypt(C[n-1]) ^ C[n-2] which the loop above stored, this
        // is everything needed to fix the output without redoing the decryptions.
        addi            t1, OUTP, -32   // Get pointer to where P[n-1] should go
        vxor.vv         v20, v20, v28   // Decrypt(C[n]) ^ C[n-2] == P[n-1]
        vle32.v         v24, (t1)       // Decrypt(C[n-1]) ^ C[n-2]
        vse32.v         v20, (t1)       // Store P[n-1]
        vxor.vv         v20, v24, v16   // Decrypt(C[n-1]) ^ C[n-2] ^ C[n]
        j               .Lcts_decrypt_finish\@
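        // (The finish step below XORs this value with C[n-2] once more, which
        // cancels the C[n-2] term and leaves Decrypt(C[n-1]) ^ C[n] == P[n].)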

.Lcts_decrypt_non_block_aligned\@:
        // Decrypt the last two blocks using ciphertext stealing as follows:
        //
        //      P[n-1] = Decrypt(C[n] || Decrypt(C[n-1])[LEN_MOD16..16]) ^ C[n-2]
        //      P[n] = (Decrypt(C[n-1]) ^ C[n])[0..LEN_MOD16]
        //
        // We already have Decrypt(C[n-1]) in v20 and C[n-2] in v28.
        vmv.v.v         v16, v20        // v16 = Decrypt(C[n-1])
        vsetvli         zero, LEN_MOD16, e8, m1, tu, ma
        vle8.v          v20, (INP)      // v20 = Decrypt(C[n-1]) with first LEN_MOD16 bytes = C[n]
        vxor.vv         v16, v16, v20   // v16 = Decrypt(C[n-1]) ^ C[n] (first LEN_MOD16 bytes)
        vse8.v          v16, (OUTP)     // Store P[n]
        vsetivli        zero, 4, e32, m1, ta, ma
        aes_decrypt     v20, \keylen    // v20 = Decrypt(C[n] || Decrypt(C[n-1])[LEN_MOD16..16])
.Lcts_decrypt_finish\@:
        vxor.vv         v20, v20, v28   // XOR with C[n-2]
        vse32.v         v20, (t0)       // Store last full plaintext block
.Lcts_decrypt_done\@:
        ret
.endm

.macro  aes_cbc_cts_crypt       keylen
        vle32.v         v16, (IVP)      // Load IV
        beqz            a5, .Lcts_decrypt\@
        aes_cbc_cts_encrypt \keylen
.Lcts_decrypt\@:
        aes_cbc_cts_decrypt \keylen
.endm
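
// The enc flag tested above is the sixth argument of aes_cbc_cts_crypt_zvkned
// (see the prototype below) and therefore arrives in register a5 under the
// RISC-V calling convention; it is used directly rather than through a named
// #define like the first five arguments.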

// void aes_cbc_cts_crypt_zvkned(const struct crypto_aes_ctx *key,
//                               const u8 *in, u8 *out, size_t len,
//                               const u8 iv[16], bool enc);
//
// Encrypts or decrypts a message with the CS3 variant of AES-CBC-CTS.
// This is the variant that unconditionally swaps the last two blocks.
SYM_FUNC_START(aes_cbc_cts_crypt_zvkned)
        aes_begin       KEYP, 128f, 192f
        aes_cbc_cts_crypt 256
128:
        aes_cbc_cts_crypt 128
192:
        aes_cbc_cts_crypt 192
SYM_FUNC_END(aes_cbc_cts_crypt_zvkned)
                                                      
