Linux/arch/x86/crypto/sha512_ssse3_glue.c

/*
 * Cryptographic API.
 *
 * Glue code for the SHA512 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha512_generic.c
 *
 * Copyright (C) 2013 Intel Corporation
 * Author: Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 */

#define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/string.h>
#include <linux/types.h>
#include <crypto/sha2.h>
#include <crypto/sha512_base.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>

asmlinkage void sha512_transform_ssse3(struct sha512_state *state,
                                       const u8 *data, int blocks);

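/*
 * Common update path shared by the SSSE3, AVX and AVX2 variants.  When
 * the FPU is not usable in the current context, or when the new data
 * does not complete a 128-byte block, fall back to the generic C
 * implementation; otherwise run the supplied block function with the
 * FPU state saved around it.
 */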
static int sha512_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len, sha512_block_fn *sha512_xform)
{
        struct sha512_state *sctx = shash_desc_ctx(desc);

        if (!crypto_simd_usable() ||
            (sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE)
                return crypto_sha512_update(desc, data, len);

        /*
         * Make sure struct sha512_state begins directly with the SHA512
         * 512-bit internal state, as this is what the asm functions expect.
         */
        BUILD_BUG_ON(offsetof(struct sha512_state, state) != 0);

        kernel_fpu_begin();
        sha512_base_do_update(desc, data, len, sha512_xform);
        kernel_fpu_end();

        return 0;
}

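/*
 * Common finup path: hash any remaining data and the final padding in
 * one FPU section, falling back to the generic implementation when
 * SIMD cannot be used in this context.
 */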
static int sha512_finup(struct shash_desc *desc, const u8 *data,
              unsigned int len, u8 *out, sha512_block_fn *sha512_xform)
{
        if (!crypto_simd_usable())
                return crypto_sha512_finup(desc, data, len, out);

        kernel_fpu_begin();
        if (len)
                sha512_base_do_update(desc, data, len, sha512_xform);
        sha512_base_do_finalize(desc, sha512_xform);
        kernel_fpu_end();

        return sha512_base_finish(desc, out);
}

static int sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        return sha512_update(desc, data, len, sha512_transform_ssse3);
}

static int sha512_ssse3_finup(struct shash_desc *desc, const u8 *data,
              unsigned int len, u8 *out)
{
        return sha512_finup(desc, data, len, out, sha512_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha512_ssse3_final(struct shash_desc *desc, u8 *out)
{
        return sha512_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_ssse3_algs[] = { {
        .digestsize     =       SHA512_DIGEST_SIZE,
        .init           =       sha512_base_init,
        .update         =       sha512_ssse3_update,
        .final          =       sha512_ssse3_final,
        .finup          =       sha512_ssse3_finup,
        .descsize       =       sizeof(struct sha512_state),
        .base           =       {
                .cra_name       =       "sha512",
                .cra_driver_name =      "sha512-ssse3",
                .cra_priority   =       150,
                .cra_blocksize  =       SHA512_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
},  {
        .digestsize     =       SHA384_DIGEST_SIZE,
        .init           =       sha384_base_init,
        .update         =       sha512_ssse3_update,
        .final          =       sha512_ssse3_final,
        .finup          =       sha512_ssse3_finup,
        .descsize       =       sizeof(struct sha512_state),
        .base           =       {
                .cra_name       =       "sha384",
                .cra_driver_name =      "sha384-ssse3",
                .cra_priority   =       150,
                .cra_blocksize  =       SHA384_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
} };
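
/*
 * Each variant registers both "sha512" and "sha384"; the two digests
 * share the same block function and differ only in their initial state
 * and digest length.  The cra_priority ladder (SSSE3 150, AVX 160,
 * AVX2 170) lets the crypto API pick the fastest implementation that
 * was successfully registered on this CPU.
 */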

static int register_sha512_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                return crypto_register_shashes(sha512_ssse3_algs,
                        ARRAY_SIZE(sha512_ssse3_algs));
        return 0;
}

static void unregister_sha512_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                crypto_unregister_shashes(sha512_ssse3_algs,
                        ARRAY_SIZE(sha512_ssse3_algs));
}

asmlinkage void sha512_transform_avx(struct sha512_state *state,
                                     const u8 *data, int blocks);
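
/*
 * AVX is only usable if the kernel saves and restores the SSE and YMM
 * xstate components; checking cpu_has_xfeatures() rather than just the
 * CPUID AVX bit also covers the case where XSAVE support for those
 * registers has not been enabled.
 */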
static bool avx_usable(void)
{
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
                if (boot_cpu_has(X86_FEATURE_AVX))
                        pr_info("AVX detected but unusable.\n");
                return false;
        }

        return true;
}

static int sha512_avx_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        return sha512_update(desc, data, len, sha512_transform_avx);
}

static int sha512_avx_finup(struct shash_desc *desc, const u8 *data,
              unsigned int len, u8 *out)
{
        return sha512_finup(desc, data, len, out, sha512_transform_avx);
}

/* Add padding and return the message digest. */
static int sha512_avx_final(struct shash_desc *desc, u8 *out)
{
        return sha512_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_avx_algs[] = { {
        .digestsize     =       SHA512_DIGEST_SIZE,
        .init           =       sha512_base_init,
        .update         =       sha512_avx_update,
        .final          =       sha512_avx_final,
        .finup          =       sha512_avx_finup,
        .descsize       =       sizeof(struct sha512_state),
        .base           =       {
                .cra_name       =       "sha512",
                .cra_driver_name =      "sha512-avx",
                .cra_priority   =       160,
                .cra_blocksize  =       SHA512_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
},  {
        .digestsize     =       SHA384_DIGEST_SIZE,
        .init           =       sha384_base_init,
        .update         =       sha512_avx_update,
        .final          =       sha512_avx_final,
        .finup          =       sha512_avx_finup,
        .descsize       =       sizeof(struct sha512_state),
        .base           =       {
                .cra_name       =       "sha384",
                .cra_driver_name =      "sha384-avx",
                .cra_priority   =       160,
                .cra_blocksize  =       SHA384_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
} };

static int register_sha512_avx(void)
{
        if (avx_usable())
                return crypto_register_shashes(sha512_avx_algs,
                        ARRAY_SIZE(sha512_avx_algs));
        return 0;
}

static void unregister_sha512_avx(void)
{
        if (avx_usable())
                crypto_unregister_shashes(sha512_avx_algs,
                        ARRAY_SIZE(sha512_avx_algs));
}

asmlinkage void sha512_transform_rorx(struct sha512_state *state,
                                      const u8 *data, int blocks);

static int sha512_avx2_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        return sha512_update(desc, data, len, sha512_transform_rorx);
}

static int sha512_avx2_finup(struct shash_desc *desc, const u8 *data,
              unsigned int len, u8 *out)
{
        return sha512_finup(desc, data, len, out, sha512_transform_rorx);
}

/* Add padding and return the message digest. */
static int sha512_avx2_final(struct shash_desc *desc, u8 *out)
{
        return sha512_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_avx2_algs[] = { {
        .digestsize     =       SHA512_DIGEST_SIZE,
        .init           =       sha512_base_init,
        .update         =       sha512_avx2_update,
        .final          =       sha512_avx2_final,
        .finup          =       sha512_avx2_finup,
        .descsize       =       sizeof(struct sha512_state),
        .base           =       {
                .cra_name       =       "sha512",
                .cra_driver_name =      "sha512-avx2",
                .cra_priority   =       170,
                .cra_blocksize  =       SHA512_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
},  {
        .digestsize     =       SHA384_DIGEST_SIZE,
        .init           =       sha384_base_init,
        .update         =       sha512_avx2_update,
        .final          =       sha512_avx2_final,
        .finup          =       sha512_avx2_finup,
        .descsize       =       sizeof(struct sha512_state),
        .base           =       {
                .cra_name       =       "sha384",
                .cra_driver_name =      "sha384-avx2",
                .cra_priority   =       170,
                .cra_blocksize  =       SHA384_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
} };

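/*
 * The AVX2 code path is named after the RORX instruction it relies on,
 * which is part of BMI2, so both feature flags are required in
 * addition to a usable AVX state.
 */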
static bool avx2_usable(void)
{
        if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
                    boot_cpu_has(X86_FEATURE_BMI2))
                return true;

        return false;
}

static int register_sha512_avx2(void)
{
        if (avx2_usable())
                return crypto_register_shashes(sha512_avx2_algs,
                        ARRAY_SIZE(sha512_avx2_algs));
        return 0;
}

static const struct x86_cpu_id module_cpu_ids[] = {
        X86_MATCH_FEATURE(X86_FEATURE_AVX2, NULL),
        X86_MATCH_FEATURE(X86_FEATURE_AVX, NULL),
        X86_MATCH_FEATURE(X86_FEATURE_SSSE3, NULL),
        {}
};
MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
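
/*
 * The CPU feature table above lets the module be auto-loaded on any
 * CPU advertising SSSE3, AVX or AVX2; sha512_ssse3_mod_init() then
 * re-checks the same table and returns -ENODEV elsewhere.
 */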

static void unregister_sha512_avx2(void)
{
        if (avx2_usable())
                crypto_unregister_shashes(sha512_avx2_algs,
                        ARRAY_SIZE(sha512_avx2_algs));
}

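/*
 * Register the implementations from slowest to fastest and unwind the
 * ones already registered if a later registration fails, so the module
 * either provides a consistent set of algorithms or none at all.
 */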
static int __init sha512_ssse3_mod_init(void)
{
        if (!x86_match_cpu(module_cpu_ids))
                return -ENODEV;

        if (register_sha512_ssse3())
                goto fail;

        if (register_sha512_avx()) {
                unregister_sha512_ssse3();
                goto fail;
        }

        if (register_sha512_avx2()) {
                unregister_sha512_avx();
                unregister_sha512_ssse3();
                goto fail;
        }

        return 0;
fail:
        return -ENODEV;
}

static void __exit sha512_ssse3_mod_fini(void)
{
        unregister_sha512_avx2();
        unregister_sha512_avx();
        unregister_sha512_ssse3();
}

module_init(sha512_ssse3_mod_init);
module_exit(sha512_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA512 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha512");
MODULE_ALIAS_CRYPTO("sha512-ssse3");
MODULE_ALIAS_CRYPTO("sha512-avx");
MODULE_ALIAS_CRYPTO("sha512-avx2");
MODULE_ALIAS_CRYPTO("sha384");
MODULE_ALIAS_CRYPTO("sha384-ssse3");
MODULE_ALIAS_CRYPTO("sha384-avx");
MODULE_ALIAS_CRYPTO("sha384-avx2");
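
/*
 * Usage sketch (not part of this driver): a kernel caller does not pick
 * one of these variants directly but requests "sha512" and gets the
 * highest-priority implementation available.  A minimal sketch, with
 * data, len and err supplied by the caller and error handling trimmed:
 *
 *	struct crypto_shash *tfm;
 *	u8 digest[SHA512_DIGEST_SIZE];
 *
 *	tfm = crypto_alloc_shash("sha512", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_tfm_digest(tfm, data, len, digest);
 *	crypto_free_shash(tfm);
 */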
