/*
 * Cryptographic API.
 *
 * Glue code for the SHA256 Secure Hash Algorithm assembler implementations
 * using SSSE3, AVX, AVX2, and SHA-NI instructions.
 *
 * This file is based on sha256_generic.c
 *
 * Copyright (C) 2013 Intel Corporation.
 *
 * Author:
 *	Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/string.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>

asmlinkage void sha256_transform_ssse3(struct sha256_state *state,
				       const u8 *data, int blocks);

static const struct x86_cpu_id module_cpu_ids[] = {
#ifdef CONFIG_AS_SHA256_NI
	X86_MATCH_FEATURE(X86_FEATURE_SHA_NI, NULL),
#endif
	X86_MATCH_FEATURE(X86_FEATURE_AVX2, NULL),
	X86_MATCH_FEATURE(X86_FEATURE_AVX, NULL),
	X86_MATCH_FEATURE(X86_FEATURE_SSSE3, NULL),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
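/*
 * Shared helpers used by all of the implementations below. If the SIMD
 * unit cannot be used in the current context (crypto_simd_usable() is
 * false, e.g. in hard interrupt context), or the new data still does not
 * complete a 64-byte block, fall back to the generic C implementation.
 * Otherwise, run the assembler block transform between
 * kernel_fpu_begin()/kernel_fpu_end() so the vector register state is
 * saved and restored around it.
 */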
static int _sha256_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len, sha256_block_fn *sha256_xform)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/*
	 * Make sure struct sha256_state begins directly with the SHA256
	 * 256-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len, sha256_xform);
	kernel_fpu_end();

	return 0;
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out,
			sha256_block_fn *sha256_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha256_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len, sha256_xform);
	sha256_base_do_finalize(desc, sha256_xform);
	kernel_fpu_end();

	return sha256_base_finish(desc, out);
}

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_ssse3);
}

static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ssse3_finup(desc, NULL, 0, out);
}

static int sha256_ssse3_digest(struct shash_desc *desc, const u8 *data,
			       unsigned int len, u8 *out)
{
	return sha256_base_init(desc) ?:
	       sha256_ssse3_finup(desc, data, len, out);
}

static struct shash_alg sha256_ssse3_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.digest		= sha256_ssse3_digest,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-ssse3",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-ssse3",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
	return 0;
}

static void unregister_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
}

asmlinkage void sha256_transform_avx(struct sha256_state *state,
				     const u8 *data, int blocks);

static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_avx);
}

static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_avx);
}

static int sha256_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx_finup(desc, NULL, 0, out);
}

static int sha256_avx_digest(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	return sha256_base_init(desc) ?:
	       sha256_avx_finup(desc, data, len, out);
}

static struct shash_alg sha256_avx_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_avx_update,
	.final		= sha256_avx_final,
	.finup		= sha256_avx_finup,
	.digest		= sha256_avx_digest,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-avx",
		.cra_priority	 = 160,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_avx_update,
	.final		= sha256_avx_final,
	.finup		= sha256_avx_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-avx",
		.cra_priority	 = 160,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };
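/*
 * The AVX CPUID feature bit alone is not enough: the kernel must also be
 * context-switching the extended (YMM) register state via XSAVE.
 * cpu_has_xfeatures() checks that both the SSE and YMM xfeatures are
 * actually managed, hence the check below rather than a plain
 * boot_cpu_has(X86_FEATURE_AVX).
 */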
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha256_avx(void)
{
	if (avx_usable())
		return crypto_register_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
	return 0;
}

static void unregister_sha256_avx(void)
{
	if (avx_usable())
		crypto_unregister_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
}

asmlinkage void sha256_transform_rorx(struct sha256_state *state,
				      const u8 *data, int blocks);

static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_rorx);
}

static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_rorx);
}

static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx2_finup(desc, NULL, 0, out);
}

static int sha256_avx2_digest(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha256_base_init(desc) ?:
	       sha256_avx2_finup(desc, data, len, out);
}

static struct shash_alg sha256_avx2_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_avx2_update,
	.final		= sha256_avx2_final,
	.finup		= sha256_avx2_finup,
	.digest		= sha256_avx2_digest,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-avx2",
		.cra_priority	 = 170,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_avx2_update,
	.final		= sha256_avx2_final,
	.finup		= sha256_avx2_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-avx2",
		.cra_priority	 = 170,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
	    boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static int register_sha256_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
	return 0;
}

static void unregister_sha256_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
}
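/*
 * The SHA-NI glue is only built when the assembler can emit the SHA
 * extension instructions (CONFIG_AS_SHA256_NI). Together with the
 * cra_priority values above (150 for SSSE3, 160 for AVX, 170 for AVX2),
 * the priority of 250 below means the crypto API prefers the SHA-NI
 * implementation whenever the CPU supports it.
 */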
#ifdef CONFIG_AS_SHA256_NI
asmlinkage void sha256_ni_transform(struct sha256_state *digest,
				    const u8 *data, int rounds);

static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_ni_transform);
}

static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_ni_transform);
}

static int sha256_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ni_finup(desc, NULL, 0, out);
}

static int sha256_ni_digest(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	return sha256_base_init(desc) ?:
	       sha256_ni_finup(desc, data, len, out);
}

static struct shash_alg sha256_ni_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_ni_update,
	.final		= sha256_ni_final,
	.finup		= sha256_ni_finup,
	.digest		= sha256_ni_digest,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-ni",
		.cra_priority	 = 250,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_ni_update,
	.final		= sha256_ni_final,
	.finup		= sha256_ni_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-ni",
		.cra_priority	 = 250,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
	return 0;
}

static void unregister_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
}

#else
static inline int register_sha256_ni(void) { return 0; }
static inline void unregister_sha256_ni(void) { }
#endif

static int __init sha256_ssse3_mod_init(void)
{
	if (!x86_match_cpu(module_cpu_ids))
		return -ENODEV;

	if (register_sha256_ssse3())
		goto fail;

	if (register_sha256_avx()) {
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_avx2()) {
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_ni()) {
		unregister_sha256_avx2();
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha256_ssse3_mod_fini(void)
{
	unregister_sha256_ni();
	unregister_sha256_avx2();
	unregister_sha256_avx();
	unregister_sha256_ssse3();
}

module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-ssse3");
MODULE_ALIAS_CRYPTO("sha256-avx");
MODULE_ALIAS_CRYPTO("sha256-avx2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-ssse3");
MODULE_ALIAS_CRYPTO("sha224-avx");
MODULE_ALIAS_CRYPTO("sha224-avx2");
#ifdef CONFIG_AS_SHA256_NI
MODULE_ALIAS_CRYPTO("sha256-ni");
MODULE_ALIAS_CRYPTO("sha224-ni");
#endif
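/*
 * Usage sketch (not part of this module): a kernel client requests
 * "sha256" through the shash API and automatically gets the
 * highest-priority implementation registered above. Error handling is
 * omitted for brevity.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_digest(desc, data, len, digest);
 *	crypto_free_shash(tfm);
 */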