1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * Asynchronous Cryptographic Hash operations. 4 * 5 * This is the implementation of the ahash (asynchronous hash) API. It differs 6 * from shash (synchronous hash) in that ahash supports asynchronous operations, 7 * and it hashes data from scatterlists instead of virtually addressed buffers. 8 * 9 * The ahash API provides access to both ahash and shash algorithms. The shash 10 * API only provides access to shash algorithms. 11 * 12 * Copyright (c) 2008 Loc Ho <lho@amcc.com> 13 */ 14 15 #include <crypto/scatterwalk.h> 16 #include <linux/cryptouser.h> 17 #include <linux/err.h> 18 #include <linux/kernel.h> 19 #include <linux/module.h> 20 #include <linux/sched.h> 21 #include <linux/slab.h> 22 #include <linux/seq_file.h> 23 #include <linux/string.h> 24 #include <net/netlink.h> 25 26 #include "hash.h" 27 28 #define CRYPTO_ALG_TYPE_AHASH_MASK 0x0000000e 29 30 /* 31 * For an ahash tfm that is using an shash algorithm (instead of an ahash 32 * algorithm), this returns the underlying shash tfm. 33 */ 34 static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm) 35 { 36 return *(struct crypto_shash **)crypto_ahash_ctx(tfm); 37 } 38 39 static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req, 40 struct crypto_ahash *tfm) 41 { 42 struct shash_desc *desc = ahash_request_ctx(req); 43 44 desc->tfm = ahash_to_shash(tfm); 45 return desc; 46 } 47 48 int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc) 49 { 50 struct crypto_hash_walk walk; 51 int nbytes; 52 53 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; 54 nbytes = crypto_hash_walk_done(&walk, nbytes)) 55 nbytes = crypto_shash_update(desc, walk.data, nbytes); 56 57 return nbytes; 58 } 59 EXPORT_SYMBOL_GPL(shash_ahash_update); 60 61 int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc) 62 { 63 struct crypto_hash_walk walk; 64 int nbytes; 65 66 nbytes = crypto_hash_walk_first(req, &walk); 67 if (!nbytes) 68 return crypto_shash_final(desc, req->result); 69 70 do { 71 nbytes = crypto_hash_walk_last(&walk) ? 72 crypto_shash_finup(desc, walk.data, nbytes, 73 req->result) : 74 crypto_shash_update(desc, walk.data, nbytes); 75 nbytes = crypto_hash_walk_done(&walk, nbytes); 76 } while (nbytes > 0); 77 78 return nbytes; 79 } 80 EXPORT_SYMBOL_GPL(shash_ahash_finup); 81 82 int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc) 83 { 84 unsigned int nbytes = req->nbytes; 85 struct scatterlist *sg; 86 unsigned int offset; 87 int err; 88 89 if (nbytes && 90 (sg = req->src, offset = sg->offset, 91 nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) { 92 void *data; 93 94 data = kmap_local_page(sg_page(sg)); 95 err = crypto_shash_digest(desc, data + offset, nbytes, 96 req->result); 97 kunmap_local(data); 98 } else 99 err = crypto_shash_init(desc) ?: 100 shash_ahash_finup(req, desc); 101 102 return err; 103 } 104 EXPORT_SYMBOL_GPL(shash_ahash_digest); 105 106 static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm) 107 { 108 struct crypto_shash **ctx = crypto_tfm_ctx(tfm); 109 110 crypto_free_shash(*ctx); 111 } 112 113 static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm) 114 { 115 struct crypto_alg *calg = tfm->__crt_alg; 116 struct crypto_ahash *crt = __crypto_ahash_cast(tfm); 117 struct crypto_shash **ctx = crypto_tfm_ctx(tfm); 118 struct crypto_shash *shash; 119 120 if (!crypto_mod_get(calg)) 121 return -EAGAIN; 122 123 shash = crypto_create_tfm(calg, &crypto_shash_type); 124 if (IS_ERR(shash)) { 125 crypto_mod_put(calg); 126 return PTR_ERR(shash); 127 } 128 129 crt->using_shash = true; 130 *ctx = shash; 131 tfm->exit = crypto_exit_ahash_using_shash; 132 133 crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) & 134 CRYPTO_TFM_NEED_KEY); 135 crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash); 136 137 return 0; 138 } 139 140 static int hash_walk_next(struct crypto_hash_walk *walk) 141 { 142 unsigned int offset = walk->offset; 143 unsigned int nbytes = min(walk->entrylen, 144 ((unsigned int)(PAGE_SIZE)) - offset); 145 146 walk->data = kmap_local_page(walk->pg); 147 walk->data += offset; 148 walk->entrylen -= nbytes; 149 return nbytes; 150 } 151 152 static int hash_walk_new_entry(struct crypto_hash_walk *walk) 153 { 154 struct scatterlist *sg; 155 156 sg = walk->sg; 157 walk->offset = sg->offset; 158 walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT); 159 walk->offset = offset_in_page(walk->offset); 160 walk->entrylen = sg->length; 161 162 if (walk->entrylen > walk->total) 163 walk->entrylen = walk->total; 164 walk->total -= walk->entrylen; 165 166 return hash_walk_next(walk); 167 } 168 169 int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err) 170 { 171 walk->data -= walk->offset; 172 173 kunmap_local(walk->data); 174 crypto_yield(walk->flags); 175 176 if (err) 177 return err; 178 179 if (walk->entrylen) { 180 walk->offset = 0; 181 walk->pg++; 182 return hash_walk_next(walk); 183 } 184 185 if (!walk->total) 186 return 0; 187 188 walk->sg = sg_next(walk->sg); 189 190 return hash_walk_new_entry(walk); 191 } 192 EXPORT_SYMBOL_GPL(crypto_hash_walk_done); 193 194 int crypto_hash_walk_first(struct ahash_request *req, 195 struct crypto_hash_walk *walk) 196 { 197 walk->total = req->nbytes; 198 199 if (!walk->total) { 200 walk->entrylen = 0; 201 return 0; 202 } 203 204 walk->sg = req->src; 205 walk->flags = req->base.flags; 206 207 return hash_walk_new_entry(walk); 208 } 209 EXPORT_SYMBOL_GPL(crypto_hash_walk_first); 210 211 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, 212 unsigned int keylen) 213 { 214 return -ENOSYS; 215 } 216 217 static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg) 218 { 219 if (alg->setkey != ahash_nosetkey && 220 !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY)) 221 crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY); 222 } 223 224 int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, 225 unsigned int keylen) 226 { 227 if (likely(tfm->using_shash)) { 228 struct crypto_shash *shash = ahash_to_shash(tfm); 229 int err; 230 231 err = crypto_shash_setkey(shash, key, keylen); 232 if (unlikely(err)) { 233 crypto_ahash_set_flags(tfm, 234 crypto_shash_get_flags(shash) & 235 CRYPTO_TFM_NEED_KEY); 236 return err; 237 } 238 } else { 239 struct ahash_alg *alg = crypto_ahash_alg(tfm); 240 int err; 241 242 err = alg->setkey(tfm, key, keylen); 243 if (unlikely(err)) { 244 ahash_set_needkey(tfm, alg); 245 return err; 246 } 247 } 248 crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY); 249 return 0; 250 } 251 EXPORT_SYMBOL_GPL(crypto_ahash_setkey); 252 253 int crypto_ahash_init(struct ahash_request *req) 254 { 255 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 256 257 if (likely(tfm->using_shash)) 258 return crypto_shash_init(prepare_shash_desc(req, tfm)); 259 if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY) 260 return -ENOKEY; 261 return crypto_ahash_alg(tfm)->init(req); 262 } 263 EXPORT_SYMBOL_GPL(crypto_ahash_init); 264 265 static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt, 266 bool has_state) 267 { 268 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 269 unsigned int ds = crypto_ahash_digestsize(tfm); 270 struct ahash_request *subreq; 271 unsigned int subreq_size; 272 unsigned int reqsize; 273 u8 *result; 274 gfp_t gfp; 275 u32 flags; 276 277 subreq_size = sizeof(*subreq); 278 reqsize = crypto_ahash_reqsize(tfm); 279 reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment()); 280 subreq_size += reqsize; 281 subreq_size += ds; 282 283 flags = ahash_request_flags(req); 284 gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC; 285 subreq = kmalloc(subreq_size, gfp); 286 if (!subreq) 287 return -ENOMEM; 288 289 ahash_request_set_tfm(subreq, tfm); 290 ahash_request_set_callback(subreq, flags, cplt, req); 291 292 result = (u8 *)(subreq + 1) + reqsize; 293 294 ahash_request_set_crypt(subreq, req->src, result, req->nbytes); 295 296 if (has_state) { 297 void *state; 298 299 state = kmalloc(crypto_ahash_statesize(tfm), gfp); 300 if (!state) { 301 kfree(subreq); 302 return -ENOMEM; 303 } 304 305 crypto_ahash_export(req, state); 306 crypto_ahash_import(subreq, state); 307 kfree_sensitive(state); 308 } 309 310 req->priv = subreq; 311 312 return 0; 313 } 314 315 static void ahash_restore_req(struct ahash_request *req, int err) 316 { 317 struct ahash_request *subreq = req->priv; 318 319 if (!err) 320 memcpy(req->result, subreq->result, 321 crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); 322 323 req->priv = NULL; 324 325 kfree_sensitive(subreq); 326 } 327 328 int crypto_ahash_update(struct ahash_request *req) 329 { 330 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 331 332 if (likely(tfm->using_shash)) 333 return shash_ahash_update(req, ahash_request_ctx(req)); 334 335 return crypto_ahash_alg(tfm)->update(req); 336 } 337 EXPORT_SYMBOL_GPL(crypto_ahash_update); 338 339 int crypto_ahash_final(struct ahash_request *req) 340 { 341 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 342 343 if (likely(tfm->using_shash)) 344 return crypto_shash_final(ahash_request_ctx(req), req->result); 345 346 return crypto_ahash_alg(tfm)->final(req); 347 } 348 EXPORT_SYMBOL_GPL(crypto_ahash_final); 349 350 int crypto_ahash_finup(struct ahash_request *req) 351 { 352 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 353 354 if (likely(tfm->using_shash)) 355 return shash_ahash_finup(req, ahash_request_ctx(req)); 356 357 return crypto_ahash_alg(tfm)->finup(req); 358 } 359 EXPORT_SYMBOL_GPL(crypto_ahash_finup); 360 361 int crypto_ahash_digest(struct ahash_request *req) 362 { 363 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 364 365 if (likely(tfm->using_shash)) 366 return shash_ahash_digest(req, prepare_shash_desc(req, tfm)); 367 368 if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY) 369 return -ENOKEY; 370 371 return crypto_ahash_alg(tfm)->digest(req); 372 } 373 EXPORT_SYMBOL_GPL(crypto_ahash_digest); 374 375 static void ahash_def_finup_done2(void *data, int err) 376 { 377 struct ahash_request *areq = data; 378 379 if (err == -EINPROGRESS) 380 return; 381 382 ahash_restore_req(areq, err); 383 384 ahash_request_complete(areq, err); 385 } 386 387 static int ahash_def_finup_finish1(struct ahash_request *req, int err) 388 { 389 struct ahash_request *subreq = req->priv; 390 391 if (err) 392 goto out; 393 394 subreq->base.complete = ahash_def_finup_done2; 395 396 err = crypto_ahash_alg(crypto_ahash_reqtfm(req))->final(subreq); 397 if (err == -EINPROGRESS || err == -EBUSY) 398 return err; 399 400 out: 401 ahash_restore_req(req, err); 402 return err; 403 } 404 405 static void ahash_def_finup_done1(void *data, int err) 406 { 407 struct ahash_request *areq = data; 408 struct ahash_request *subreq; 409 410 if (err == -EINPROGRESS) 411 goto out; 412 413 subreq = areq->priv; 414 subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG; 415 416 err = ahash_def_finup_finish1(areq, err); 417 if (err == -EINPROGRESS || err == -EBUSY) 418 return; 419 420 out: 421 ahash_request_complete(areq, err); 422 } 423 424 static int ahash_def_finup(struct ahash_request *req) 425 { 426 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 427 int err; 428 429 err = ahash_save_req(req, ahash_def_finup_done1, true); 430 if (err) 431 return err; 432 433 err = crypto_ahash_alg(tfm)->update(req->priv); 434 if (err == -EINPROGRESS || err == -EBUSY) 435 return err; 436 437 return ahash_def_finup_finish1(req, err); 438 } 439 440 int crypto_ahash_export(struct ahash_request *req, void *out) 441 { 442 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 443 444 if (likely(tfm->using_shash)) 445 return crypto_shash_export(ahash_request_ctx(req), out); 446 return crypto_ahash_alg(tfm)->export(req, out); 447 } 448 EXPORT_SYMBOL_GPL(crypto_ahash_export); 449 450 int crypto_ahash_import(struct ahash_request *req, const void *in) 451 { 452 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 453 454 if (likely(tfm->using_shash)) 455 return crypto_shash_import(prepare_shash_desc(req, tfm), in); 456 if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY) 457 return -ENOKEY; 458 return crypto_ahash_alg(tfm)->import(req, in); 459 } 460 EXPORT_SYMBOL_GPL(crypto_ahash_import); 461 462 static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm) 463 { 464 struct crypto_ahash *hash = __crypto_ahash_cast(tfm); 465 struct ahash_alg *alg = crypto_ahash_alg(hash); 466 467 alg->exit_tfm(hash); 468 } 469 470 static int crypto_ahash_init_tfm(struct crypto_tfm *tfm) 471 { 472 struct crypto_ahash *hash = __crypto_ahash_cast(tfm); 473 struct ahash_alg *alg = crypto_ahash_alg(hash); 474 475 crypto_ahash_set_statesize(hash, alg->halg.statesize); 476 477 if (tfm->__crt_alg->cra_type == &crypto_shash_type) 478 return crypto_init_ahash_using_shash(tfm); 479 480 ahash_set_needkey(hash, alg); 481 482 if (alg->exit_tfm) 483 tfm->exit = crypto_ahash_exit_tfm; 484 485 return alg->init_tfm ? alg->init_tfm(hash) : 0; 486 } 487 488 static unsigned int crypto_ahash_extsize(struct crypto_alg *alg) 489 { 490 if (alg->cra_type == &crypto_shash_type) 491 return sizeof(struct crypto_shash *); 492 493 return crypto_alg_extsize(alg); 494 } 495 496 static void crypto_ahash_free_instance(struct crypto_instance *inst) 497 { 498 struct ahash_instance *ahash = ahash_instance(inst); 499 500 ahash->free(ahash); 501 } 502 503 static int __maybe_unused crypto_ahash_report( 504 struct sk_buff *skb, struct crypto_alg *alg) 505 { 506 struct crypto_report_hash rhash; 507 508 memset(&rhash, 0, sizeof(rhash)); 509 510 strscpy(rhash.type, "ahash", sizeof(rhash.type)); 511 512 rhash.blocksize = alg->cra_blocksize; 513 rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize; 514 515 return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash); 516 } 517 518 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) 519 __maybe_unused; 520 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) 521 { 522 seq_printf(m, "type : ahash\n"); 523 seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ? 524 "yes" : "no"); 525 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); 526 seq_printf(m, "digestsize : %u\n", 527 __crypto_hash_alg_common(alg)->digestsize); 528 } 529 530 static const struct crypto_type crypto_ahash_type = { 531 .extsize = crypto_ahash_extsize, 532 .init_tfm = crypto_ahash_init_tfm, 533 .free = crypto_ahash_free_instance, 534 #ifdef CONFIG_PROC_FS 535 .show = crypto_ahash_show, 536 #endif 537 #if IS_ENABLED(CONFIG_CRYPTO_USER) 538 .report = crypto_ahash_report, 539 #endif 540 .maskclear = ~CRYPTO_ALG_TYPE_MASK, 541 .maskset = CRYPTO_ALG_TYPE_AHASH_MASK, 542 .type = CRYPTO_ALG_TYPE_AHASH, 543 .tfmsize = offsetof(struct crypto_ahash, base), 544 }; 545 546 int crypto_grab_ahash(struct crypto_ahash_spawn *spawn, 547 struct crypto_instance *inst, 548 const char *name, u32 type, u32 mask) 549 { 550 spawn->base.frontend = &crypto_ahash_type; 551 return crypto_grab_spawn(&spawn->base, inst, name, type, mask); 552 } 553 EXPORT_SYMBOL_GPL(crypto_grab_ahash); 554 555 struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type, 556 u32 mask) 557 { 558 return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask); 559 } 560 EXPORT_SYMBOL_GPL(crypto_alloc_ahash); 561 562 int crypto_has_ahash(const char *alg_name, u32 type, u32 mask) 563 { 564 return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask); 565 } 566 EXPORT_SYMBOL_GPL(crypto_has_ahash); 567 568 static bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg) 569 { 570 struct crypto_alg *alg = &halg->base; 571 572 if (alg->cra_type == &crypto_shash_type) 573 return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg)); 574 575 return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey; 576 } 577 578 struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash) 579 { 580 struct hash_alg_common *halg = crypto_hash_alg_common(hash); 581 struct crypto_tfm *tfm = crypto_ahash_tfm(hash); 582 struct crypto_ahash *nhash; 583 struct ahash_alg *alg; 584 int err; 585 586 if (!crypto_hash_alg_has_setkey(halg)) { 587 tfm = crypto_tfm_get(tfm); 588 if (IS_ERR(tfm)) 589 return ERR_CAST(tfm); 590 591 return hash; 592 } 593 594 nhash = crypto_clone_tfm(&crypto_ahash_type, tfm); 595 596 if (IS_ERR(nhash)) 597 return nhash; 598 599 nhash->reqsize = hash->reqsize; 600 nhash->statesize = hash->statesize; 601 602 if (likely(hash->using_shash)) { 603 struct crypto_shash **nctx = crypto_ahash_ctx(nhash); 604 struct crypto_shash *shash; 605 606 shash = crypto_clone_shash(ahash_to_shash(hash)); 607 if (IS_ERR(shash)) { 608 err = PTR_ERR(shash); 609 goto out_free_nhash; 610 } 611 nhash->using_shash = true; 612 *nctx = shash; 613 return nhash; 614 } 615 616 err = -ENOSYS; 617 alg = crypto_ahash_alg(hash); 618 if (!alg->clone_tfm) 619 goto out_free_nhash; 620 621 err = alg->clone_tfm(nhash, hash); 622 if (err) 623 goto out_free_nhash; 624 625 return nhash; 626 627 out_free_nhash: 628 crypto_free_ahash(nhash); 629 return ERR_PTR(err); 630 } 631 EXPORT_SYMBOL_GPL(crypto_clone_ahash); 632 633 static int ahash_prepare_alg(struct ahash_alg *alg) 634 { 635 struct crypto_alg *base = &alg->halg.base; 636 int err; 637 638 if (alg->halg.statesize == 0) 639 return -EINVAL; 640 641 err = hash_prepare_alg(&alg->halg); 642 if (err) 643 return err; 644 645 base->cra_type = &crypto_ahash_type; 646 base->cra_flags |= CRYPTO_ALG_TYPE_AHASH; 647 648 if (!alg->finup) 649 alg->finup = ahash_def_finup; 650 if (!alg->setkey) 651 alg->setkey = ahash_nosetkey; 652 653 return 0; 654 } 655 656 int crypto_register_ahash(struct ahash_alg *alg) 657 { 658 struct crypto_alg *base = &alg->halg.base; 659 int err; 660 661 err = ahash_prepare_alg(alg); 662 if (err) 663 return err; 664 665 return crypto_register_alg(base); 666 } 667 EXPORT_SYMBOL_GPL(crypto_register_ahash); 668 669 void crypto_unregister_ahash(struct ahash_alg *alg) 670 { 671 crypto_unregister_alg(&alg->halg.base); 672 } 673 EXPORT_SYMBOL_GPL(crypto_unregister_ahash); 674 675 int crypto_register_ahashes(struct ahash_alg *algs, int count) 676 { 677 int i, ret; 678 679 for (i = 0; i < count; i++) { 680 ret = crypto_register_ahash(&algs[i]); 681 if (ret) 682 goto err; 683 } 684 685 return 0; 686 687 err: 688 for (--i; i >= 0; --i) 689 crypto_unregister_ahash(&algs[i]); 690 691 return ret; 692 } 693 EXPORT_SYMBOL_GPL(crypto_register_ahashes); 694 695 void crypto_unregister_ahashes(struct ahash_alg *algs, int count) 696 { 697 int i; 698 699 for (i = count - 1; i >= 0; --i) 700 crypto_unregister_ahash(&algs[i]); 701 } 702 EXPORT_SYMBOL_GPL(crypto_unregister_ahashes); 703 704 int ahash_register_instance(struct crypto_template *tmpl, 705 struct ahash_instance *inst) 706 { 707 int err; 708 709 if (WARN_ON(!inst->free)) 710 return -EINVAL; 711 712 err = ahash_prepare_alg(&inst->alg); 713 if (err) 714 return err; 715 716 return crypto_register_instance(tmpl, ahash_crypto_instance(inst)); 717 } 718 EXPORT_SYMBOL_GPL(ahash_register_instance); 719 720 MODULE_LICENSE("GPL"); 721 MODULE_DESCRIPTION("Asynchronous cryptographic hash type"); 722
Linux® is a registered trademark of Linus Torvalds in the United States and other countries.
TOMOYO® is a registered trademark of NTT DATA CORPORATION.