// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/once.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uio.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/acompress.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/simd.h>

#include "internal.h"

MODULE_IMPORT_NS(CRYPTO_INTERNAL);

static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

static bool panic_on_fail;
module_param(panic_on_fail, bool, 0444);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
static bool noextratests;
module_param(noextratests, bool, 0644);
MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");

static unsigned int fuzz_iterations = 100;
module_param(fuzz_iterations, uint, 0644);
MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");
#endif

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

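/*
 * A minimal usage sketch for the parameters declared above, assuming testmgr
 * is linked into the cryptomgr module as in mainline kernels (the exact
 * parameter prefix depends on how this file is built):
 *
 *	cryptomgr.notests=1		skip the crypto self-tests at boot
 *	cryptomgr.fuzz_iterations=1000	example value; raises the number of
 *					fuzz iterations when
 *					CONFIG_CRYPTO_MANAGER_EXTRA_TESTS=y
 */
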
#else

#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct aead_test_suite {
	const struct aead_testvec *vecs;
	unsigned int count;

	/*
	 * Set if trying to decrypt an inauthentic ciphertext with this
	 * algorithm might result in EINVAL rather than EBADMSG, due to other
	 * validation the algorithm does on the inputs such as length checks.
	 */
	unsigned int einval_allowed : 1;

	/*
	 * Set if this algorithm requires that the IV be located at the end of
	 * the AAD buffer, in addition to being given in the normal way.  The
	 * behavior when the two IV copies differ is implementation-defined.
	 */
	unsigned int aad_iv : 1;
};

struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	const char *generic_driver;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_pages((unsigned long)buf[i], order);

	return -ENOMEM;
}

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	return __testmgr_alloc_buf(buf, 0);
}

static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_pages((unsigned long)buf[i], order);
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	__testmgr_free_buf(buf, 0);
}

#define TESTMGR_POISON_BYTE	0xfe
#define TESTMGR_POISON_LEN	16

static inline void testmgr_poison(void *addr, size_t len)
{
	memset(addr, TESTMGR_POISON_BYTE, len);
}

/* Is the memory region still fully poisoned? */
static inline bool testmgr_is_poison(const void *addr, size_t len)
{
	return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
}

/* flush type for hash algorithms */
enum flush_type {
	/* merge with update of previous buffer(s) */
	FLUSH_TYPE_NONE = 0,

	/* update with previous buffer(s) before doing this one */
	FLUSH_TYPE_FLUSH,

	/* likewise, but also export and re-import the intermediate state */
	FLUSH_TYPE_REIMPORT,
};

/* finalization function for hash algorithms */
enum finalization_type {
	FINALIZATION_TYPE_FINAL,	/* use final() */
	FINALIZATION_TYPE_FINUP,	/* use finup() */
	FINALIZATION_TYPE_DIGEST,	/* use digest() */
};

/*
 * Whether the crypto operation will occur in-place, and if so whether the
 * source and destination scatterlist pointers will coincide (req->src ==
 * req->dst), or whether they'll merely point to two separate scatterlists
 * (req->src != req->dst) that reference the same underlying memory.
 *
 * This is only relevant for algorithm types that support in-place operation.
 */
enum inplace_mode {
	OUT_OF_PLACE,
	INPLACE_ONE_SGLIST,
	INPLACE_TWO_SGLISTS,
};

#define TEST_SG_TOTAL	10000

/**
 * struct test_sg_division - description of a scatterlist entry
 *
 * This struct describes one entry of a scatterlist being constructed to check a
 * crypto test vector.
<< 256 * << 257 * @proportion_of_total: length of this chunk << 258 * given as a proportion << 259 * scales to fit any tes << 260 * @offset: byte offset into a 2-page buffer a << 261 * @offset_relative_to_alignmask: if true, add << 262 * @offset << 263 * @flush_type: for hashes, whether an update( << 264 * continuing to accumulate data << 265 * @nosimd: if doing the pending update(), do << 266 */ << 267 struct test_sg_division { << 268 unsigned int proportion_of_total; << 269 unsigned int offset; << 270 bool offset_relative_to_alignmask; << 271 enum flush_type flush_type; << 272 bool nosimd; << 273 }; << 274 << 275 /** << 276 * struct testvec_config - configuration for t << 277 * << 278 * This struct describes the data layout and o << 279 * crypto test vector can be tested. << 280 * << 281 * @name: name of this config, logged for debu << 282 * @inplace_mode: whether and how to operate o << 283 * @req_flags: extra request_flags, e.g. CRYPT << 284 * @src_divs: description of how to arrange th << 285 * @dst_divs: description of how to arrange th << 286 * for the algorithm type. Default << 287 * @iv_offset: misalignment of the IV in the r << 288 * where 0 is aligned to a 2*(MAX_ << 289 * @iv_offset_relative_to_alignmask: if true, << 290 * the @iv_o << 291 * @key_offset: misalignment of the key, where << 292 * @key_offset_relative_to_alignmask: if true, << 293 * the @key << 294 * @finalization_type: what finalization funct << 295 * @nosimd: execute with SIMD disabled? Requi << 296 * This applies to the parts of the o << 297 * individually by @nosimd_setkey or << 298 * @nosimd_setkey: set the key (if applicable) << 299 * !CRYPTO_TFM_REQ_MAY_SLEEP. << 300 */ << 301 struct testvec_config { << 302 const char *name; << 303 enum inplace_mode inplace_mode; << 304 u32 req_flags; << 305 struct test_sg_division src_divs[XBUFS << 306 struct test_sg_division dst_divs[XBUFS << 307 unsigned int iv_offset; << 308 unsigned int key_offset; << 309 bool iv_offset_relative_to_alignmask; << 310 bool key_offset_relative_to_alignmask; << 311 enum finalization_type finalization_ty << 312 bool nosimd; << 313 bool nosimd_setkey; << 314 }; << 315 << 316 #define TESTVEC_CONFIG_NAMELEN 192 << 317 << 318 /* << 319 * The following are the lists of testvec_conf << 320 * type when the basic crypto self-tests are e << 321 * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unse << 322 * coverage, while keeping the test time much << 323 * so that the basic tests can be enabled in a << 324 */ << 325 << 326 /* Configs for skciphers and aeads */ << 327 static const struct testvec_config default_cip << 328 { << 329 .name = "in-place (one sglist) << 330 .inplace_mode = INPLACE_ONE_SG << 331 .src_divs = { { .proportion_of << 332 }, { << 333 .name = "in-place (two sglists << 334 .inplace_mode = INPLACE_TWO_SG << 335 .src_divs = { { .proportion_of << 336 }, { << 337 .name = "out-of-place", << 338 .inplace_mode = OUT_OF_PLACE, << 339 .src_divs = { { .proportion_of << 340 }, { << 341 .name = "unaligned buffer, off << 342 .src_divs = { { .proportion_of << 343 .iv_offset = 1, << 344 .key_offset = 1, << 345 }, { << 346 .name = "buffer aligned only t << 347 .src_divs = { << 348 { << 349 .proportion_of << 350 .offset = 1, << 351 .offset_relati << 352 }, << 353 }, << 354 .iv_offset = 1, << 355 .iv_offset_relative_to_alignma << 356 .key_offset = 1, << 357 .key_offset_relative_to_alignm << 358 }, { << 359 .name = "two even aligned spli << 360 .src_divs = { << 361 { .proportion_of_total << 362 { .proportion_of_total << 363 }, << 364 }, { << 365 .name = "one 
src, two even spl << 366 .inplace_mode = OUT_OF_PLACE, << 367 .src_divs = { { .proportion_of << 368 .dst_divs = { << 369 { .proportion_of_total << 370 { .proportion_of_total << 371 }, << 372 }, { << 373 .name = "uneven misaligned spl << 374 .req_flags = CRYPTO_TFM_REQ_MA << 375 .src_divs = { << 376 { .proportion_of_total << 377 { .proportion_of_total << 378 { .proportion_of_total << 379 }, << 380 .iv_offset = 3, << 381 .key_offset = 3, << 382 }, { << 383 .name = "misaligned splits cro << 384 .inplace_mode = INPLACE_ONE_SG << 385 .src_divs = { << 386 { << 387 .proportion_of << 388 .offset = PAGE << 389 }, { << 390 .proportion_of << 391 .offset = PAGE << 392 }, << 393 }, << 394 } << 395 }; << 396 << 397 static const struct testvec_config default_has << 398 { << 399 .name = "init+update+final ali << 400 .src_divs = { { .proportion_of << 401 .finalization_type = FINALIZAT << 402 }, { << 403 .name = "init+finup aligned bu << 404 .src_divs = { { .proportion_of << 405 .finalization_type = FINALIZAT << 406 }, { << 407 .name = "digest aligned buffer << 408 .src_divs = { { .proportion_of << 409 .finalization_type = FINALIZAT << 410 }, { << 411 .name = "init+update+final mis << 412 .src_divs = { { .proportion_of << 413 .finalization_type = FINALIZAT << 414 .key_offset = 1, << 415 }, { << 416 .name = "digest misaligned buf << 417 .src_divs = { << 418 { << 419 .proportion_of << 420 .offset = 1, << 421 }, << 422 }, << 423 .finalization_type = FINALIZAT << 424 .key_offset = 1, << 425 }, { << 426 .name = "init+update+update+fi << 427 .src_divs = { << 428 { .proportion_of_total << 429 { << 430 .proportion_of << 431 .flush_type = << 432 }, << 433 }, << 434 .finalization_type = FINALIZAT << 435 }, { << 436 .name = "digest uneven misalig << 437 .req_flags = CRYPTO_TFM_REQ_MA << 438 .src_divs = { << 439 { .proportion_of_total << 440 { .proportion_of_total << 441 { .proportion_of_total << 442 }, << 443 .finalization_type = FINALIZAT << 444 }, { << 445 .name = "digest misaligned spl << 446 .src_divs = { << 447 { << 448 .proportion_of << 449 .offset = PAGE << 450 }, { << 451 .proportion_of << 452 .offset = PAGE << 453 }, << 454 }, << 455 .finalization_type = FINALIZAT << 456 }, { << 457 .name = "import/export", << 458 .src_divs = { << 459 { << 460 .proportion_of << 461 .flush_type = << 462 }, { << 463 .proportion_of << 464 .flush_type = << 465 }, << 466 }, << 467 .finalization_type = FINALIZAT << 468 } << 469 }; << 470 << 471 static unsigned int count_test_sg_divisions(co << 472 { << 473 unsigned int remaining = TEST_SG_TOTAL << 474 unsigned int ndivs = 0; << 475 << 476 do { << 477 remaining -= divs[ndivs++].pro << 478 } while (remaining); << 479 << 480 return ndivs; << 481 } << 482 << 483 #define SGDIVS_HAVE_FLUSHES BIT(0) << 484 #define SGDIVS_HAVE_NOSIMD BIT(1) << 485 << 486 static bool valid_sg_divisions(const struct te << 487 unsigned int co << 488 { << 489 unsigned int total = 0; << 490 unsigned int i; << 491 << 492 for (i = 0; i < count && total != TEST << 493 if (divs[i].proportion_of_tota << 494 divs[i].proportion_of_tota << 495 return false; << 496 total += divs[i].proportion_of << 497 if (divs[i].flush_type != FLUS << 498 *flags_ret |= SGDIVS_H << 499 if (divs[i].nosimd) << 500 *flags_ret |= SGDIVS_H << 501 } << 502 return total == TEST_SG_TOTAL && << 503 memchr_inv(&divs[i], 0, (count << 504 } << 505 << 506 /* << 507 * Check whether the given testvec_config is v << 508 * since every testvec_config should be valid, << 509 * don't unknowingly add broken configs that d << 510 */ << 511 static bool 
valid_testvec_config(const struct << 512 { << 513 int flags = 0; << 514 << 515 if (cfg->name == NULL) << 516 return false; << 517 << 518 if (!valid_sg_divisions(cfg->src_divs, << 519 &flags)) << 520 return false; << 521 << 522 if (cfg->dst_divs[0].proportion_of_tot << 523 if (!valid_sg_divisions(cfg->d << 524 ARRAY_ << 525 return false; << 526 } else { << 527 if (memchr_inv(cfg->dst_divs, << 528 return false; << 529 /* defaults to dst_divs=src_di << 530 } << 531 << 532 if (cfg->iv_offset + << 533 (cfg->iv_offset_relative_to_alignm << 534 MAX_ALGAPI_ALIGNMASK + 1) << 535 return false; << 536 << 537 if ((flags & (SGDIVS_HAVE_FLUSHES | SG << 538 cfg->finalization_type == FINALIZA << 539 return false; << 540 << 541 if ((cfg->nosimd || cfg->nosimd_setkey << 542 (flags & SGDIVS_HAVE_NOSIMD)) && << 543 (cfg->req_flags & CRYPTO_TFM_REQ_M << 544 return false; << 545 << 546 return true; << 547 } << 548 << 549 struct test_sglist { << 550 char *bufs[XBUFSIZE]; << 551 struct scatterlist sgl[XBUFSIZE]; << 552 struct scatterlist sgl_saved[XBUFSIZE] << 553 struct scatterlist *sgl_ptr; << 554 unsigned int nents; << 555 }; << 556 << 557 static int init_test_sglist(struct test_sglist << 558 { << 559 return __testmgr_alloc_buf(tsgl->bufs, << 560 } << 561 << 562 static void destroy_test_sglist(struct test_sg << 563 { << 564 return __testmgr_free_buf(tsgl->bufs, << 565 } << 566 << 567 /** << 568 * build_test_sglist() - build a scatterlist f << 569 * << 570 * @tsgl: the scatterlist to build. @tsgl->bu << 571 * buffers which the scatterlist @tsgl- << 572 * @divs: the layout specification on which th << 573 * @alignmask: the algorithm's alignmask << 574 * @total_len: the total length of the scatter << 575 * @data: if non-NULL, the buffers will be fil << 576 * Otherwise the buffers will be poison << 577 * past the end of each buffer will be << 578 * @out_divs: if non-NULL, the test_sg_divisio << 579 * corresponds will be returned her << 580 * that divisions resolving to a le << 581 * not included in the scatterlist. << 582 * << 583 * Return: 0 or a -errno value << 584 */ << 585 static int build_test_sglist(struct test_sglis << 586 const struct test << 587 const unsigned in << 588 const unsigned in << 589 struct iov_iter * << 590 const struct test << 591 { << 592 struct { << 593 const struct test_sg_division << 594 size_t length; << 595 } partitions[XBUFSIZE]; << 596 const unsigned int ndivs = count_test_ << 597 unsigned int len_remaining = total_len << 598 unsigned int i; << 599 << 600 BUILD_BUG_ON(ARRAY_SIZE(partitions) != << 601 if (WARN_ON(ndivs > ARRAY_SIZE(partiti << 602 return -EINVAL; << 603 << 604 /* Calculate the (div, length) pairs * << 605 tsgl->nents = 0; << 606 for (i = 0; i < ndivs; i++) { << 607 unsigned int len_this_sg = << 608 min(len_remaining, << 609 (total_len * divs[ << 610 TEST_SG_TOTAL / 2 << 611 << 612 if (len_this_sg != 0) { << 613 partitions[tsgl->nents << 614 partitions[tsgl->nents << 615 tsgl->nents++; << 616 len_remaining -= len_t << 617 } << 618 } << 619 if (tsgl->nents == 0) { << 620 partitions[tsgl->nents].div = << 621 partitions[tsgl->nents].length << 622 tsgl->nents++; << 623 } << 624 partitions[tsgl->nents - 1].length += << 625 << 626 /* Set up the sgl entries and fill the << 627 sg_init_table(tsgl->sgl, tsgl->nents); << 628 for (i = 0; i < tsgl->nents; i++) { << 629 unsigned int offset = partitio << 630 void *addr; << 631 << 632 if (partitions[i].div->offset_ << 633 offset += alignmask; << 634 << 635 while (offset + partitions[i]. 
<< 636 2 * PAGE_SIZE) { << 637 if (WARN_ON(offset <= << 638 return -EINVAL << 639 offset /= 2; << 640 } << 641 << 642 addr = &tsgl->bufs[i][offset]; << 643 sg_set_buf(&tsgl->sgl[i], addr << 644 << 645 if (out_divs) << 646 out_divs[i] = partitio << 647 << 648 if (data) { << 649 size_t copy_len, copie << 650 << 651 copy_len = min(partiti << 652 copied = copy_from_ite << 653 if (WARN_ON(copied != << 654 return -EINVAL << 655 testmgr_poison(addr + << 656 TESTMGR << 657 } else { << 658 testmgr_poison(addr, p << 659 TESTMGR << 660 } << 661 } << 662 << 663 sg_mark_end(&tsgl->sgl[tsgl->nents - 1 << 664 tsgl->sgl_ptr = tsgl->sgl; << 665 memcpy(tsgl->sgl_saved, tsgl->sgl, tsg << 666 return 0; << 667 } 178 } 668 179 669 /* !! 180 static int ahash_guard_result(char *result, char c, int size) 670 * Verify that a scatterlist crypto operation << 671 * << 672 * @tsgl: scatterlist containing the actual ou << 673 * @expected_output: buffer containing the exp << 674 * @len_to_check: length of @expected_output i << 675 * @unchecked_prefix_len: number of ignored by << 676 * @check_poison: verify that the poison bytes << 677 * << 678 * Return: 0 if correct, -EINVAL if incorrect, << 679 */ << 680 static int verify_correct_output(const struct << 681 const char *e << 682 unsigned int << 683 unsigned int << 684 bool check_po << 685 { 181 { 686 unsigned int i; !! 182 int i; 687 183 688 for (i = 0; i < tsgl->nents; i++) { !! 184 for (i = 0; i < size; i++) { 689 struct scatterlist *sg = &tsgl !! 185 if (result[i] != c) 690 unsigned int len = sg->length; << 691 unsigned int offset = sg->offs << 692 const char *actual_output; << 693 << 694 if (unchecked_prefix_len) { << 695 if (unchecked_prefix_l << 696 unchecked_pref << 697 continue; << 698 } << 699 offset += unchecked_pr << 700 len -= unchecked_prefi << 701 unchecked_prefix_len = << 702 } << 703 len = min(len, len_to_check); << 704 actual_output = page_address(s << 705 if (memcmp(expected_output, ac << 706 return -EINVAL; 186 return -EINVAL; 707 if (check_poison && << 708 !testmgr_is_poison(actual_ << 709 return -EOVERFLOW; << 710 len_to_check -= len; << 711 expected_output += len; << 712 } 187 } 713 if (WARN_ON(len_to_check != 0)) !! 188 714 return -EINVAL; << 715 return 0; 189 return 0; 716 } 190 } 717 191 718 static bool is_test_sglist_corrupted(const str !! 192 static int ahash_partial_update(struct ahash_request **preq, 719 { !! 193 struct crypto_ahash *tfm, const struct hash_testvec *template, 720 unsigned int i; !! 194 void *hash_buff, int k, int temp, struct scatterlist *sg, 721 !! 195 const char *algo, char *result, struct crypto_wait *wait) 722 for (i = 0; i < tsgl->nents; i++) { !! 196 { 723 if (tsgl->sgl[i].page_link != !! 197 char *state; 724 return true; !! 198 struct ahash_request *req; 725 if (tsgl->sgl[i].offset != tsg !! 199 int statesize, ret = -EINVAL; 726 return true; !! 200 static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 }; 727 if (tsgl->sgl[i].length != tsg !! 201 int digestsize = crypto_ahash_digestsize(tfm); 728 return true; !! 
202 >> 203 req = *preq; >> 204 statesize = crypto_ahash_statesize( >> 205 crypto_ahash_reqtfm(req)); >> 206 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL); >> 207 if (!state) { >> 208 pr_err("alg: hash: Failed to alloc state for %s\n", algo); >> 209 goto out_nostate; >> 210 } >> 211 memcpy(state + statesize, guard, sizeof(guard)); >> 212 memset(result, 1, digestsize); >> 213 ret = crypto_ahash_export(req, state); >> 214 WARN_ON(memcmp(state + statesize, guard, sizeof(guard))); >> 215 if (ret) { >> 216 pr_err("alg: hash: Failed to export() for %s\n", algo); >> 217 goto out; 729 } 218 } 730 return false; !! 219 ret = ahash_guard_result(result, 1, digestsize); 731 } !! 220 if (ret) { 732 !! 221 pr_err("alg: hash: Failed, export used req->result for %s\n", 733 struct cipher_test_sglists { !! 222 algo); 734 struct test_sglist src; !! 223 goto out; 735 struct test_sglist dst; << 736 }; << 737 << 738 static struct cipher_test_sglists *alloc_ciphe << 739 { << 740 struct cipher_test_sglists *tsgls; << 741 << 742 tsgls = kmalloc(sizeof(*tsgls), GFP_KE << 743 if (!tsgls) << 744 return NULL; << 745 << 746 if (init_test_sglist(&tsgls->src) != 0 << 747 goto fail_kfree; << 748 if (init_test_sglist(&tsgls->dst) != 0 << 749 goto fail_destroy_src; << 750 << 751 return tsgls; << 752 << 753 fail_destroy_src: << 754 destroy_test_sglist(&tsgls->src); << 755 fail_kfree: << 756 kfree(tsgls); << 757 return NULL; << 758 } << 759 << 760 static void free_cipher_test_sglists(struct ci << 761 { << 762 if (tsgls) { << 763 destroy_test_sglist(&tsgls->sr << 764 destroy_test_sglist(&tsgls->ds << 765 kfree(tsgls); << 766 } 224 } 767 } !! 225 ahash_request_free(req); 768 !! 226 req = ahash_request_alloc(tfm, GFP_KERNEL); 769 /* Build the src and dst scatterlists for an s !! 227 if (!req) { 770 static int build_cipher_test_sglists(struct ci !! 228 pr_err("alg: hash: Failed to alloc request for %s\n", algo); 771 const str !! 229 goto out_noreq; 772 unsigned << 773 unsigned << 774 unsigned << 775 const str << 776 unsigned << 777 { << 778 struct iov_iter input; << 779 int err; << 780 << 781 iov_iter_kvec(&input, ITER_SOURCE, inp << 782 err = build_test_sglist(&tsgls->src, c << 783 cfg->inplace_m << 784 max(ds << 785 src_to << 786 &input, NULL); << 787 if (err) << 788 return err; << 789 << 790 /* << 791 * In-place crypto operations can use << 792 * source and destination (req->src == << 793 * scatterlists (req->src != req->dst) << 794 * underlying memory. Make sure to te << 795 */ << 796 if (cfg->inplace_mode == INPLACE_ONE_S << 797 tsgls->dst.sgl_ptr = tsgls->sr << 798 tsgls->dst.nents = tsgls->src. << 799 return 0; << 800 } 230 } 801 if (cfg->inplace_mode == INPLACE_TWO_S !! 231 ahash_request_set_callback(req, 802 /* !! 232 CRYPTO_TFM_REQ_MAY_BACKLOG, 803 * For now we keep it simple a !! 233 crypto_req_done, wait); 804 * two scatterlists have ident !! 234 805 * different entries that spli !! 235 memcpy(hash_buff, template->plaintext + temp, 806 */ !! 236 template->tap[k]); 807 memcpy(tsgls->dst.sgl, tsgls-> !! 237 sg_init_one(&sg[0], hash_buff, template->tap[k]); 808 tsgls->src.nents * size !! 238 ahash_request_set_crypt(req, sg, result, template->tap[k]); 809 memcpy(tsgls->dst.sgl_saved, t !! 239 ret = crypto_ahash_import(req, state); 810 tsgls->src.nents * size !! 240 if (ret) { 811 tsgls->dst.sgl_ptr = tsgls->ds !! 241 pr_err("alg: hash: Failed to import() for %s\n", algo); 812 tsgls->dst.nents = tsgls->src. !! 242 goto out; 813 return 0; << 814 } 243 } 815 /* Out of place */ !! 
244 ret = ahash_guard_result(result, 1, digestsize); 816 return build_test_sglist(&tsgls->dst, !! 245 if (ret) { 817 cfg->dst_divs !! 246 pr_err("alg: hash: Failed, import used req->result for %s\n", 818 cfg->d !! 247 algo); 819 alignmask, ds !! 248 goto out; 820 } << 821 << 822 /* << 823 * Support for testing passing a misaligned ke << 824 * << 825 * If cfg->key_offset is set, copy the key int << 826 * optionally adding alignmask. Else, just us << 827 */ << 828 static int prepare_keybuf(const u8 *key, unsig << 829 const struct testvec << 830 unsigned int alignma << 831 const u8 **keybuf_re << 832 { << 833 unsigned int key_offset = cfg->key_off << 834 u8 *keybuf = NULL, *keyptr = (u8 *)key << 835 << 836 if (key_offset != 0) { << 837 if (cfg->key_offset_relative_t << 838 key_offset += alignmas << 839 keybuf = kmalloc(key_offset + << 840 if (!keybuf) << 841 return -ENOMEM; << 842 keyptr = keybuf + key_offset; << 843 memcpy(keyptr, key, ksize); << 844 } 249 } 845 *keybuf_ret = keybuf; !! 250 ret = crypto_wait_req(crypto_ahash_update(req), wait); 846 *keyptr_ret = keyptr; !! 251 if (ret) 847 return 0; !! 252 goto out; 848 } !! 253 *preq = req; 849 !! 254 ret = 0; 850 /* !! 255 goto out_noreq; 851 * Like setkey_f(tfm, key, ksize), but sometim !! 256 out: 852 * In addition, run the setkey function in no- !! 257 ahash_request_free(req); 853 */ !! 258 out_noreq: 854 #define do_setkey(setkey_f, tfm, key, ksize, c !! 259 kfree(state); 855 ({ !! 260 out_nostate: 856 const u8 *keybuf, *keyptr; !! 261 return ret; 857 int err; << 858 << 859 err = prepare_keybuf((key), (ksize), ( << 860 &keybuf, &keyptr) << 861 if (err == 0) { << 862 if ((cfg)->nosimd_setkey) << 863 crypto_disable_simd_fo << 864 err = setkey_f((tfm), keyptr, << 865 if ((cfg)->nosimd_setkey) << 866 crypto_reenable_simd_f << 867 kfree(keybuf); << 868 } << 869 err; << 870 }) << 871 << 872 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 873 << 874 /* << 875 * The fuzz tests use prandom instead of the n << 876 * need cryptographically secure random number << 877 * performance of these tests, especially if t << 878 * has been initialized or if they are run on << 879 */ << 880 << 881 static inline void init_rnd_state(struct rnd_s << 882 { << 883 prandom_seed_state(rng, get_random_u64 << 884 } << 885 << 886 static inline u8 prandom_u8(struct rnd_state * << 887 { << 888 return prandom_u32_state(rng); << 889 } << 890 << 891 static inline u32 prandom_u32_below(struct rnd << 892 { << 893 /* << 894 * This is slightly biased for non-pow << 895 * isn't important here. << 896 */ << 897 return prandom_u32_state(rng) % ceil; << 898 } << 899 << 900 static inline bool prandom_bool(struct rnd_sta << 901 { << 902 return prandom_u32_below(rng, 2); << 903 } << 904 << 905 static inline u32 prandom_u32_inclusive(struct << 906 u32 fl << 907 { << 908 return floor + prandom_u32_below(rng, << 909 } << 910 << 911 /* Generate a random length in range [0, max_l << 912 static unsigned int generate_random_length(str << 913 uns << 914 { << 915 unsigned int len = prandom_u32_below(r << 916 << 917 switch (prandom_u32_below(rng, 4)) { << 918 case 0: << 919 len %= 64; << 920 break; << 921 case 1: << 922 len %= 256; << 923 break; << 924 case 2: << 925 len %= 1024; << 926 break; << 927 default: << 928 break; << 929 } << 930 if (len && prandom_u32_below(rng, 4) = << 931 len = rounddown_pow_of_two(len << 932 return len; << 933 } 262 } 934 263 935 /* Flip a random bit in the given nonempty dat !! 
264 static int __test_hash(struct crypto_ahash *tfm, 936 static void flip_random_bit(struct rnd_state * !! 265 const struct hash_testvec *template, unsigned int tcount, 937 { !! 266 bool use_digest, const int align_offset) 938 size_t bitpos; !! 267 { 939 !! 268 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 940 bitpos = prandom_u32_below(rng, size * !! 269 size_t digest_size = crypto_ahash_digestsize(tfm); 941 buf[bitpos / 8] ^= 1 << (bitpos % 8); !! 270 unsigned int i, j, k, temp; 942 } !! 271 struct scatterlist sg[8]; >> 272 char *result; >> 273 char *key; >> 274 struct ahash_request *req; >> 275 struct crypto_wait wait; >> 276 void *hash_buff; >> 277 char *xbuf[XBUFSIZE]; >> 278 int ret = -ENOMEM; 943 279 944 /* Flip a random byte in the given nonempty da !! 280 result = kmalloc(digest_size, GFP_KERNEL); 945 static void flip_random_byte(struct rnd_state !! 281 if (!result) 946 { !! 282 return ret; 947 buf[prandom_u32_below(rng, size)] ^= 0 !! 283 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 948 } !! 284 if (!key) >> 285 goto out_nobuf; >> 286 if (testmgr_alloc_buf(xbuf)) >> 287 goto out_nobuf; 949 288 950 /* Sometimes make some random changes to the g !! 289 crypto_init_wait(&wait); 951 static void mutate_buffer(struct rnd_state *rn << 952 { << 953 size_t num_flips; << 954 size_t i; << 955 290 956 /* Sometimes flip some bits */ !! 291 req = ahash_request_alloc(tfm, GFP_KERNEL); 957 if (prandom_u32_below(rng, 4) == 0) { !! 292 if (!req) { 958 num_flips = min_t(size_t, 1 << !! 293 printk(KERN_ERR "alg: hash: Failed to allocate request for " 959 size * 8); !! 294 "%s\n", algo); 960 for (i = 0; i < num_flips; i++ !! 295 goto out_noreq; 961 flip_random_bit(rng, b << 962 } << 963 << 964 /* Sometimes flip some bytes */ << 965 if (prandom_u32_below(rng, 4) == 0) { << 966 num_flips = min_t(size_t, 1 << << 967 for (i = 0; i < num_flips; i++ << 968 flip_random_byte(rng, << 969 } 296 } 970 } !! 297 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, >> 298 crypto_req_done, &wait); 971 299 972 /* Randomly generate 'count' bytes, but someti !! 300 j = 0; 973 static void generate_random_bytes(struct rnd_s !! 301 for (i = 0; i < tcount; i++) { 974 { !! 302 if (template[i].np) 975 u8 b; !! 303 continue; 976 u8 increment; << 977 size_t i; << 978 304 979 if (count == 0) !! 305 ret = -EINVAL; 980 return; !! 306 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) >> 307 goto out; 981 308 982 switch (prandom_u32_below(rng, 8)) { / !! 309 j++; 983 case 0: !! 310 memset(result, 0, digest_size); 984 case 1: << 985 /* All the same byte, plus opt << 986 switch (prandom_u32_below(rng, << 987 case 0: << 988 b = 0x00; << 989 break; << 990 case 1: << 991 b = 0xff; << 992 break; << 993 default: << 994 b = prandom_u8(rng); << 995 break; << 996 } << 997 memset(buf, b, count); << 998 mutate_buffer(rng, buf, count) << 999 break; << 1000 case 2: << 1001 /* Ascending or descending by << 1002 increment = prandom_u8(rng); << 1003 b = prandom_u8(rng); << 1004 for (i = 0; i < count; i++, b << 1005 buf[i] = b; << 1006 mutate_buffer(rng, buf, count << 1007 break; << 1008 default: << 1009 /* Fully random bytes */ << 1010 prandom_bytes_state(rng, buf, << 1011 } << 1012 } << 1013 311 1014 static char *generate_random_sgl_divisions(st !! 312 hash_buff = xbuf[0]; 1015 st !! 313 hash_buff += align_offset; 1016 si << 1017 bo << 1018 { << 1019 struct test_sg_division *div = divs; << 1020 unsigned int remaining = TEST_SG_TOTA << 1021 314 1022 do { !! 
315 memcpy(hash_buff, template[i].plaintext, template[i].psize); 1023 unsigned int this_len; !! 316 sg_init_one(&sg[0], hash_buff, template[i].psize); 1024 const char *flushtype_str; << 1025 317 1026 if (div == &divs[max_divs - 1 !! 318 if (template[i].ksize) { 1027 this_len = remaining; !! 319 crypto_ahash_clear_flags(tfm, ~0); 1028 else if (prandom_u32_below(rn !! 320 if (template[i].ksize > MAX_KEYLEN) { 1029 this_len = (remaining !! 321 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 1030 else !! 322 j, algo, template[i].ksize, MAX_KEYLEN); 1031 this_len = prandom_u3 !! 323 ret = -EINVAL; 1032 div->proportion_of_total = th !! 324 goto out; 1033 !! 325 } 1034 if (prandom_u32_below(rng, 4) !! 326 memcpy(key, template[i].key, template[i].ksize); 1035 div->offset = prandom !! 327 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 1036 !! 328 if (ret) { 1037 !! 329 printk(KERN_ERR "alg: hash: setkey failed on " 1038 else if (prandom_bool(rng)) !! 330 "test %d for %s: ret=%d\n", j, algo, 1039 div->offset = prandom !! 331 -ret); 1040 else !! 332 goto out; 1041 div->offset = prandom << 1042 if (prandom_u32_below(rng, 8) << 1043 div->offset_relative_ << 1044 << 1045 div->flush_type = FLUSH_TYPE_ << 1046 if (gen_flushes) { << 1047 switch (prandom_u32_b << 1048 case 0: << 1049 div->flush_ty << 1050 break; << 1051 case 1: << 1052 div->flush_ty << 1053 break; << 1054 } 333 } 1055 } 334 } 1056 335 1057 if (div->flush_type != FLUSH_ !! 336 ahash_request_set_crypt(req, sg, result, template[i].psize); 1058 !(req_flags & CRYPTO_TFM_ !! 337 if (use_digest) { 1059 prandom_bool(rng)) !! 338 ret = crypto_wait_req(crypto_ahash_digest(req), &wait); 1060 div->nosimd = true; !! 339 if (ret) { 1061 !! 340 pr_err("alg: hash: digest failed on test %d " 1062 switch (div->flush_type) { !! 341 "for %s: ret=%d\n", j, algo, -ret); 1063 case FLUSH_TYPE_FLUSH: !! 342 goto out; 1064 if (div->nosimd) !! 343 } 1065 flushtype_str !! 344 } else { 1066 else !! 345 memset(result, 1, digest_size); 1067 flushtype_str !! 346 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 1068 break; !! 347 if (ret) { 1069 case FLUSH_TYPE_REIMPORT: !! 348 pr_err("alg: hash: init failed on test %d " 1070 if (div->nosimd) !! 349 "for %s: ret=%d\n", j, algo, -ret); 1071 flushtype_str !! 350 goto out; 1072 else !! 351 } 1073 flushtype_str !! 352 ret = ahash_guard_result(result, 1, digest_size); 1074 break; !! 353 if (ret) { 1075 default: !! 354 pr_err("alg: hash: init failed on test %d " 1076 flushtype_str = ""; !! 355 "for %s: used req->result\n", j, algo); 1077 break; !! 356 goto out; >> 357 } >> 358 ret = crypto_wait_req(crypto_ahash_update(req), &wait); >> 359 if (ret) { >> 360 pr_err("alg: hash: update failed on test %d " >> 361 "for %s: ret=%d\n", j, algo, -ret); >> 362 goto out; >> 363 } >> 364 ret = ahash_guard_result(result, 1, digest_size); >> 365 if (ret) { >> 366 pr_err("alg: hash: update failed on test %d " >> 367 "for %s: used req->result\n", j, algo); >> 368 goto out; >> 369 } >> 370 ret = crypto_wait_req(crypto_ahash_final(req), &wait); >> 371 if (ret) { >> 372 pr_err("alg: hash: final failed on test %d " >> 373 "for %s: ret=%d\n", j, algo, -ret); >> 374 goto out; >> 375 } 1078 } 376 } 1079 377 1080 BUILD_BUG_ON(TEST_SG_TOTAL != !! 378 if (memcmp(result, template[i].digest, 1081 p += scnprintf(p, end - p, "% !! 379 crypto_ahash_digestsize(tfm))) { 1082 this_len / 100 !! 380 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 1083 div->offset_re !! 381 j, algo); 1084 "alig !! 
382 hexdump(result, crypto_ahash_digestsize(tfm)); 1085 div->offset, t !! 383 ret = -EINVAL; 1086 remaining -= this_len; !! 384 goto out; 1087 div++; !! 385 } 1088 } while (remaining); << 1089 << 1090 return p; << 1091 } << 1092 << 1093 /* Generate a random testvec_config for fuzz << 1094 static void generate_random_testvec_config(st << 1095 st << 1096 ch << 1097 { << 1098 char *p = name; << 1099 char * const end = name + max_namelen << 1100 << 1101 memset(cfg, 0, sizeof(*cfg)); << 1102 << 1103 cfg->name = name; << 1104 << 1105 p += scnprintf(p, end - p, "random:") << 1106 << 1107 switch (prandom_u32_below(rng, 4)) { << 1108 case 0: << 1109 case 1: << 1110 cfg->inplace_mode = OUT_OF_PL << 1111 break; << 1112 case 2: << 1113 cfg->inplace_mode = INPLACE_O << 1114 p += scnprintf(p, end - p, " << 1115 break; << 1116 default: << 1117 cfg->inplace_mode = INPLACE_T << 1118 p += scnprintf(p, end - p, " << 1119 break; << 1120 } << 1121 << 1122 if (prandom_bool(rng)) { << 1123 cfg->req_flags |= CRYPTO_TFM_ << 1124 p += scnprintf(p, end - p, " << 1125 } << 1126 << 1127 switch (prandom_u32_below(rng, 4)) { << 1128 case 0: << 1129 cfg->finalization_type = FINA << 1130 p += scnprintf(p, end - p, " << 1131 break; << 1132 case 1: << 1133 cfg->finalization_type = FINA << 1134 p += scnprintf(p, end - p, " << 1135 break; << 1136 default: << 1137 cfg->finalization_type = FINA << 1138 p += scnprintf(p, end - p, " << 1139 break; << 1140 } << 1141 << 1142 if (!(cfg->req_flags & CRYPTO_TFM_REQ << 1143 if (prandom_bool(rng)) { << 1144 cfg->nosimd = true; << 1145 p += scnprintf(p, end << 1146 } << 1147 if (prandom_bool(rng)) { << 1148 cfg->nosimd_setkey = << 1149 p += scnprintf(p, end << 1150 } << 1151 } << 1152 << 1153 p += scnprintf(p, end - p, " src_divs << 1154 p = generate_random_sgl_divisions(rng << 1155 ARR << 1156 (cf << 1157 FI << 1158 cfg << 1159 p += scnprintf(p, end - p, "]"); << 1160 << 1161 if (cfg->inplace_mode == OUT_OF_PLACE << 1162 p += scnprintf(p, end - p, " << 1163 p = generate_random_sgl_divis << 1164 << 1165 << 1166 << 1167 p += scnprintf(p, end - p, "] << 1168 } << 1169 << 1170 if (prandom_bool(rng)) { << 1171 cfg->iv_offset = prandom_u32_ << 1172 << 1173 p += scnprintf(p, end - p, " << 1174 } << 1175 << 1176 if (prandom_bool(rng)) { << 1177 cfg->key_offset = prandom_u32 << 1178 << 1179 p += scnprintf(p, end - p, " << 1180 } 386 } 1181 387 1182 WARN_ON_ONCE(!valid_testvec_config(cf !! 388 j = 0; 1183 } !! 389 for (i = 0; i < tcount; i++) { 1184 !! 390 /* alignment tests are only done with continuous buffers */ 1185 static void crypto_disable_simd_for_test(void !! 391 if (align_offset != 0) 1186 { !! 392 break; 1187 migrate_disable(); << 1188 __this_cpu_write(crypto_simd_disabled << 1189 } << 1190 << 1191 static void crypto_reenable_simd_for_test(voi << 1192 { << 1193 __this_cpu_write(crypto_simd_disabled << 1194 migrate_enable(); << 1195 } << 1196 393 1197 /* !! 394 if (!template[i].np) 1198 * Given an algorithm name, build the name of !! 
395 continue; 1199 * algorithm, assuming the usual naming conve << 1200 * "-generic" to every part of the name that << 1201 * << 1202 * aes => aes-generic << 1203 * cbc(aes) => cbc(aes-generic) << 1204 * cts(cbc(aes)) => cts(cbc(aes-generic) << 1205 * rfc7539(chacha20,poly1305) => rfc7539 << 1206 * << 1207 * Return: 0 on success, or -ENAMETOOLONG if << 1208 */ << 1209 static int build_generic_driver_name(const ch << 1210 char dri << 1211 { << 1212 const char *in = algname; << 1213 char *out = driver_name; << 1214 size_t len = strlen(algname); << 1215 396 1216 if (len >= CRYPTO_MAX_ALG_NAME) !! 397 j++; 1217 goto too_long; !! 398 memset(result, 0, digest_size); 1218 do { << 1219 const char *in_saved = in; << 1220 399 1221 while (*in && *in != '(' && * !! 400 temp = 0; 1222 *out++ = *in++; !! 401 sg_init_table(sg, template[i].np); 1223 if (*in != '(' && in > in_sav !! 402 ret = -EINVAL; 1224 len += 8; !! 403 for (k = 0; k < template[i].np; k++) { 1225 if (len >= CRYPTO_MAX !! 404 if (WARN_ON(offset_in_page(IDX[k]) + 1226 goto too_long !! 405 template[i].tap[k] > PAGE_SIZE)) 1227 memcpy(out, "-generic !! 406 goto out; 1228 out += 8; !! 407 sg_set_buf(&sg[k], >> 408 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + >> 409 offset_in_page(IDX[k]), >> 410 template[i].plaintext + temp, >> 411 template[i].tap[k]), >> 412 template[i].tap[k]); >> 413 temp += template[i].tap[k]; 1229 } 414 } 1230 } while ((*out++ = *in++) != '\0'); << 1231 return 0; << 1232 << 1233 too_long: << 1234 pr_err("alg: generic driver name for << 1235 algname); << 1236 return -ENAMETOOLONG; << 1237 } << 1238 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS * << 1239 static void crypto_disable_simd_for_test(void << 1240 { << 1241 } << 1242 << 1243 static void crypto_reenable_simd_for_test(voi << 1244 { << 1245 } << 1246 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 1247 << 1248 static int build_hash_sglist(struct test_sgli << 1249 const struct has << 1250 const struct tes << 1251 unsigned int ali << 1252 const struct tes << 1253 { << 1254 struct kvec kv; << 1255 struct iov_iter input; << 1256 << 1257 kv.iov_base = (void *)vec->plaintext; << 1258 kv.iov_len = vec->psize; << 1259 iov_iter_kvec(&input, ITER_SOURCE, &k << 1260 return build_test_sglist(tsgl, cfg->s << 1261 &input, divs << 1262 } << 1263 << 1264 static int check_hash_result(const char *type << 1265 const u8 *result << 1266 const struct has << 1267 const char *vec_ << 1268 const char *driv << 1269 const struct tes << 1270 { << 1271 if (memcmp(result, vec->digest, diges << 1272 pr_err("alg: %s: %s test fail << 1273 type, driver, vec_name << 1274 return -EINVAL; << 1275 } << 1276 if (!testmgr_is_poison(&result[digest << 1277 pr_err("alg: %s: %s overran r << 1278 type, driver, vec_name << 1279 return -EOVERFLOW; << 1280 } << 1281 return 0; << 1282 } << 1283 << 1284 static inline int check_shash_op(const char * << 1285 const char * << 1286 const struct << 1287 { << 1288 if (err) << 1289 pr_err("alg: shash: %s %s() f << 1290 driver, op, err, vec_n << 1291 return err; << 1292 } << 1293 415 1294 /* Test one hash test vector in one configura !! 416 if (template[i].ksize) { 1295 static int test_shash_vec_cfg(const struct ha !! 417 if (template[i].ksize > MAX_KEYLEN) { 1296 const char *vec !! 418 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 1297 const struct te !! 419 j, algo, template[i].ksize, MAX_KEYLEN); 1298 struct shash_de !! 420 ret = -EINVAL; 1299 struct test_sgl !! 421 goto out; 1300 u8 *hashstate) !! 422 } 1301 { !! 
423 crypto_ahash_clear_flags(tfm, ~0); 1302 struct crypto_shash *tfm = desc->tfm; !! 424 memcpy(key, template[i].key, template[i].ksize); 1303 const unsigned int digestsize = crypt !! 425 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 1304 const unsigned int statesize = crypto !! 426 1305 const char *driver = crypto_shash_dri !! 427 if (ret) { 1306 const struct test_sg_division *divs[X !! 428 printk(KERN_ERR "alg: hash: setkey " 1307 unsigned int i; !! 429 "failed on chunking test %d " 1308 u8 result[HASH_MAX_DIGESTSIZE + TESTM !! 430 "for %s: ret=%d\n", j, algo, -ret); 1309 int err; !! 431 goto out; 1310 !! 432 } 1311 /* Set the key, if specified */ << 1312 if (vec->ksize) { << 1313 err = do_setkey(crypto_shash_ << 1314 cfg, 0); << 1315 if (err) { << 1316 if (err == vec->setke << 1317 return 0; << 1318 pr_err("alg: shash: % << 1319 driver, vec_na << 1320 crypto_shash_g << 1321 return err; << 1322 } << 1323 if (vec->setkey_error) { << 1324 pr_err("alg: shash: % << 1325 driver, vec_na << 1326 return -EINVAL; << 1327 } 433 } 1328 } << 1329 434 1330 /* Build the scatterlist for the sour !! 435 ahash_request_set_crypt(req, sg, result, template[i].psize); 1331 err = build_hash_sglist(tsgl, vec, cf !! 436 ret = crypto_wait_req(crypto_ahash_digest(req), &wait); 1332 if (err) { !! 437 if (ret) { 1333 pr_err("alg: shash: %s: error !! 438 pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n", 1334 driver, vec_name, cfg- !! 439 j, algo, -ret); 1335 return err; !! 440 goto out; 1336 } << 1337 << 1338 /* Do the actual hashing */ << 1339 << 1340 testmgr_poison(desc->__ctx, crypto_sh << 1341 testmgr_poison(result, digestsize + T << 1342 << 1343 if (cfg->finalization_type == FINALIZ << 1344 vec->digest_error) { << 1345 /* Just using digest() */ << 1346 if (tsgl->nents != 1) << 1347 return 0; << 1348 if (cfg->nosimd) << 1349 crypto_disable_simd_f << 1350 err = crypto_shash_digest(des << 1351 tsg << 1352 if (cfg->nosimd) << 1353 crypto_reenable_simd_ << 1354 if (err) { << 1355 if (err == vec->diges << 1356 return 0; << 1357 pr_err("alg: shash: % << 1358 driver, vec_na << 1359 cfg->name); << 1360 return err; << 1361 } << 1362 if (vec->digest_error) { << 1363 pr_err("alg: shash: % << 1364 driver, vec_na << 1365 return -EINVAL; << 1366 } 441 } 1367 goto result_ready; << 1368 } << 1369 << 1370 /* Using init(), zero or more update( << 1371 << 1372 if (cfg->nosimd) << 1373 crypto_disable_simd_for_test( << 1374 err = crypto_shash_init(desc); << 1375 if (cfg->nosimd) << 1376 crypto_reenable_simd_for_test << 1377 err = check_shash_op("init", err, dri << 1378 if (err) << 1379 return err; << 1380 442 1381 for (i = 0; i < tsgl->nents; i++) { !! 443 if (memcmp(result, template[i].digest, 1382 if (i + 1 == tsgl->nents && !! 444 crypto_ahash_digestsize(tfm))) { 1383 cfg->finalization_type == !! 445 printk(KERN_ERR "alg: hash: Chunking test %d " 1384 if (divs[i]->nosimd) !! 446 "failed for %s\n", j, algo); 1385 crypto_disabl !! 447 hexdump(result, crypto_ahash_digestsize(tfm)); 1386 err = crypto_shash_fi !! 448 ret = -EINVAL; 1387 !! 
449 goto out; 1388 if (divs[i]->nosimd) << 1389 crypto_reenab << 1390 err = check_shash_op( << 1391 << 1392 if (err) << 1393 return err; << 1394 goto result_ready; << 1395 } << 1396 if (divs[i]->nosimd) << 1397 crypto_disable_simd_f << 1398 err = crypto_shash_update(des << 1399 tsg << 1400 if (divs[i]->nosimd) << 1401 crypto_reenable_simd_ << 1402 err = check_shash_op("update" << 1403 if (err) << 1404 return err; << 1405 if (divs[i]->flush_type == FL << 1406 /* Test ->export() an << 1407 testmgr_poison(hashst << 1408 TESTMG << 1409 err = crypto_shash_ex << 1410 err = check_shash_op( << 1411 << 1412 if (err) << 1413 return err; << 1414 if (!testmgr_is_poiso << 1415 << 1416 pr_err("alg: << 1417 driver << 1418 return -EOVER << 1419 } << 1420 testmgr_poison(desc-> << 1421 err = crypto_shash_im << 1422 err = check_shash_op( << 1423 << 1424 if (err) << 1425 return err; << 1426 } 450 } 1427 } 451 } 1428 452 1429 if (cfg->nosimd) !! 453 /* partial update exercise */ 1430 crypto_disable_simd_for_test( !! 454 j = 0; 1431 err = crypto_shash_final(desc, result !! 455 for (i = 0; i < tcount; i++) { 1432 if (cfg->nosimd) !! 456 /* alignment tests are only done with continuous buffers */ 1433 crypto_reenable_simd_for_test !! 457 if (align_offset != 0) 1434 err = check_shash_op("final", err, dr !! 458 break; 1435 if (err) << 1436 return err; << 1437 result_ready: << 1438 return check_hash_result("shash", res << 1439 driver, cfg) << 1440 } << 1441 << 1442 static int do_ahash_op(int (*op)(struct ahash << 1443 struct ahash_request * << 1444 struct crypto_wait *wa << 1445 { << 1446 int err; << 1447 << 1448 if (nosimd) << 1449 crypto_disable_simd_for_test( << 1450 << 1451 err = op(req); << 1452 << 1453 if (nosimd) << 1454 crypto_reenable_simd_for_test << 1455 << 1456 return crypto_wait_req(err, wait); << 1457 } << 1458 459 1459 static int check_nonfinal_ahash_op(const char !! 460 if (template[i].np < 2) 1460 u8 *result !! 461 continue; 1461 const char << 1462 const stru << 1463 { << 1464 if (err) { << 1465 pr_err("alg: ahash: %s %s() f << 1466 driver, op, err, vec_n << 1467 return err; << 1468 } << 1469 if (!testmgr_is_poison(result, digest << 1470 pr_err("alg: ahash: %s %s() u << 1471 driver, op, vec_name, << 1472 return -EINVAL; << 1473 } << 1474 return 0; << 1475 } << 1476 462 1477 /* Test one hash test vector in one configura !! 463 j++; 1478 static int test_ahash_vec_cfg(const struct ha !! 464 memset(result, 0, digest_size); 1479 const char *vec << 1480 const struct te << 1481 struct ahash_re << 1482 struct test_sgl << 1483 u8 *hashstate) << 1484 { << 1485 struct crypto_ahash *tfm = crypto_aha << 1486 const unsigned int digestsize = crypt << 1487 const unsigned int statesize = crypto << 1488 const char *driver = crypto_ahash_dri << 1489 const u32 req_flags = CRYPTO_TFM_REQ_ << 1490 const struct test_sg_division *divs[X << 1491 DECLARE_CRYPTO_WAIT(wait); << 1492 unsigned int i; << 1493 struct scatterlist *pending_sgl; << 1494 unsigned int pending_len; << 1495 u8 result[HASH_MAX_DIGESTSIZE + TESTM << 1496 int err; << 1497 465 1498 /* Set the key, if specified */ !! 466 ret = -EINVAL; 1499 if (vec->ksize) { !! 467 hash_buff = xbuf[0]; 1500 err = do_setkey(crypto_ahash_ !! 468 memcpy(hash_buff, template[i].plaintext, 1501 cfg, 0); !! 469 template[i].tap[0]); 1502 if (err) { !! 470 sg_init_one(&sg[0], hash_buff, template[i].tap[0]); 1503 if (err == vec->setke !! 471 1504 return 0; !! 472 if (template[i].ksize) { 1505 pr_err("alg: ahash: % !! 473 crypto_ahash_clear_flags(tfm, ~0); 1506 driver, vec_na !! 
474 if (template[i].ksize > MAX_KEYLEN) { 1507 crypto_ahash_g !! 475 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 1508 return err; !! 476 j, algo, template[i].ksize, MAX_KEYLEN); 1509 } !! 477 ret = -EINVAL; 1510 if (vec->setkey_error) { !! 478 goto out; 1511 pr_err("alg: ahash: % !! 479 } 1512 driver, vec_na !! 480 memcpy(key, template[i].key, template[i].ksize); 1513 return -EINVAL; !! 481 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); >> 482 if (ret) { >> 483 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n", >> 484 j, algo, -ret); >> 485 goto out; >> 486 } 1514 } 487 } 1515 } << 1516 << 1517 /* Build the scatterlist for the sour << 1518 err = build_hash_sglist(tsgl, vec, cf << 1519 if (err) { << 1520 pr_err("alg: ahash: %s: error << 1521 driver, vec_name, cfg- << 1522 return err; << 1523 } << 1524 << 1525 /* Do the actual hashing */ << 1526 488 1527 testmgr_poison(req->__ctx, crypto_aha !! 489 ahash_request_set_crypt(req, sg, result, template[i].tap[0]); 1528 testmgr_poison(result, digestsize + T !! 490 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 1529 !! 491 if (ret) { 1530 if (cfg->finalization_type == FINALIZ !! 492 pr_err("alg: hash: init failed on test %d for %s: ret=%d\n", 1531 vec->digest_error) { !! 493 j, algo, -ret); 1532 /* Just using digest() */ !! 494 goto out; 1533 ahash_request_set_callback(re !! 495 } 1534 &w !! 496 ret = crypto_wait_req(crypto_ahash_update(req), &wait); 1535 ahash_request_set_crypt(req, !! 497 if (ret) { 1536 err = do_ahash_op(crypto_ahas !! 498 pr_err("alg: hash: update failed on test %d for %s: ret=%d\n", 1537 if (err) { !! 499 j, algo, -ret); 1538 if (err == vec->diges !! 500 goto out; 1539 return 0; << 1540 pr_err("alg: ahash: % << 1541 driver, vec_na << 1542 cfg->name); << 1543 return err; << 1544 } << 1545 if (vec->digest_error) { << 1546 pr_err("alg: ahash: % << 1547 driver, vec_na << 1548 return -EINVAL; << 1549 } 501 } 1550 goto result_ready; << 1551 } << 1552 << 1553 /* Using init(), zero or more update( << 1554 << 1555 ahash_request_set_callback(req, req_f << 1556 ahash_request_set_crypt(req, NULL, re << 1557 err = do_ahash_op(crypto_ahash_init, << 1558 err = check_nonfinal_ahash_op("init", << 1559 driver, << 1560 if (err) << 1561 return err; << 1562 502 1563 pending_sgl = NULL; !! 503 temp = template[i].tap[0]; 1564 pending_len = 0; !! 504 for (k = 1; k < template[i].np; k++) { 1565 for (i = 0; i < tsgl->nents; i++) { !! 505 ret = ahash_partial_update(&req, tfm, &template[i], 1566 if (divs[i]->flush_type != FL !! 506 hash_buff, k, temp, &sg[0], algo, result, 1567 pending_sgl != NULL) { !! 507 &wait); 1568 /* update() with the !! 508 if (ret) { 1569 ahash_request_set_cal !! 509 pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n", 1570 !! 510 j, algo, -ret); 1571 ahash_request_set_cry !! 511 goto out_noreq; 1572 !! 512 } 1573 err = do_ahash_op(cry !! 
513 temp += template[i].tap[k]; 1574 div << 1575 err = check_nonfinal_ << 1576 << 1577 << 1578 if (err) << 1579 return err; << 1580 pending_sgl = NULL; << 1581 pending_len = 0; << 1582 } << 1583 if (divs[i]->flush_type == FL << 1584 /* Test ->export() an << 1585 testmgr_poison(hashst << 1586 TESTMG << 1587 err = crypto_ahash_ex << 1588 err = check_nonfinal_ << 1589 << 1590 << 1591 if (err) << 1592 return err; << 1593 if (!testmgr_is_poiso << 1594 << 1595 pr_err("alg: << 1596 driver << 1597 return -EOVER << 1598 } << 1599 << 1600 testmgr_poison(req->_ << 1601 err = crypto_ahash_im << 1602 err = check_nonfinal_ << 1603 << 1604 << 1605 if (err) << 1606 return err; << 1607 } << 1608 if (pending_sgl == NULL) << 1609 pending_sgl = &tsgl-> << 1610 pending_len += tsgl->sgl[i].l << 1611 } << 1612 << 1613 ahash_request_set_callback(req, req_f << 1614 ahash_request_set_crypt(req, pending_ << 1615 if (cfg->finalization_type == FINALIZ << 1616 /* finish with update() and f << 1617 err = do_ahash_op(crypto_ahas << 1618 err = check_nonfinal_ahash_op << 1619 << 1620 if (err) << 1621 return err; << 1622 err = do_ahash_op(crypto_ahas << 1623 if (err) { << 1624 pr_err("alg: ahash: % << 1625 driver, err, v << 1626 return err; << 1627 } 514 } 1628 } else { !! 515 ret = crypto_wait_req(crypto_ahash_final(req), &wait); 1629 /* finish with finup() */ !! 516 if (ret) { 1630 err = do_ahash_op(crypto_ahas !! 517 pr_err("alg: hash: final failed on test %d for %s: ret=%d\n", 1631 if (err) { !! 518 j, algo, -ret); 1632 pr_err("alg: ahash: % !! 519 goto out; 1633 driver, err, v !! 520 } 1634 return err; !! 521 if (memcmp(result, template[i].digest, >> 522 crypto_ahash_digestsize(tfm))) { >> 523 pr_err("alg: hash: Partial Test %d failed for %s\n", >> 524 j, algo); >> 525 hexdump(result, crypto_ahash_digestsize(tfm)); >> 526 ret = -EINVAL; >> 527 goto out; 1635 } 528 } 1636 } 529 } 1637 530 1638 result_ready: !! 531 ret = 0; 1639 return check_hash_result("ahash", res << 1640 driver, cfg) << 1641 } << 1642 << 1643 static int test_hash_vec_cfg(const struct has << 1644 const char *vec_ << 1645 const struct tes << 1646 struct ahash_req << 1647 struct shash_des << 1648 struct test_sgli << 1649 u8 *hashstate) << 1650 { << 1651 int err; << 1652 << 1653 /* << 1654 * For algorithms implemented as "sha << 1655 * both the shash and ahash tests. T << 1656 * failures involve less indirection, << 1657 */ << 1658 << 1659 if (desc) { << 1660 err = test_shash_vec_cfg(vec, << 1661 hash << 1662 if (err) << 1663 return err; << 1664 } << 1665 532 1666 return test_ahash_vec_cfg(vec, vec_na !! 533 out: >> 534 ahash_request_free(req); >> 535 out_noreq: >> 536 testmgr_free_buf(xbuf); >> 537 out_nobuf: >> 538 kfree(key); >> 539 kfree(result); >> 540 return ret; 1667 } 541 } 1668 542 1669 static int test_hash_vec(const struct hash_te !! 543 static int test_hash(struct crypto_ahash *tfm, 1670 struct ahash_request !! 544 const struct hash_testvec *template, 1671 struct test_sglist * !! 545 unsigned int tcount, bool use_digest) 1672 { 546 { 1673 char vec_name[16]; !! 547 unsigned int alignmask; 1674 unsigned int i; !! 548 int ret; 1675 int err; << 1676 << 1677 sprintf(vec_name, "%u", vec_num); << 1678 << 1679 for (i = 0; i < ARRAY_SIZE(default_ha << 1680 err = test_hash_vec_cfg(vec, << 1681 &defa << 1682 req, << 1683 if (err) << 1684 return err; << 1685 } << 1686 549 1687 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS !! 550 ret = __test_hash(tfm, template, tcount, use_digest, 0); 1688 if (!noextratests) { !! 551 if (ret) 1689 struct rnd_state rng; !! 
552 return ret; 1690 struct testvec_config cfg; << 1691 char cfgname[TESTVEC_CONFIG_N << 1692 553 1693 init_rnd_state(&rng); !! 554 /* test unaligned buffers, check with one byte offset */ >> 555 ret = __test_hash(tfm, template, tcount, use_digest, 1); >> 556 if (ret) >> 557 return ret; 1694 558 1695 for (i = 0; i < fuzz_iteratio !! 559 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1696 generate_random_testv !! 560 if (alignmask) { 1697 !! 561 /* Check if alignment mask for tfm is correctly set. */ 1698 err = test_hash_vec_c !! 562 ret = __test_hash(tfm, template, tcount, use_digest, 1699 !! 563 alignmask + 1); 1700 if (err) !! 564 if (ret) 1701 return err; !! 565 return ret; 1702 cond_resched(); << 1703 } << 1704 } 566 } 1705 #endif << 1706 return 0; << 1707 } << 1708 567 1709 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS !! 568 return 0; 1710 /* << 1711 * Generate a hash test vector from the given << 1712 * Assumes the buffers in 'vec' were already << 1713 */ << 1714 static void generate_random_hash_testvec(stru << 1715 stru << 1716 stru << 1717 unsi << 1718 unsi << 1719 char << 1720 { << 1721 /* Data */ << 1722 vec->psize = generate_random_length(r << 1723 generate_random_bytes(rng, (u8 *)vec- << 1724 << 1725 /* << 1726 * Key: length in range [1, maxkeysiz << 1727 * If algorithm is unkeyed, then maxk << 1728 */ << 1729 vec->setkey_error = 0; << 1730 vec->ksize = 0; << 1731 if (maxkeysize) { << 1732 vec->ksize = maxkeysize; << 1733 if (prandom_u32_below(rng, 4) << 1734 vec->ksize = prandom_ << 1735 generate_random_bytes(rng, (u << 1736 << 1737 vec->setkey_error = crypto_sh << 1738 << 1739 /* If the key couldn't be set << 1740 if (vec->setkey_error) << 1741 goto done; << 1742 } << 1743 << 1744 /* Digest */ << 1745 vec->digest_error = crypto_shash_dige << 1746 << 1747 done: << 1748 snprintf(name, max_namelen, "\"random << 1749 vec->psize, vec->ksize); << 1750 } 569 } 1751 570 1752 /* !! 571 static int __test_aead(struct crypto_aead *tfm, int enc, 1753 * Test the hash algorithm represented by @re !! 572 const struct aead_testvec *template, unsigned int tcount, 1754 * implementation, if one is available. !! 573 const bool diff_dst, const int align_offset) 1755 */ << 1756 static int test_hash_vs_generic_impl(const ch << 1757 unsigned << 1758 struct a << 1759 struct s << 1760 struct t << 1761 u8 *hash << 1762 { 574 { 1763 struct crypto_ahash *tfm = crypto_aha !! 575 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 1764 const unsigned int digestsize = crypt !! 576 unsigned int i, j, k, n, temp; 1765 const unsigned int blocksize = crypto !! 577 int ret = -ENOMEM; 1766 const unsigned int maxdatasize = (2 * !! 578 char *q; 1767 const char *algname = crypto_hash_alg !! 579 char *key; 1768 const char *driver = crypto_ahash_dri !! 580 struct aead_request *req; 1769 struct rnd_state rng; !! 581 struct scatterlist *sg; 1770 char _generic_driver[CRYPTO_MAX_ALG_N !! 582 struct scatterlist *sgout; 1771 struct crypto_shash *generic_tfm = NU !! 583 const char *e, *d; 1772 struct shash_desc *generic_desc = NUL !! 584 struct crypto_wait wait; 1773 unsigned int i; !! 585 unsigned int authsize, iv_len; 1774 struct hash_testvec vec = { 0 }; !! 586 void *input; 1775 char vec_name[64]; !! 587 void *output; 1776 struct testvec_config *cfg; !! 588 void *assoc; 1777 char cfgname[TESTVEC_CONFIG_NAMELEN]; !! 589 char *iv; 1778 int err; !! 590 char *xbuf[XBUFSIZE]; 1779 !! 591 char *xoutbuf[XBUFSIZE]; 1780 if (noextratests) !! 
592 char *axbuf[XBUFSIZE]; 1781 return 0; << 1782 << 1783 init_rnd_state(&rng); << 1784 593 1785 if (!generic_driver) { /* Use default !! 594 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 1786 err = build_generic_driver_na !! 595 if (!iv) 1787 if (err) !! 596 return ret; 1788 return err; !! 597 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 1789 generic_driver = _generic_dri !! 598 if (!key) 1790 } !! 599 goto out_noxbuf; >> 600 if (testmgr_alloc_buf(xbuf)) >> 601 goto out_noxbuf; >> 602 if (testmgr_alloc_buf(axbuf)) >> 603 goto out_noaxbuf; >> 604 if (diff_dst && testmgr_alloc_buf(xoutbuf)) >> 605 goto out_nooutbuf; >> 606 >> 607 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ >> 608 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL); >> 609 if (!sg) >> 610 goto out_nosg; >> 611 sgout = &sg[16]; 1791 612 1792 if (strcmp(generic_driver, driver) == !! 613 if (diff_dst) 1793 return 0; !! 614 d = "-ddst"; >> 615 else >> 616 d = ""; 1794 617 1795 generic_tfm = crypto_alloc_shash(gene !! 618 if (enc == ENCRYPT) 1796 if (IS_ERR(generic_tfm)) { !! 619 e = "encryption"; 1797 err = PTR_ERR(generic_tfm); !! 620 else 1798 if (err == -ENOENT) { !! 621 e = "decryption"; 1799 pr_warn("alg: hash: s << 1800 driver, gener << 1801 return 0; << 1802 } << 1803 pr_err("alg: hash: error allo << 1804 generic_driver, algnam << 1805 return err; << 1806 } << 1807 622 1808 cfg = kzalloc(sizeof(*cfg), GFP_KERNE !! 623 crypto_init_wait(&wait); 1809 if (!cfg) { << 1810 err = -ENOMEM; << 1811 goto out; << 1812 } << 1813 624 1814 generic_desc = kzalloc(sizeof(*desc) !! 625 req = aead_request_alloc(tfm, GFP_KERNEL); 1815 crypto_shash_d !! 626 if (!req) { 1816 if (!generic_desc) { !! 627 pr_err("alg: aead%s: Failed to allocate request for %s\n", 1817 err = -ENOMEM; !! 628 d, algo); 1818 goto out; 629 goto out; 1819 } 630 } 1820 generic_desc->tfm = generic_tfm; << 1821 631 1822 /* Check the algorithm properties for !! 632 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, >> 633 crypto_req_done, &wait); 1823 634 1824 if (digestsize != crypto_shash_digest !! 635 iv_len = crypto_aead_ivsize(tfm); 1825 pr_err("alg: hash: digestsize << 1826 driver, digestsize, << 1827 crypto_shash_digestsiz << 1828 err = -EINVAL; << 1829 goto out; << 1830 } << 1831 636 1832 if (blocksize != crypto_shash_blocksi !! 637 for (i = 0, j = 0; i < tcount; i++) { 1833 pr_err("alg: hash: blocksize !! 638 if (template[i].np) 1834 driver, blocksize, cry !! 639 continue; 1835 err = -EINVAL; << 1836 goto out; << 1837 } << 1838 << 1839 /* << 1840 * Now generate test vectors using th << 1841 * the other implementation against t << 1842 */ << 1843 640 1844 vec.key = kmalloc(maxkeysize, GFP_KER !! 641 j++; 1845 vec.plaintext = kmalloc(maxdatasize, << 1846 vec.digest = kmalloc(digestsize, GFP_ << 1847 if (!vec.key || !vec.plaintext || !ve << 1848 err = -ENOMEM; << 1849 goto out; << 1850 } << 1851 642 1852 for (i = 0; i < fuzz_iterations * 8; !! 643 /* some templates have no input data but they will 1853 generate_random_hash_testvec( !! 644 * touch input 1854 !! 645 */ 1855 !! 646 input = xbuf[0]; 1856 generate_random_testvec_confi !! 647 input += align_offset; 1857 !! 648 assoc = axbuf[0]; 1858 649 1859 err = test_hash_vec_cfg(&vec, !! 650 ret = -EINVAL; 1860 req, !! 651 if (WARN_ON(align_offset + template[i].ilen > 1861 if (err) !! 
652 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 1862 goto out; 653 goto out; 1863 cond_resched(); << 1864 } << 1865 err = 0; << 1866 out: << 1867 kfree(cfg); << 1868 kfree(vec.key); << 1869 kfree(vec.plaintext); << 1870 kfree(vec.digest); << 1871 crypto_free_shash(generic_tfm); << 1872 kfree_sensitive(generic_desc); << 1873 return err; << 1874 } << 1875 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS * << 1876 static int test_hash_vs_generic_impl(const ch << 1877 unsigned << 1878 struct a << 1879 struct s << 1880 struct t << 1881 u8 *hash << 1882 { << 1883 return 0; << 1884 } << 1885 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 1886 << 1887 static int alloc_shash(const char *driver, u3 << 1888 struct crypto_shash ** << 1889 struct shash_desc **de << 1890 { << 1891 struct crypto_shash *tfm; << 1892 struct shash_desc *desc; << 1893 << 1894 tfm = crypto_alloc_shash(driver, type << 1895 if (IS_ERR(tfm)) { << 1896 if (PTR_ERR(tfm) == -ENOENT) << 1897 /* << 1898 * This algorithm is << 1899 * API, not the shash << 1900 */ << 1901 return 0; << 1902 } << 1903 pr_err("alg: hash: failed to << 1904 driver, PTR_ERR(tfm)); << 1905 return PTR_ERR(tfm); << 1906 } << 1907 << 1908 desc = kmalloc(sizeof(*desc) + crypto << 1909 if (!desc) { << 1910 crypto_free_shash(tfm); << 1911 return -ENOMEM; << 1912 } << 1913 desc->tfm = tfm; << 1914 << 1915 *tfm_ret = tfm; << 1916 *desc_ret = desc; << 1917 return 0; << 1918 } << 1919 << 1920 static int __alg_test_hash(const struct hash_ << 1921 unsigned int num_v << 1922 u32 type, u32 mask << 1923 const char *generi << 1924 { << 1925 struct crypto_ahash *atfm = NULL; << 1926 struct ahash_request *req = NULL; << 1927 struct crypto_shash *stfm = NULL; << 1928 struct shash_desc *desc = NULL; << 1929 struct test_sglist *tsgl = NULL; << 1930 u8 *hashstate = NULL; << 1931 unsigned int statesize; << 1932 unsigned int i; << 1933 int err; << 1934 << 1935 /* << 1936 * Always test the ahash API. This w << 1937 * algorithm is implemented as ahash << 1938 */ << 1939 << 1940 atfm = crypto_alloc_ahash(driver, typ << 1941 if (IS_ERR(atfm)) { << 1942 if (PTR_ERR(atfm) == -ENOENT) << 1943 return 0; << 1944 pr_err("alg: hash: failed to << 1945 driver, PTR_ERR(atfm)) << 1946 return PTR_ERR(atfm); << 1947 } << 1948 driver = crypto_ahash_driver_name(atf << 1949 << 1950 req = ahash_request_alloc(atfm, GFP_K << 1951 if (!req) { << 1952 pr_err("alg: hash: failed to << 1953 driver); << 1954 err = -ENOMEM; << 1955 goto out; << 1956 } << 1957 654 1958 /* !! 655 memcpy(input, template[i].input, template[i].ilen); 1959 * If available also test the shash A !! 656 memcpy(assoc, template[i].assoc, template[i].alen); 1960 * be missed by testing the ahash API !! 657 if (template[i].iv) 1961 */ !! 658 memcpy(iv, template[i].iv, iv_len); 1962 err = alloc_shash(driver, type, mask, !! 659 else 1963 if (err) !! 660 memset(iv, 0, iv_len); 1964 goto out; << 1965 661 1966 tsgl = kmalloc(sizeof(*tsgl), GFP_KER !! 662 crypto_aead_clear_flags(tfm, ~0); 1967 if (!tsgl || init_test_sglist(tsgl) ! !! 663 if (template[i].wk) 1968 pr_err("alg: hash: failed to !! 664 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 1969 driver); << 1970 kfree(tsgl); << 1971 tsgl = NULL; << 1972 err = -ENOMEM; << 1973 goto out; << 1974 } << 1975 665 1976 statesize = crypto_ahash_statesize(at !! 666 if (template[i].klen > MAX_KEYLEN) { 1977 if (stfm) !! 667 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 1978 statesize = max(statesize, cr !! 
668 d, j, algo, template[i].klen, 1979 hashstate = kmalloc(statesize + TESTM !! 669 MAX_KEYLEN); 1980 if (!hashstate) { !! 670 ret = -EINVAL; 1981 pr_err("alg: hash: failed to !! 671 goto out; 1982 driver); !! 672 } 1983 err = -ENOMEM; !! 673 memcpy(key, template[i].key, template[i].klen); 1984 goto out; << 1985 } << 1986 674 1987 for (i = 0; i < num_vecs; i++) { !! 675 ret = crypto_aead_setkey(tfm, key, template[i].klen); 1988 if (fips_enabled && vecs[i].f !! 676 if (template[i].fail == !ret) { >> 677 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", >> 678 d, j, algo, crypto_aead_get_flags(tfm)); >> 679 goto out; >> 680 } else if (ret) 1989 continue; 681 continue; 1990 682 1991 err = test_hash_vec(&vecs[i], !! 683 authsize = abs(template[i].rlen - template[i].ilen); 1992 if (err) !! 684 ret = crypto_aead_setauthsize(tfm, authsize); >> 685 if (ret) { >> 686 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", >> 687 d, authsize, j, algo); 1993 goto out; 688 goto out; 1994 cond_resched(); !! 689 } 1995 } << 1996 err = test_hash_vs_generic_impl(gener << 1997 desc, << 1998 out: << 1999 kfree(hashstate); << 2000 if (tsgl) { << 2001 destroy_test_sglist(tsgl); << 2002 kfree(tsgl); << 2003 } << 2004 kfree(desc); << 2005 crypto_free_shash(stfm); << 2006 ahash_request_free(req); << 2007 crypto_free_ahash(atfm); << 2008 return err; << 2009 } << 2010 690 2011 static int alg_test_hash(const struct alg_tes !! 691 k = !!template[i].alen; 2012 u32 type, u32 mask) !! 692 sg_init_table(sg, k + 1); 2013 { !! 693 sg_set_buf(&sg[0], assoc, template[i].alen); 2014 const struct hash_testvec *template = !! 694 sg_set_buf(&sg[k], input, 2015 unsigned int tcount = desc->suite.has !! 695 template[i].ilen + (enc ? authsize : 0)); 2016 unsigned int nr_unkeyed, nr_keyed; !! 696 output = input; 2017 unsigned int maxkeysize = 0; << 2018 int err; << 2019 697 2020 /* !! 698 if (diff_dst) { 2021 * For OPTIONAL_KEY algorithms, we ha !! 699 sg_init_table(sgout, k + 1); 2022 * first, before setting a key on the !! 700 sg_set_buf(&sgout[0], assoc, template[i].alen); 2023 * require that the unkeyed test vect << 2024 */ << 2025 701 2026 for (nr_unkeyed = 0; nr_unkeyed < tco !! 702 output = xoutbuf[0]; 2027 if (template[nr_unkeyed].ksiz !! 703 output += align_offset; 2028 break; !! 704 sg_set_buf(&sgout[k], output, 2029 } !! 705 template[i].rlen + (enc ? 0 : authsize)); 2030 for (nr_keyed = 0; nr_unkeyed + nr_ke << 2031 if (!template[nr_unkeyed + nr << 2032 pr_err("alg: hash: te << 2033 "unkeyed ones << 2034 return -EINVAL; << 2035 } 706 } 2036 maxkeysize = max_t(unsigned i << 2037 template[n << 2038 } << 2039 << 2040 err = 0; << 2041 if (nr_unkeyed) { << 2042 err = __alg_test_hash(templat << 2043 desc->g << 2044 template += nr_unkeyed; << 2045 } << 2046 707 2047 if (!err && nr_keyed) !! 708 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 2048 err = __alg_test_hash(templat !! 709 template[i].ilen, iv); 2049 desc->g << 2050 710 2051 return err; !! 711 aead_request_set_ad(req, template[i].alen); 2052 } << 2053 712 2054 static int test_aead_vec_cfg(int enc, const s !! 713 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req) 2055 const char *vec_ !! 
714 : crypto_aead_decrypt(req), &wait); 2056 const struct tes << 2057 struct aead_requ << 2058 struct cipher_te << 2059 { << 2060 struct crypto_aead *tfm = crypto_aead << 2061 const unsigned int alignmask = crypto << 2062 const unsigned int ivsize = crypto_ae << 2063 const unsigned int authsize = vec->cl << 2064 const char *driver = crypto_aead_driv << 2065 const u32 req_flags = CRYPTO_TFM_REQ_ << 2066 const char *op = enc ? "encryption" : << 2067 DECLARE_CRYPTO_WAIT(wait); << 2068 u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) << 2069 u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ << 2070 cfg->iv_offset + << 2071 (cfg->iv_offset_relative_to_ << 2072 struct kvec input[2]; << 2073 int err; << 2074 << 2075 /* Set the key */ << 2076 if (vec->wk) << 2077 crypto_aead_set_flags(tfm, CR << 2078 else << 2079 crypto_aead_clear_flags(tfm, << 2080 715 2081 err = do_setkey(crypto_aead_setkey, t !! 716 switch (ret) { 2082 cfg, alignmask); !! 717 case 0: 2083 if (err && err != vec->setkey_error) !! 718 if (template[i].novrfy) { 2084 pr_err("alg: aead: %s setkey !! 719 /* verification was supposed to fail */ 2085 driver, vec_name, vec- !! 720 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 2086 crypto_aead_get_flags( !! 721 d, e, j, algo); 2087 return err; !! 722 /* so really, we got a bad message */ 2088 } !! 723 ret = -EBADMSG; 2089 if (!err && vec->setkey_error) { !! 724 goto out; 2090 pr_err("alg: aead: %s setkey !! 725 } 2091 driver, vec_name, vec- !! 726 break; 2092 return -EINVAL; !! 727 case -EBADMSG: 2093 } !! 728 if (template[i].novrfy) >> 729 /* verification failure was expected */ >> 730 continue; >> 731 /* fall through */ >> 732 default: >> 733 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", >> 734 d, e, j, algo, -ret); >> 735 goto out; >> 736 } 2094 737 2095 /* Set the authentication tag size */ !! 738 q = output; 2096 err = crypto_aead_setauthsize(tfm, au !! 739 if (memcmp(q, template[i].result, template[i].rlen)) { 2097 if (err && err != vec->setauthsize_er !! 740 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 2098 pr_err("alg: aead: %s setauth !! 741 d, j, e, algo); 2099 driver, vec_name, vec- !! 742 hexdump(q, template[i].rlen); 2100 return err; !! 743 ret = -EINVAL; 2101 } !! 744 goto out; 2102 if (!err && vec->setauthsize_error) { !! 745 } 2103 pr_err("alg: aead: %s setauth << 2104 driver, vec_name, vec- << 2105 return -EINVAL; << 2106 } 746 } 2107 747 2108 if (vec->setkey_error || vec->setauth !! 748 for (i = 0, j = 0; i < tcount; i++) { 2109 return 0; !! 749 /* alignment tests are only done with continuous buffers */ >> 750 if (align_offset != 0) >> 751 break; 2110 752 2111 /* The IV must be copied to a buffer, !! 753 if (!template[i].np) 2112 if (WARN_ON(ivsize > MAX_IVLEN)) !! 754 continue; 2113 return -EINVAL; << 2114 if (vec->iv) << 2115 memcpy(iv, vec->iv, ivsize); << 2116 else << 2117 memset(iv, 0, ivsize); << 2118 755 2119 /* Build the src/dst scatterlists */ !! 756 j++; 2120 input[0].iov_base = (void *)vec->asso << 2121 input[0].iov_len = vec->alen; << 2122 input[1].iov_base = enc ? (void *)vec << 2123 input[1].iov_len = enc ? vec->plen : << 2124 err = build_cipher_test_sglists(tsgls << 2125 vec-> << 2126 << 2127 vec-> << 2128 << 2129 input << 2130 if (err) { << 2131 pr_err("alg: aead: %s %s: err << 2132 driver, op, vec_name, << 2133 return err; << 2134 } << 2135 757 2136 /* Do the actual encryption or decryp !! 758 if (template[i].iv) 2137 testmgr_poison(req->__ctx, crypto_aea !! 
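/*
 * Editor's illustrative sketch (not from this file): a minimal AEAD
 * encryption showing the source layout the test above builds -- associated
 * data first, then the plaintext -- plus the crypto_wait_req() completion
 * pattern.  The tfm is assumed to have been allocated and keyed elsewhere
 * (e.g. crypto_alloc_aead("gcm(aes)", 0, 0)); all identifier names here are
 * example assumptions.  Buffers must be linear so sg_set_buf() can map them,
 * and "buf" needs room for the plaintext plus the authentication tag.
 */
static int example_aead_seal(struct crypto_aead *tfm,
			     u8 *assoc, unsigned int alen,
			     u8 *buf, unsigned int plen, u8 *iv)
{
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct scatterlist sg[2];
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	sg_init_table(sg, 2);
	sg_set_buf(&sg[0], assoc, alen);
	sg_set_buf(&sg[1], buf, plen + authsize);

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	aead_request_set_crypt(req, sg, sg, plen, iv);	/* in-place */
	aead_request_set_ad(req, alen);

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
	return err;
}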
759 memcpy(iv, template[i].iv, iv_len); 2138 aead_request_set_callback(req, req_fl << 2139 aead_request_set_crypt(req, tsgls->sr << 2140 enc ? vec->ple << 2141 aead_request_set_ad(req, vec->alen); << 2142 if (cfg->nosimd) << 2143 crypto_disable_simd_for_test( << 2144 err = enc ? crypto_aead_encrypt(req) << 2145 if (cfg->nosimd) << 2146 crypto_reenable_simd_for_test << 2147 err = crypto_wait_req(err, &wait); << 2148 << 2149 /* Check that the algorithm didn't ov << 2150 if (req->cryptlen != (enc ? vec->plen << 2151 req->assoclen != vec->alen || << 2152 req->iv != iv || << 2153 req->src != tsgls->src.sgl_ptr || << 2154 req->dst != tsgls->dst.sgl_ptr || << 2155 crypto_aead_reqtfm(req) != tfm || << 2156 req->base.complete != crypto_req_ << 2157 req->base.flags != req_flags || << 2158 req->base.data != &wait) { << 2159 pr_err("alg: aead: %s %s corr << 2160 driver, op, vec_name, << 2161 if (req->cryptlen != (enc ? v << 2162 pr_err("alg: aead: ch << 2163 if (req->assoclen != vec->ale << 2164 pr_err("alg: aead: ch << 2165 if (req->iv != iv) << 2166 pr_err("alg: aead: ch << 2167 if (req->src != tsgls->src.sg << 2168 pr_err("alg: aead: ch << 2169 if (req->dst != tsgls->dst.sg << 2170 pr_err("alg: aead: ch << 2171 if (crypto_aead_reqtfm(req) ! << 2172 pr_err("alg: aead: ch << 2173 if (req->base.complete != cry << 2174 pr_err("alg: aead: ch << 2175 if (req->base.flags != req_fl << 2176 pr_err("alg: aead: ch << 2177 if (req->base.data != &wait) << 2178 pr_err("alg: aead: ch << 2179 return -EINVAL; << 2180 } << 2181 if (is_test_sglist_corrupted(&tsgls-> << 2182 pr_err("alg: aead: %s %s corr << 2183 driver, op, vec_name, << 2184 return -EINVAL; << 2185 } << 2186 if (tsgls->dst.sgl_ptr != tsgls->src. << 2187 is_test_sglist_corrupted(&tsgls-> << 2188 pr_err("alg: aead: %s %s corr << 2189 driver, op, vec_name, << 2190 return -EINVAL; << 2191 } << 2192 << 2193 /* Check for unexpected success or fa << 2194 if ((err == 0 && vec->novrfy) || << 2195 (err != vec->crypt_error && !(err << 2196 char expected_error[32]; << 2197 << 2198 if (vec->novrfy && << 2199 vec->crypt_error != 0 && << 2200 sprintf(expected_erro << 2201 vec->crypt_er << 2202 else if (vec->novrfy) << 2203 sprintf(expected_erro << 2204 else 760 else 2205 sprintf(expected_erro !! 761 memset(iv, 0, MAX_IVLEN); 2206 if (err) { << 2207 pr_err("alg: aead: %s << 2208 driver, op, ve << 2209 cfg->name); << 2210 return err; << 2211 } << 2212 pr_err("alg: aead: %s %s unex << 2213 driver, op, vec_name, << 2214 return -EINVAL; << 2215 } << 2216 if (err) /* Expectedly failed. */ << 2217 return 0; << 2218 << 2219 /* Check for the correct output (ciph << 2220 err = verify_correct_output(&tsgls->d << 2221 enc ? 
vec << 2222 vec->alen << 2223 enc || cf << 2224 if (err == -EOVERFLOW) { << 2225 pr_err("alg: aead: %s %s over << 2226 driver, op, vec_name, << 2227 return err; << 2228 } << 2229 if (err) { << 2230 pr_err("alg: aead: %s %s test << 2231 driver, op, vec_name, << 2232 return err; << 2233 } << 2234 << 2235 return 0; << 2236 } << 2237 << 2238 static int test_aead_vec(int enc, const struc << 2239 unsigned int vec_num << 2240 struct cipher_test_s << 2241 { << 2242 char vec_name[16]; << 2243 unsigned int i; << 2244 int err; << 2245 << 2246 if (enc && vec->novrfy) << 2247 return 0; << 2248 << 2249 sprintf(vec_name, "%u", vec_num); << 2250 << 2251 for (i = 0; i < ARRAY_SIZE(default_ci << 2252 err = test_aead_vec_cfg(enc, << 2253 &defa << 2254 req, << 2255 if (err) << 2256 return err; << 2257 } << 2258 << 2259 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 2260 if (!noextratests) { << 2261 struct rnd_state rng; << 2262 struct testvec_config cfg; << 2263 char cfgname[TESTVEC_CONFIG_N << 2264 762 2265 init_rnd_state(&rng); !! 763 crypto_aead_clear_flags(tfm, ~0); 2266 !! 764 if (template[i].wk) 2267 for (i = 0; i < fuzz_iteratio !! 765 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 2268 generate_random_testv !! 766 if (template[i].klen > MAX_KEYLEN) { 2269 !! 767 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 2270 err = test_aead_vec_c !! 768 d, j, algo, template[i].klen, MAX_KEYLEN); 2271 !! 769 ret = -EINVAL; 2272 if (err) !! 770 goto out; 2273 return err; << 2274 cond_resched(); << 2275 } 771 } 2276 } !! 772 memcpy(key, template[i].key, template[i].klen); 2277 #endif << 2278 return 0; << 2279 } << 2280 << 2281 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 2282 << 2283 struct aead_extra_tests_ctx { << 2284 struct rnd_state rng; << 2285 struct aead_request *req; << 2286 struct crypto_aead *tfm; << 2287 const struct alg_test_desc *test_desc << 2288 struct cipher_test_sglists *tsgls; << 2289 unsigned int maxdatasize; << 2290 unsigned int maxkeysize; << 2291 << 2292 struct aead_testvec vec; << 2293 char vec_name[64]; << 2294 char cfgname[TESTVEC_CONFIG_NAMELEN]; << 2295 struct testvec_config cfg; << 2296 }; << 2297 773 2298 /* !! 774 ret = crypto_aead_setkey(tfm, key, template[i].klen); 2299 * Make at least one random change to a (ciph !! 775 if (template[i].fail == !ret) { 2300 * here means the full ciphertext including t !! 776 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 2301 * authentication tag (and hence also the cip !! 777 d, j, algo, crypto_aead_get_flags(tfm)); 2302 */ !! 778 goto out; 2303 static void mutate_aead_message(struct rnd_st !! 779 } else if (ret) 2304 struct aead_t !! 780 continue; 2305 unsigned int << 2306 { << 2307 const unsigned int aad_tail_size = aa << 2308 const unsigned int authsize = vec->cl << 2309 781 2310 if (prandom_bool(rng) && vec->alen > !! 782 authsize = abs(template[i].rlen - template[i].ilen); 2311 /* Mutate the AAD */ << 2312 flip_random_bit(rng, (u8 *)ve << 2313 vec->alen - a << 2314 if (prandom_bool(rng)) << 2315 return; << 2316 } << 2317 if (prandom_bool(rng)) { << 2318 /* Mutate auth tag (assuming << 2319 flip_random_bit(rng, (u8 *)ve << 2320 } else { << 2321 /* Mutate any part of the cip << 2322 flip_random_bit(rng, (u8 *)ve << 2323 } << 2324 } << 2325 783 2326 /* !! 784 ret = -EINVAL; 2327 * Minimum authentication tag size in bytes a !! 785 sg_init_table(sg, template[i].anp + template[i].np); 2328 * reliably generate inauthentic messages, i. !! 786 if (diff_dst) 2329 * message by chance. !! 
787 sg_init_table(sgout, template[i].anp + template[i].np); 2330 */ << 2331 #define MIN_COLLISION_FREE_AUTHSIZE 8 << 2332 788 2333 static void generate_aead_message(struct rnd_ !! 789 ret = -EINVAL; 2334 struct aead !! 790 for (k = 0, temp = 0; k < template[i].anp; k++) { 2335 const struc !! 791 if (WARN_ON(offset_in_page(IDX[k]) + 2336 struct aead !! 792 template[i].atap[k] > PAGE_SIZE)) 2337 bool prefer !! 793 goto out; 2338 { !! 794 sg_set_buf(&sg[k], 2339 struct crypto_aead *tfm = crypto_aead !! 795 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 2340 const unsigned int ivsize = crypto_ae !! 796 offset_in_page(IDX[k]), 2341 const unsigned int authsize = vec->cl !! 797 template[i].assoc + temp, 2342 const bool inauthentic = (authsize >= !! 798 template[i].atap[k]), 2343 (prefer_inau !! 799 template[i].atap[k]); 2344 prandom_u32 !! 800 if (diff_dst) 2345 !! 801 sg_set_buf(&sgout[k], 2346 /* Generate the AAD. */ !! 802 axbuf[IDX[k] >> PAGE_SHIFT] + 2347 generate_random_bytes(rng, (u8 *)vec- !! 803 offset_in_page(IDX[k]), 2348 if (suite->aad_iv && vec->alen >= ivs !! 804 template[i].atap[k]); 2349 /* Avoid implementation-defin !! 805 temp += template[i].atap[k]; 2350 memcpy((u8 *)vec->assoc + vec !! 806 } 2351 !! 807 2352 if (inauthentic && prandom_bool(rng)) !! 808 for (k = 0, temp = 0; k < template[i].np; k++) { 2353 /* Generate a random cipherte !! 809 if (WARN_ON(offset_in_page(IDX[k]) + 2354 generate_random_bytes(rng, (u !! 810 template[i].tap[k] > PAGE_SIZE)) 2355 } else { !! 811 goto out; 2356 int i = 0; << 2357 struct scatterlist src[2], ds << 2358 u8 iv[MAX_IVLEN]; << 2359 DECLARE_CRYPTO_WAIT(wait); << 2360 << 2361 /* Generate a random plaintex << 2362 sg_init_table(src, 2); << 2363 if (vec->alen) << 2364 sg_set_buf(&src[i++], << 2365 if (vec->plen) { << 2366 generate_random_bytes << 2367 sg_set_buf(&src[i++], << 2368 } << 2369 sg_init_one(&dst, vec->ctext, << 2370 memcpy(iv, vec->iv, ivsize); << 2371 aead_request_set_callback(req << 2372 aead_request_set_crypt(req, s << 2373 aead_request_set_ad(req, vec- << 2374 vec->crypt_error = crypto_wai << 2375 << 2376 /* If encryption failed, we'r << 2377 if (vec->crypt_error != 0) << 2378 return; << 2379 memmove((u8 *)vec->ctext, vec << 2380 if (!inauthentic) << 2381 return; << 2382 /* << 2383 * Mutate the authentic (ciph << 2384 * inauthentic one. << 2385 */ << 2386 mutate_aead_message(rng, vec, << 2387 } << 2388 vec->novrfy = 1; << 2389 if (suite->einval_allowed) << 2390 vec->crypt_error = -EINVAL; << 2391 } << 2392 812 2393 /* !! 813 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 2394 * Generate an AEAD test vector 'vec' using t !! 814 memcpy(q, template[i].input + temp, template[i].tap[k]); 2395 * 'req'. The buffers in 'vec' must already !! 815 sg_set_buf(&sg[template[i].anp + k], 2396 * !! 816 q, template[i].tap[k]); 2397 * If 'prefer_inauthentic' is true, then this !! 817 2398 * test vectors (i.e. vectors with 'vec->novr !! 818 if (diff_dst) { 2399 */ !! 819 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 2400 static void generate_random_aead_testvec(stru !! 
820 offset_in_page(IDX[k]); 2401 stru << 2402 stru << 2403 cons << 2404 unsi << 2405 unsi << 2406 char << 2407 bool << 2408 { << 2409 struct crypto_aead *tfm = crypto_aead << 2410 const unsigned int ivsize = crypto_ae << 2411 const unsigned int maxauthsize = cryp << 2412 unsigned int authsize; << 2413 unsigned int total_len; << 2414 << 2415 /* Key: length in [0, maxkeysize], bu << 2416 vec->klen = maxkeysize; << 2417 if (prandom_u32_below(rng, 4) == 0) << 2418 vec->klen = prandom_u32_below << 2419 generate_random_bytes(rng, (u8 *)vec- << 2420 vec->setkey_error = crypto_aead_setke << 2421 << 2422 /* IV */ << 2423 generate_random_bytes(rng, (u8 *)vec- << 2424 << 2425 /* Tag length: in [0, maxauthsize], b << 2426 authsize = maxauthsize; << 2427 if (prandom_u32_below(rng, 4) == 0) << 2428 authsize = prandom_u32_below( << 2429 if (prefer_inauthentic && authsize < << 2430 authsize = MIN_COLLISION_FREE << 2431 if (WARN_ON(authsize > maxdatasize)) << 2432 authsize = maxdatasize; << 2433 maxdatasize -= authsize; << 2434 vec->setauthsize_error = crypto_aead_ << 2435 << 2436 /* AAD, plaintext, and ciphertext len << 2437 total_len = generate_random_length(rn << 2438 if (prandom_u32_below(rng, 4) == 0) << 2439 vec->alen = 0; << 2440 else << 2441 vec->alen = generate_random_l << 2442 vec->plen = total_len - vec->alen; << 2443 vec->clen = vec->plen + authsize; << 2444 821 2445 /* !! 822 memset(q, 0, template[i].tap[k]); 2446 * Generate the AAD, plaintext, and c << 2447 * key or the authentication tag size << 2448 */ << 2449 vec->novrfy = 0; << 2450 vec->crypt_error = 0; << 2451 if (vec->setkey_error == 0 && vec->se << 2452 generate_aead_message(rng, re << 2453 snprintf(name, max_namelen, << 2454 "\"random: alen=%u plen=%u a << 2455 vec->alen, vec->plen, authsi << 2456 } << 2457 823 2458 static void try_to_generate_inauthentic_testv !! 824 sg_set_buf(&sgout[template[i].anp + k], 2459 struc !! 825 q, template[i].tap[k]); 2460 { !! 826 } 2461 int i; << 2462 << 2463 for (i = 0; i < 10; i++) { << 2464 generate_random_aead_testvec( << 2465 << 2466 << 2467 << 2468 << 2469 if (ctx->vec.novrfy) << 2470 return; << 2471 } << 2472 } << 2473 827 2474 /* !! 828 n = template[i].tap[k]; 2475 * Generate inauthentic test vectors (i.e. ci !! 829 if (k == template[i].np - 1 && enc) 2476 * result of an encryption with the key) and !! 830 n += authsize; 2477 */ !! 831 if (offset_in_page(q) + n < PAGE_SIZE) 2478 static int test_aead_inauthentic_inputs(struc !! 832 q[n] = 0; 2479 { << 2480 unsigned int i; << 2481 int err; << 2482 833 2483 for (i = 0; i < fuzz_iterations * 8; !! 834 temp += template[i].tap[k]; 2484 /* << 2485 * Since this part of the tes << 2486 * implementation to another, << 2487 * test vectors other than in << 2488 * << 2489 * If we're having trouble ge << 2490 * if the algorithm keeps rej << 2491 * retry forever; just contin << 2492 */ << 2493 try_to_generate_inauthentic_t << 2494 if (ctx->vec.novrfy) { << 2495 generate_random_testv << 2496 << 2497 << 2498 err = test_aead_vec_c << 2499 << 2500 << 2501 if (err) << 2502 return err; << 2503 } 835 } 2504 cond_resched(); << 2505 } << 2506 return 0; << 2507 } << 2508 836 2509 /* !! 837 ret = crypto_aead_setauthsize(tfm, authsize); 2510 * Test the AEAD algorithm against the corres !! 838 if (ret) { 2511 * one is available. !! 839 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 2512 */ !! 840 d, authsize, j, algo); 2513 static int test_aead_vs_generic_impl(struct a !! 
841 goto out; 2514 { << 2515 struct crypto_aead *tfm = ctx->tfm; << 2516 const char *algname = crypto_aead_alg << 2517 const char *driver = crypto_aead_driv << 2518 const char *generic_driver = ctx->tes << 2519 char _generic_driver[CRYPTO_MAX_ALG_N << 2520 struct crypto_aead *generic_tfm = NUL << 2521 struct aead_request *generic_req = NU << 2522 unsigned int i; << 2523 int err; << 2524 << 2525 if (!generic_driver) { /* Use default << 2526 err = build_generic_driver_na << 2527 if (err) << 2528 return err; << 2529 generic_driver = _generic_dri << 2530 } << 2531 << 2532 if (strcmp(generic_driver, driver) == << 2533 return 0; << 2534 << 2535 generic_tfm = crypto_alloc_aead(gener << 2536 if (IS_ERR(generic_tfm)) { << 2537 err = PTR_ERR(generic_tfm); << 2538 if (err == -ENOENT) { << 2539 pr_warn("alg: aead: s << 2540 driver, gener << 2541 return 0; << 2542 } 842 } 2543 pr_err("alg: aead: error allo << 2544 generic_driver, algnam << 2545 return err; << 2546 } << 2547 843 2548 generic_req = aead_request_alloc(gene !! 844 if (enc) { 2549 if (!generic_req) { !! 845 if (WARN_ON(sg[template[i].anp + k - 1].offset + 2550 err = -ENOMEM; !! 846 sg[template[i].anp + k - 1].length + 2551 goto out; !! 847 authsize > PAGE_SIZE)) { 2552 } !! 848 ret = -EINVAL; >> 849 goto out; >> 850 } 2553 851 2554 /* Check the algorithm properties for !! 852 if (diff_dst) >> 853 sgout[template[i].anp + k - 1].length += >> 854 authsize; >> 855 sg[template[i].anp + k - 1].length += authsize; >> 856 } 2555 857 2556 if (crypto_aead_maxauthsize(tfm) != !! 858 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 2557 crypto_aead_maxauthsize(generic_t !! 859 template[i].ilen, 2558 pr_err("alg: aead: maxauthsiz !! 860 iv); 2559 driver, crypto_aead_ma << 2560 crypto_aead_maxauthsiz << 2561 err = -EINVAL; << 2562 goto out; << 2563 } << 2564 861 2565 if (crypto_aead_ivsize(tfm) != crypto !! 862 aead_request_set_ad(req, template[i].alen); 2566 pr_err("alg: aead: ivsize for << 2567 driver, crypto_aead_iv << 2568 crypto_aead_ivsize(gen << 2569 err = -EINVAL; << 2570 goto out; << 2571 } << 2572 863 2573 if (crypto_aead_blocksize(tfm) != cry !! 864 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req) 2574 pr_err("alg: aead: blocksize !! 865 : crypto_aead_decrypt(req), &wait); 2575 driver, crypto_aead_bl << 2576 crypto_aead_blocksize( << 2577 err = -EINVAL; << 2578 goto out; << 2579 } << 2580 866 2581 /* !! 867 switch (ret) { 2582 * Now generate test vectors using th !! 868 case 0: 2583 * the other implementation against t !! 869 if (template[i].novrfy) { 2584 */ !! 870 /* verification was supposed to fail */ 2585 for (i = 0; i < fuzz_iterations * 8; !! 871 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 2586 generate_random_aead_testvec( !! 872 d, e, j, algo); 2587 !! 873 /* so really, we got a bad message */ 2588 !! 
874 ret = -EBADMSG; 2589 << 2590 << 2591 generate_random_testvec_confi << 2592 << 2593 << 2594 if (!ctx->vec.novrfy) { << 2595 err = test_aead_vec_c << 2596 << 2597 << 2598 if (err) << 2599 goto out; << 2600 } << 2601 if (ctx->vec.crypt_error == 0 << 2602 err = test_aead_vec_c << 2603 << 2604 << 2605 if (err) << 2606 goto out; 875 goto out; >> 876 } >> 877 break; >> 878 case -EBADMSG: >> 879 if (template[i].novrfy) >> 880 /* verification failure was expected */ >> 881 continue; >> 882 /* fall through */ >> 883 default: >> 884 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", >> 885 d, e, j, algo, -ret); >> 886 goto out; 2607 } 887 } 2608 cond_resched(); << 2609 } << 2610 err = 0; << 2611 out: << 2612 crypto_free_aead(generic_tfm); << 2613 aead_request_free(generic_req); << 2614 return err; << 2615 } << 2616 888 2617 static int test_aead_extra(const struct alg_t !! 889 ret = -EINVAL; 2618 struct aead_reques !! 890 for (k = 0, temp = 0; k < template[i].np; k++) { 2619 struct cipher_test !! 891 if (diff_dst) 2620 { !! 892 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 2621 struct aead_extra_tests_ctx *ctx; !! 893 offset_in_page(IDX[k]); 2622 unsigned int i; !! 894 else 2623 int err; !! 895 q = xbuf[IDX[k] >> PAGE_SHIFT] + >> 896 offset_in_page(IDX[k]); 2624 897 2625 if (noextratests) !! 898 n = template[i].tap[k]; 2626 return 0; !! 899 if (k == template[i].np - 1) >> 900 n += enc ? authsize : -authsize; >> 901 >> 902 if (memcmp(q, template[i].result + temp, n)) { >> 903 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", >> 904 d, j, e, k, algo); >> 905 hexdump(q, n); >> 906 goto out; >> 907 } 2627 908 2628 ctx = kzalloc(sizeof(*ctx), GFP_KERNE !! 909 q += n; 2629 if (!ctx) !! 910 if (k == template[i].np - 1 && !enc) { 2630 return -ENOMEM; !! 911 if (!diff_dst && 2631 init_rnd_state(&ctx->rng); !! 912 memcmp(q, template[i].input + 2632 ctx->req = req; !! 913 temp + n, authsize)) 2633 ctx->tfm = crypto_aead_reqtfm(req); !! 914 n = authsize; 2634 ctx->test_desc = test_desc; !! 915 else 2635 ctx->tsgls = tsgls; !! 916 n = 0; 2636 ctx->maxdatasize = (2 * PAGE_SIZE) - !! 917 } else { 2637 ctx->maxkeysize = 0; !! 918 for (n = 0; offset_in_page(q + n) && q[n]; n++) 2638 for (i = 0; i < test_desc->suite.aead !! 919 ; 2639 ctx->maxkeysize = max_t(unsig !! 920 } 2640 test_ !! 921 if (n) { 2641 !! 922 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 2642 ctx->vec.key = kmalloc(ctx->maxkeysiz !! 923 d, j, e, k, algo, n); 2643 ctx->vec.iv = kmalloc(crypto_aead_ivs !! 924 hexdump(q, n); 2644 ctx->vec.assoc = kmalloc(ctx->maxdata !! 925 goto out; 2645 ctx->vec.ptext = kmalloc(ctx->maxdata !! 926 } 2646 ctx->vec.ctext = kmalloc(ctx->maxdata !! 927 2647 if (!ctx->vec.key || !ctx->vec.iv || !! 928 temp += template[i].tap[k]; 2648 !ctx->vec.ptext || !ctx->vec.ctex !! 929 } 2649 err = -ENOMEM; << 2650 goto out; << 2651 } 930 } 2652 931 2653 err = test_aead_vs_generic_impl(ctx); !! 932 ret = 0; 2654 if (err) << 2655 goto out; << 2656 933 2657 err = test_aead_inauthentic_inputs(ct << 2658 out: 934 out: 2659 kfree(ctx->vec.key); !! 935 aead_request_free(req); 2660 kfree(ctx->vec.iv); !! 936 kfree(sg); 2661 kfree(ctx->vec.assoc); !! 937 out_nosg: 2662 kfree(ctx->vec.ptext); !! 938 if (diff_dst) 2663 kfree(ctx->vec.ctext); !! 939 testmgr_free_buf(xoutbuf); 2664 kfree(ctx); !! 940 out_nooutbuf: 2665 return err; !! 941 testmgr_free_buf(axbuf); 2666 } !! 942 out_noaxbuf: 2667 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS * !! 
943 testmgr_free_buf(xbuf); 2668 static int test_aead_extra(const struct alg_t !! 944 out_noxbuf: 2669 struct aead_reques !! 945 kfree(key); 2670 struct cipher_test !! 946 kfree(iv); 2671 { !! 947 return ret; 2672 return 0; << 2673 } << 2674 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 2675 << 2676 static int test_aead(int enc, const struct ae << 2677 struct aead_request *req << 2678 struct cipher_test_sglis << 2679 { << 2680 unsigned int i; << 2681 int err; << 2682 << 2683 for (i = 0; i < suite->count; i++) { << 2684 err = test_aead_vec(enc, &sui << 2685 if (err) << 2686 return err; << 2687 cond_resched(); << 2688 } << 2689 return 0; << 2690 } 948 } 2691 949 2692 static int alg_test_aead(const struct alg_tes !! 950 static int test_aead(struct crypto_aead *tfm, int enc, 2693 u32 type, u32 mask) !! 951 const struct aead_testvec *template, unsigned int tcount) 2694 { 952 { 2695 const struct aead_test_suite *suite = !! 953 unsigned int alignmask; 2696 struct crypto_aead *tfm; !! 954 int ret; 2697 struct aead_request *req = NULL; << 2698 struct cipher_test_sglists *tsgls = N << 2699 int err; << 2700 955 2701 if (suite->count <= 0) { !! 956 /* test 'dst == src' case */ 2702 pr_err("alg: aead: empty test !! 957 ret = __test_aead(tfm, enc, template, tcount, false, 0); 2703 return -EINVAL; !! 958 if (ret) 2704 } !! 959 return ret; 2705 960 2706 tfm = crypto_alloc_aead(driver, type, !! 961 /* test 'dst != src' case */ 2707 if (IS_ERR(tfm)) { !! 962 ret = __test_aead(tfm, enc, template, tcount, true, 0); 2708 if (PTR_ERR(tfm) == -ENOENT) !! 963 if (ret) 2709 return 0; !! 964 return ret; 2710 pr_err("alg: aead: failed to << 2711 driver, PTR_ERR(tfm)); << 2712 return PTR_ERR(tfm); << 2713 } << 2714 driver = crypto_aead_driver_name(tfm) << 2715 965 2716 req = aead_request_alloc(tfm, GFP_KER !! 966 /* test unaligned buffers, check with one byte offset */ 2717 if (!req) { !! 967 ret = __test_aead(tfm, enc, template, tcount, true, 1); 2718 pr_err("alg: aead: failed to !! 968 if (ret) 2719 driver); !! 969 return ret; 2720 err = -ENOMEM; << 2721 goto out; << 2722 } << 2723 970 2724 tsgls = alloc_cipher_test_sglists(); !! 971 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 2725 if (!tsgls) { !! 972 if (alignmask) { 2726 pr_err("alg: aead: failed to !! 973 /* Check if alignment mask for tfm is correctly set. */ 2727 driver); !! 974 ret = __test_aead(tfm, enc, template, tcount, true, 2728 err = -ENOMEM; !! 975 alignmask + 1); 2729 goto out; !! 976 if (ret) >> 977 return ret; 2730 } 978 } 2731 979 2732 err = test_aead(ENCRYPT, suite, req, !! 
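/*
 * Editor's illustrative sketch (not from this file): the matching "open"
 * direction for the AEAD tests above.  On decryption the request length
 * covers ciphertext plus tag, and -EBADMSG is the one error that
 * specifically means the tag did not verify -- which is why the novrfy
 * cases above expect it rather than treating it as a test failure.  All
 * names are example assumptions; the tfm is assumed keyed elsewhere.
 */
static int example_aead_open(struct crypto_aead *tfm,
			     u8 *assoc, unsigned int alen,
			     u8 *buf, unsigned int clen, u8 *iv)
{
	struct scatterlist sg[2];
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	sg_init_table(sg, 2);
	sg_set_buf(&sg[0], assoc, alen);
	sg_set_buf(&sg[1], buf, clen);

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	/* cryptlen includes the authentication tag on the decrypt side */
	aead_request_set_crypt(req, sg, sg, clen, iv);
	aead_request_set_ad(req, alen);

	err = crypto_wait_req(crypto_aead_decrypt(req), &wait);
	/* err == -EBADMSG: tag mismatch; other errors are real failures */

	aead_request_free(req);
	return err;
}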
980 return 0; 2733 if (err) << 2734 goto out; << 2735 << 2736 err = test_aead(DECRYPT, suite, req, << 2737 if (err) << 2738 goto out; << 2739 << 2740 err = test_aead_extra(desc, req, tsgl << 2741 out: << 2742 free_cipher_test_sglists(tsgls); << 2743 aead_request_free(req); << 2744 crypto_free_aead(tfm); << 2745 return err; << 2746 } 981 } 2747 982 2748 static int test_cipher(struct crypto_cipher * 983 static int test_cipher(struct crypto_cipher *tfm, int enc, 2749 const struct cipher_te 984 const struct cipher_testvec *template, 2750 unsigned int tcount) 985 unsigned int tcount) 2751 { 986 { 2752 const char *algo = crypto_tfm_alg_dri 987 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 2753 unsigned int i, j, k; 988 unsigned int i, j, k; 2754 char *q; 989 char *q; 2755 const char *e; 990 const char *e; 2756 const char *input, *result; << 2757 void *data; 991 void *data; 2758 char *xbuf[XBUFSIZE]; 992 char *xbuf[XBUFSIZE]; 2759 int ret = -ENOMEM; 993 int ret = -ENOMEM; 2760 994 2761 if (testmgr_alloc_buf(xbuf)) 995 if (testmgr_alloc_buf(xbuf)) 2762 goto out_nobuf; 996 goto out_nobuf; 2763 997 2764 if (enc == ENCRYPT) 998 if (enc == ENCRYPT) 2765 e = "encryption"; 999 e = "encryption"; 2766 else 1000 else 2767 e = "decryption"; 1001 e = "decryption"; 2768 1002 2769 j = 0; 1003 j = 0; 2770 for (i = 0; i < tcount; i++) { 1004 for (i = 0; i < tcount; i++) { >> 1005 if (template[i].np) >> 1006 continue; 2771 1007 2772 if (fips_enabled && template[ 1008 if (fips_enabled && template[i].fips_skip) 2773 continue; 1009 continue; 2774 1010 2775 input = enc ? template[i].pt << 2776 result = enc ? template[i].ct << 2777 j++; 1011 j++; 2778 1012 2779 ret = -EINVAL; 1013 ret = -EINVAL; 2780 if (WARN_ON(template[i].len > !! 1014 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 2781 goto out; 1015 goto out; 2782 1016 2783 data = xbuf[0]; 1017 data = xbuf[0]; 2784 memcpy(data, input, template[ !! 1018 memcpy(data, template[i].input, template[i].ilen); 2785 1019 2786 crypto_cipher_clear_flags(tfm 1020 crypto_cipher_clear_flags(tfm, ~0); 2787 if (template[i].wk) 1021 if (template[i].wk) 2788 crypto_cipher_set_fla !! 1022 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 2789 1023 2790 ret = crypto_cipher_setkey(tf 1024 ret = crypto_cipher_setkey(tfm, template[i].key, 2791 te 1025 template[i].klen); 2792 if (ret) { !! 1026 if (template[i].fail == !ret) { 2793 if (ret == template[i !! 1027 printk(KERN_ERR "alg: cipher: setkey failed " 2794 continue; !! 1028 "on test %d for %s: flags=%x\n", j, 2795 pr_err("alg: cipher: !! 1029 algo, crypto_cipher_get_flags(tfm)); 2796 algo, j, templ << 2797 crypto_cipher_ << 2798 goto out; 1030 goto out; 2799 } !! 1031 } else if (ret) 2800 if (template[i].setkey_error) !! 1032 continue; 2801 pr_err("alg: cipher: << 2802 algo, j, templ << 2803 ret = -EINVAL; << 2804 goto out; << 2805 } << 2806 1033 2807 for (k = 0; k < template[i].l !! 1034 for (k = 0; k < template[i].ilen; 2808 k += crypto_cipher_block 1035 k += crypto_cipher_blocksize(tfm)) { 2809 if (enc) 1036 if (enc) 2810 crypto_cipher 1037 crypto_cipher_encrypt_one(tfm, data + k, 2811 1038 data + k); 2812 else 1039 else 2813 crypto_cipher 1040 crypto_cipher_decrypt_one(tfm, data + k, 2814 1041 data + k); 2815 } 1042 } 2816 1043 2817 q = data; 1044 q = data; 2818 if (memcmp(q, result, templat !! 
1045 if (memcmp(q, template[i].result, template[i].rlen)) { 2819 printk(KERN_ERR "alg: 1046 printk(KERN_ERR "alg: cipher: Test %d failed " 2820 "on %s for %s\ 1047 "on %s for %s\n", j, e, algo); 2821 hexdump(q, template[i !! 1048 hexdump(q, template[i].rlen); 2822 ret = -EINVAL; 1049 ret = -EINVAL; 2823 goto out; 1050 goto out; 2824 } 1051 } 2825 } 1052 } 2826 1053 2827 ret = 0; 1054 ret = 0; 2828 1055 2829 out: 1056 out: 2830 testmgr_free_buf(xbuf); 1057 testmgr_free_buf(xbuf); 2831 out_nobuf: 1058 out_nobuf: 2832 return ret; 1059 return ret; 2833 } 1060 } 2834 1061 2835 static int test_skcipher_vec_cfg(int enc, con !! 1062 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 2836 const char * !! 1063 const struct cipher_testvec *template, 2837 const struct !! 1064 unsigned int tcount, 2838 struct skcip !! 1065 const bool diff_dst, const int align_offset) 2839 struct ciphe << 2840 { 1066 { 2841 struct crypto_skcipher *tfm = crypto_ !! 1067 const char *algo = 2842 const unsigned int alignmask = crypto !! 1068 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 2843 const unsigned int ivsize = crypto_sk !! 1069 unsigned int i, j, k, n, temp; 2844 const char *driver = crypto_skcipher_ !! 1070 char *q; 2845 const u32 req_flags = CRYPTO_TFM_REQ_ !! 1071 struct skcipher_request *req; 2846 const char *op = enc ? "encryption" : !! 1072 struct scatterlist sg[8]; 2847 DECLARE_CRYPTO_WAIT(wait); !! 1073 struct scatterlist sgout[8]; 2848 u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) !! 1074 const char *e, *d; 2849 u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ !! 1075 struct crypto_wait wait; 2850 cfg->iv_offset + !! 1076 void *data; 2851 (cfg->iv_offset_relative_to_ !! 1077 char iv[MAX_IVLEN]; 2852 struct kvec input; !! 1078 char *xbuf[XBUFSIZE]; 2853 int err; !! 1079 char *xoutbuf[XBUFSIZE]; >> 1080 int ret = -ENOMEM; >> 1081 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 2854 1082 2855 /* Set the key */ !! 1083 if (testmgr_alloc_buf(xbuf)) 2856 if (vec->wk) !! 1084 goto out_nobuf; 2857 crypto_skcipher_set_flags(tfm !! 1085 >> 1086 if (diff_dst && testmgr_alloc_buf(xoutbuf)) >> 1087 goto out_nooutbuf; >> 1088 >> 1089 if (diff_dst) >> 1090 d = "-ddst"; 2858 else 1091 else 2859 crypto_skcipher_clear_flags(t !! 1092 d = ""; 2860 C << 2861 err = do_setkey(crypto_skcipher_setke << 2862 cfg, alignmask); << 2863 if (err) { << 2864 if (err == vec->setkey_error) << 2865 return 0; << 2866 pr_err("alg: skcipher: %s set << 2867 driver, vec_name, vec- << 2868 crypto_skcipher_get_fl << 2869 return err; << 2870 } << 2871 if (vec->setkey_error) { << 2872 pr_err("alg: skcipher: %s set << 2873 driver, vec_name, vec- << 2874 return -EINVAL; << 2875 } << 2876 1093 2877 /* The IV must be copied to a buffer, !! 1094 if (enc == ENCRYPT) 2878 if (ivsize) { !! 1095 e = "encryption"; 2879 if (WARN_ON(ivsize > MAX_IVLE !! 1096 else 2880 return -EINVAL; !! 1097 e = "decryption"; 2881 if (vec->generates_iv && !enc << 2882 memcpy(iv, vec->iv_ou << 2883 else if (vec->iv) << 2884 memcpy(iv, vec->iv, i << 2885 else << 2886 memset(iv, 0, ivsize) << 2887 } else { << 2888 if (vec->generates_iv) { << 2889 pr_err("alg: skcipher << 2890 driver, vec_na << 2891 return -EINVAL; << 2892 } << 2893 iv = NULL; << 2894 } << 2895 1098 2896 /* Build the src/dst scatterlists */ !! 1099 crypto_init_wait(&wait); 2897 input.iov_base = enc ? 
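/*
 * Editor's illustrative sketch (not from this file): test_cipher() above
 * drives the bare block cipher one block at a time; this shows the same
 * pattern in isolation.  "aes" and the helper name are example
 * assumptions; len must be a multiple of the block size and the buffer is
 * transformed in place.
 */
static int example_block_encrypt(u8 *buf, unsigned int len,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_cipher *tfm;
	unsigned int bs, i;
	int err;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	bs = crypto_cipher_blocksize(tfm);
	if (len % bs) {
		err = -EINVAL;
		goto out;
	}

	/* crypto_cipher_encrypt_one() handles exactly one block per call */
	for (i = 0; i < len; i += bs)
		crypto_cipher_encrypt_one(tfm, buf + i, buf + i);
out:
	crypto_free_cipher(tfm);
	return err;
}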
(void *)vec->p << 2898 input.iov_len = vec->len; << 2899 err = build_cipher_test_sglists(tsgls << 2900 vec-> << 2901 if (err) { << 2902 pr_err("alg: skcipher: %s %s: << 2903 driver, op, vec_name, << 2904 return err; << 2905 } << 2906 1100 2907 /* Do the actual encryption or decryp !! 1101 req = skcipher_request_alloc(tfm, GFP_KERNEL); 2908 testmgr_poison(req->__ctx, crypto_skc !! 1102 if (!req) { 2909 skcipher_request_set_callback(req, re !! 1103 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 2910 skcipher_request_set_crypt(req, tsgls !! 1104 d, algo); 2911 vec->len, !! 1105 goto out; 2912 if (cfg->nosimd) << 2913 crypto_disable_simd_for_test( << 2914 err = enc ? crypto_skcipher_encrypt(r << 2915 if (cfg->nosimd) << 2916 crypto_reenable_simd_for_test << 2917 err = crypto_wait_req(err, &wait); << 2918 << 2919 /* Check that the algorithm didn't ov << 2920 if (req->cryptlen != vec->len || << 2921 req->iv != iv || << 2922 req->src != tsgls->src.sgl_ptr || << 2923 req->dst != tsgls->dst.sgl_ptr || << 2924 crypto_skcipher_reqtfm(req) != tf << 2925 req->base.complete != crypto_req_ << 2926 req->base.flags != req_flags || << 2927 req->base.data != &wait) { << 2928 pr_err("alg: skcipher: %s %s << 2929 driver, op, vec_name, << 2930 if (req->cryptlen != vec->len << 2931 pr_err("alg: skcipher << 2932 if (req->iv != iv) << 2933 pr_err("alg: skcipher << 2934 if (req->src != tsgls->src.sg << 2935 pr_err("alg: skcipher << 2936 if (req->dst != tsgls->dst.sg << 2937 pr_err("alg: skcipher << 2938 if (crypto_skcipher_reqtfm(re << 2939 pr_err("alg: skcipher << 2940 if (req->base.complete != cry << 2941 pr_err("alg: skcipher << 2942 if (req->base.flags != req_fl << 2943 pr_err("alg: skcipher << 2944 if (req->base.data != &wait) << 2945 pr_err("alg: skcipher << 2946 return -EINVAL; << 2947 } << 2948 if (is_test_sglist_corrupted(&tsgls-> << 2949 pr_err("alg: skcipher: %s %s << 2950 driver, op, vec_name, << 2951 return -EINVAL; << 2952 } << 2953 if (tsgls->dst.sgl_ptr != tsgls->src. << 2954 is_test_sglist_corrupted(&tsgls-> << 2955 pr_err("alg: skcipher: %s %s << 2956 driver, op, vec_name, << 2957 return -EINVAL; << 2958 } 1106 } 2959 1107 2960 /* Check for success or failure */ !! 1108 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2961 if (err) { !! 1109 crypto_req_done, &wait); 2962 if (err == vec->crypt_error) << 2963 return 0; << 2964 pr_err("alg: skcipher: %s %s << 2965 driver, op, vec_name, << 2966 return err; << 2967 } << 2968 if (vec->crypt_error) { << 2969 pr_err("alg: skcipher: %s %s << 2970 driver, op, vec_name, << 2971 return -EINVAL; << 2972 } << 2973 1110 2974 /* Check for the correct output (ciph !! 1111 j = 0; 2975 err = verify_correct_output(&tsgls->d !! 1112 for (i = 0; i < tcount; i++) { 2976 vec->len, !! 1113 if (template[i].np && !template[i].also_non_np) 2977 if (err == -EOVERFLOW) { !! 1114 continue; 2978 pr_err("alg: skcipher: %s %s << 2979 driver, op, vec_name, << 2980 return err; << 2981 } << 2982 if (err) { << 2983 pr_err("alg: skcipher: %s %s << 2984 driver, op, vec_name, << 2985 return err; << 2986 } << 2987 1115 2988 /* If applicable, check that the algo !! 1116 if (fips_enabled && template[i].fips_skip) 2989 if (vec->iv_out && memcmp(iv, vec->iv !! 1117 continue; 2990 pr_err("alg: skcipher: %s %s << 2991 driver, op, vec_name, << 2992 hexdump(iv, ivsize); << 2993 return -EINVAL; << 2994 } << 2995 1118 2996 return 0; !! 1119 if (template[i].iv) 2997 } !! 
1120 memcpy(iv, template[i].iv, ivsize); >> 1121 else >> 1122 memset(iv, 0, MAX_IVLEN); 2998 1123 2999 static int test_skcipher_vec(int enc, const s !! 1124 j++; 3000 unsigned int vec !! 1125 ret = -EINVAL; 3001 struct skcipher_ !! 1126 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE)) 3002 struct cipher_te !! 1127 goto out; 3003 { << 3004 char vec_name[16]; << 3005 unsigned int i; << 3006 int err; << 3007 1128 3008 if (fips_enabled && vec->fips_skip) !! 1129 data = xbuf[0]; 3009 return 0; !! 1130 data += align_offset; >> 1131 memcpy(data, template[i].input, template[i].ilen); 3010 1132 3011 sprintf(vec_name, "%u", vec_num); !! 1133 crypto_skcipher_clear_flags(tfm, ~0); >> 1134 if (template[i].wk) >> 1135 crypto_skcipher_set_flags(tfm, >> 1136 CRYPTO_TFM_REQ_WEAK_KEY); 3012 1137 3013 for (i = 0; i < ARRAY_SIZE(default_ci !! 1138 ret = crypto_skcipher_setkey(tfm, template[i].key, 3014 err = test_skcipher_vec_cfg(e !! 1139 template[i].klen); 3015 & !! 1140 if (template[i].fail == !ret) { 3016 r !! 1141 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 3017 if (err) !! 1142 d, j, algo, crypto_skcipher_get_flags(tfm)); 3018 return err; !! 1143 goto out; 3019 } !! 1144 } else if (ret) >> 1145 continue; 3020 1146 3021 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS !! 1147 sg_init_one(&sg[0], data, template[i].ilen); 3022 if (!noextratests) { !! 1148 if (diff_dst) { 3023 struct rnd_state rng; !! 1149 data = xoutbuf[0]; 3024 struct testvec_config cfg; !! 1150 data += align_offset; 3025 char cfgname[TESTVEC_CONFIG_N !! 1151 sg_init_one(&sgout[0], data, template[i].ilen); >> 1152 } 3026 1153 3027 init_rnd_state(&rng); !! 1154 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, >> 1155 template[i].ilen, iv); >> 1156 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) : >> 1157 crypto_skcipher_decrypt(req), &wait); 3028 1158 3029 for (i = 0; i < fuzz_iteratio !! 1159 if (ret) { 3030 generate_random_testv !! 1160 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 3031 !! 1161 d, e, j, algo, -ret); 3032 err = test_skcipher_v !! 1162 goto out; 3033 << 3034 if (err) << 3035 return err; << 3036 cond_resched(); << 3037 } 1163 } 3038 } << 3039 #endif << 3040 return 0; << 3041 } << 3042 1164 3043 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS !! 1165 q = data; 3044 /* !! 1166 if (memcmp(q, template[i].result, template[i].rlen)) { 3045 * Generate a symmetric cipher test vector fr !! 1167 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n", 3046 * Assumes the buffers in 'vec' were already !! 1168 d, j, e, algo); 3047 */ !! 1169 hexdump(q, template[i].rlen); 3048 static void generate_random_cipher_testvec(st !! 1170 ret = -EINVAL; 3049 st !! 1171 goto out; 3050 st !! 1172 } 3051 un << 3052 ch << 3053 { << 3054 struct crypto_skcipher *tfm = crypto_ << 3055 const unsigned int maxkeysize = crypt << 3056 const unsigned int ivsize = crypto_sk << 3057 struct scatterlist src, dst; << 3058 u8 iv[MAX_IVLEN]; << 3059 DECLARE_CRYPTO_WAIT(wait); << 3060 1173 3061 /* Key: length in [0, maxkeysize], bu !! 1174 if (template[i].iv_out && 3062 vec->klen = maxkeysize; !! 1175 memcmp(iv, template[i].iv_out, 3063 if (prandom_u32_below(rng, 4) == 0) !! 1176 crypto_skcipher_ivsize(tfm))) { 3064 vec->klen = prandom_u32_below !! 1177 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n", 3065 generate_random_bytes(rng, (u8 *)vec- !! 1178 d, j, e, algo); 3066 vec->setkey_error = crypto_skcipher_s !! 
1179 hexdump(iv, crypto_skcipher_ivsize(tfm)); 3067 !! 1180 ret = -EINVAL; 3068 /* IV */ !! 1181 goto out; 3069 generate_random_bytes(rng, (u8 *)vec- !! 1182 } 3070 << 3071 /* Plaintext */ << 3072 vec->len = generate_random_length(rng << 3073 generate_random_bytes(rng, (u8 *)vec- << 3074 << 3075 /* If the key couldn't be set, no nee << 3076 if (vec->setkey_error) << 3077 goto done; << 3078 << 3079 /* Ciphertext */ << 3080 sg_init_one(&src, vec->ptext, vec->le << 3081 sg_init_one(&dst, vec->ctext, vec->le << 3082 memcpy(iv, vec->iv, ivsize); << 3083 skcipher_request_set_callback(req, 0, << 3084 skcipher_request_set_crypt(req, &src, << 3085 vec->crypt_error = crypto_wait_req(cr << 3086 if (vec->crypt_error != 0) { << 3087 /* << 3088 * The only acceptable error << 3089 * skcipher decryption should << 3090 * We'll test for this. But << 3091 * explicitly initialize the << 3092 */ << 3093 memset((u8 *)vec->ctext, 0, v << 3094 } 1183 } 3095 done: << 3096 snprintf(name, max_namelen, "\"random << 3097 vec->len, vec->klen); << 3098 } << 3099 1184 3100 /* !! 1185 j = 0; 3101 * Test the skcipher algorithm represented by !! 1186 for (i = 0; i < tcount; i++) { 3102 * generic implementation, if one is availabl !! 1187 /* alignment tests are only done with continuous buffers */ 3103 */ !! 1188 if (align_offset != 0) 3104 static int test_skcipher_vs_generic_impl(cons !! 1189 break; 3105 stru << 3106 stru << 3107 { << 3108 struct crypto_skcipher *tfm = crypto_ << 3109 const unsigned int maxkeysize = crypt << 3110 const unsigned int ivsize = crypto_sk << 3111 const unsigned int blocksize = crypto << 3112 const unsigned int maxdatasize = (2 * << 3113 const char *algname = crypto_skcipher << 3114 const char *driver = crypto_skcipher_ << 3115 struct rnd_state rng; << 3116 char _generic_driver[CRYPTO_MAX_ALG_N << 3117 struct crypto_skcipher *generic_tfm = << 3118 struct skcipher_request *generic_req << 3119 unsigned int i; << 3120 struct cipher_testvec vec = { 0 }; << 3121 char vec_name[64]; << 3122 struct testvec_config *cfg; << 3123 char cfgname[TESTVEC_CONFIG_NAMELEN]; << 3124 int err; << 3125 << 3126 if (noextratests) << 3127 return 0; << 3128 1190 3129 /* Keywrap isn't supported here yet a !! 1191 if (!template[i].np) 3130 if (strncmp(algname, "kw(", 3) == 0) !! 1192 continue; 3131 return 0; << 3132 1193 3133 init_rnd_state(&rng); !! 1194 if (fips_enabled && template[i].fips_skip) >> 1195 continue; 3134 1196 3135 if (!generic_driver) { /* Use default !! 1197 if (template[i].iv) 3136 err = build_generic_driver_na !! 1198 memcpy(iv, template[i].iv, ivsize); 3137 if (err) !! 1199 else 3138 return err; !! 1200 memset(iv, 0, MAX_IVLEN); 3139 generic_driver = _generic_dri << 3140 } << 3141 1201 3142 if (strcmp(generic_driver, driver) == !! 1202 j++; 3143 return 0; !! 1203 crypto_skcipher_clear_flags(tfm, ~0); >> 1204 if (template[i].wk) >> 1205 crypto_skcipher_set_flags(tfm, >> 1206 CRYPTO_TFM_REQ_WEAK_KEY); 3144 1207 3145 generic_tfm = crypto_alloc_skcipher(g !! 1208 ret = crypto_skcipher_setkey(tfm, template[i].key, 3146 if (IS_ERR(generic_tfm)) { !! 1209 template[i].klen); 3147 err = PTR_ERR(generic_tfm); !! 1210 if (template[i].fail == !ret) { 3148 if (err == -ENOENT) { !! 1211 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 3149 pr_warn("alg: skciphe !! 1212 d, j, algo, crypto_skcipher_get_flags(tfm)); 3150 driver, gener !! 1213 goto out; 3151 return 0; !! 1214 } else if (ret) 3152 } !! 
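/*
 * Editor's illustrative sketch (not from this file): the single-scatterlist
 * skcipher encryption that the first loop of __test_skcipher() performs,
 * reduced to its essentials.  The tfm is assumed to have been allocated and
 * keyed elsewhere (e.g. crypto_alloc_skcipher("cbc(aes)", 0, 0)); the data
 * buffer must be linear, and iv must hold crypto_skcipher_ivsize() bytes
 * (most modes update it in place, which is what the iv_out check verifies).
 */
static int example_skcipher_encrypt(struct crypto_skcipher *tfm,
				    u8 *data, unsigned int len, u8 *iv)
{
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	sg_init_one(&sg, data, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
	return err;
}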
1215 continue; 3153 pr_err("alg: skcipher: error << 3154 generic_driver, algnam << 3155 return err; << 3156 } << 3157 1216 3158 cfg = kzalloc(sizeof(*cfg), GFP_KERNE !! 1217 temp = 0; 3159 if (!cfg) { !! 1218 ret = -EINVAL; 3160 err = -ENOMEM; !! 1219 sg_init_table(sg, template[i].np); 3161 goto out; !! 1220 if (diff_dst) 3162 } !! 1221 sg_init_table(sgout, template[i].np); >> 1222 for (k = 0; k < template[i].np; k++) { >> 1223 if (WARN_ON(offset_in_page(IDX[k]) + >> 1224 template[i].tap[k] > PAGE_SIZE)) >> 1225 goto out; 3163 1226 3164 generic_req = skcipher_request_alloc( !! 1227 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 3165 if (!generic_req) { << 3166 err = -ENOMEM; << 3167 goto out; << 3168 } << 3169 1228 3170 /* Check the algorithm properties for !! 1229 memcpy(q, template[i].input + temp, template[i].tap[k]); 3171 1230 3172 if (crypto_skcipher_min_keysize(tfm) !! 1231 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 3173 crypto_skcipher_min_keysize(gener !! 1232 q[template[i].tap[k]] = 0; 3174 pr_err("alg: skcipher: min ke << 3175 driver, crypto_skciphe << 3176 crypto_skcipher_min_ke << 3177 err = -EINVAL; << 3178 goto out; << 3179 } << 3180 1233 3181 if (maxkeysize != crypto_skcipher_max !! 1234 sg_set_buf(&sg[k], q, template[i].tap[k]); 3182 pr_err("alg: skcipher: max ke !! 1235 if (diff_dst) { 3183 driver, maxkeysize, !! 1236 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 3184 crypto_skcipher_max_ke !! 1237 offset_in_page(IDX[k]); 3185 err = -EINVAL; << 3186 goto out; << 3187 } << 3188 1238 3189 if (ivsize != crypto_skcipher_ivsize( !! 1239 sg_set_buf(&sgout[k], q, template[i].tap[k]); 3190 pr_err("alg: skcipher: ivsize << 3191 driver, ivsize, crypto << 3192 err = -EINVAL; << 3193 goto out; << 3194 } << 3195 1240 3196 if (blocksize != crypto_skcipher_bloc !! 1241 memset(q, 0, template[i].tap[k]); 3197 pr_err("alg: skcipher: blocks !! 1242 if (offset_in_page(q) + 3198 driver, blocksize, !! 1243 template[i].tap[k] < PAGE_SIZE) 3199 crypto_skcipher_blocks !! 1244 q[template[i].tap[k]] = 0; 3200 err = -EINVAL; !! 1245 } 3201 goto out; << 3202 } << 3203 1246 3204 /* !! 1247 temp += template[i].tap[k]; 3205 * Now generate test vectors using th !! 1248 } 3206 * the other implementation against t << 3207 */ << 3208 1249 3209 vec.key = kmalloc(maxkeysize, GFP_KER !! 1250 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 3210 vec.iv = kmalloc(ivsize, GFP_KERNEL); !! 1251 template[i].ilen, iv); 3211 vec.ptext = kmalloc(maxdatasize, GFP_ << 3212 vec.ctext = kmalloc(maxdatasize, GFP_ << 3213 if (!vec.key || !vec.iv || !vec.ptext << 3214 err = -ENOMEM; << 3215 goto out; << 3216 } << 3217 1252 3218 for (i = 0; i < fuzz_iterations * 8; !! 1253 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) : 3219 generate_random_cipher_testve !! 1254 crypto_skcipher_decrypt(req), &wait); 3220 << 3221 << 3222 generate_random_testvec_confi << 3223 << 3224 1255 3225 err = test_skcipher_vec_cfg(E !! 1256 if (ret) { 3226 c !! 1257 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 3227 if (err) !! 1258 d, e, j, algo, -ret); 3228 goto out; << 3229 err = test_skcipher_vec_cfg(D << 3230 c << 3231 if (err) << 3232 goto out; 1259 goto out; 3233 cond_resched(); !! 
1260 } 3234 } << 3235 err = 0; << 3236 out: << 3237 kfree(cfg); << 3238 kfree(vec.key); << 3239 kfree(vec.iv); << 3240 kfree(vec.ptext); << 3241 kfree(vec.ctext); << 3242 crypto_free_skcipher(generic_tfm); << 3243 skcipher_request_free(generic_req); << 3244 return err; << 3245 } << 3246 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS * << 3247 static int test_skcipher_vs_generic_impl(cons << 3248 stru << 3249 stru << 3250 { << 3251 return 0; << 3252 } << 3253 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 3254 1261 3255 static int test_skcipher(int enc, const struc !! 1262 temp = 0; 3256 struct skcipher_requ !! 1263 ret = -EINVAL; 3257 struct cipher_test_s !! 1264 for (k = 0; k < template[i].np; k++) { 3258 { !! 1265 if (diff_dst) 3259 unsigned int i; !! 1266 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 3260 int err; !! 1267 offset_in_page(IDX[k]); >> 1268 else >> 1269 q = xbuf[IDX[k] >> PAGE_SHIFT] + >> 1270 offset_in_page(IDX[k]); 3261 1271 3262 for (i = 0; i < suite->count; i++) { !! 1272 if (memcmp(q, template[i].result + temp, 3263 err = test_skcipher_vec(enc, !! 1273 template[i].tap[k])) { 3264 if (err) !! 1274 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 3265 return err; !! 1275 d, j, e, k, algo); 3266 cond_resched(); !! 1276 hexdump(q, template[i].tap[k]); >> 1277 goto out; >> 1278 } >> 1279 >> 1280 q += template[i].tap[k]; >> 1281 for (n = 0; offset_in_page(q + n) && q[n]; n++) >> 1282 ; >> 1283 if (n) { >> 1284 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", >> 1285 d, j, e, k, algo, n); >> 1286 hexdump(q, n); >> 1287 goto out; >> 1288 } >> 1289 temp += template[i].tap[k]; >> 1290 } 3267 } 1291 } 3268 return 0; !! 1292 >> 1293 ret = 0; >> 1294 >> 1295 out: >> 1296 skcipher_request_free(req); >> 1297 if (diff_dst) >> 1298 testmgr_free_buf(xoutbuf); >> 1299 out_nooutbuf: >> 1300 testmgr_free_buf(xbuf); >> 1301 out_nobuf: >> 1302 return ret; 3269 } 1303 } 3270 1304 3271 static int alg_test_skcipher(const struct alg !! 1305 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 3272 const char *driv !! 1306 const struct cipher_testvec *template, >> 1307 unsigned int tcount) 3273 { 1308 { 3274 const struct cipher_test_suite *suite !! 1309 unsigned int alignmask; 3275 struct crypto_skcipher *tfm; !! 1310 int ret; 3276 struct skcipher_request *req = NULL; << 3277 struct cipher_test_sglists *tsgls = N << 3278 int err; << 3279 1311 3280 if (suite->count <= 0) { !! 1312 /* test 'dst == src' case */ 3281 pr_err("alg: skcipher: empty !! 1313 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 3282 return -EINVAL; !! 1314 if (ret) 3283 } !! 1315 return ret; 3284 1316 3285 tfm = crypto_alloc_skcipher(driver, t !! 1317 /* test 'dst != src' case */ 3286 if (IS_ERR(tfm)) { !! 1318 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 3287 if (PTR_ERR(tfm) == -ENOENT) !! 1319 if (ret) 3288 return 0; !! 1320 return ret; 3289 pr_err("alg: skcipher: failed << 3290 driver, PTR_ERR(tfm)); << 3291 return PTR_ERR(tfm); << 3292 } << 3293 driver = crypto_skcipher_driver_name( << 3294 1321 3295 req = skcipher_request_alloc(tfm, GFP !! 1322 /* test unaligned buffers, check with one byte offset */ 3296 if (!req) { !! 1323 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 3297 pr_err("alg: skcipher: failed !! 1324 if (ret) 3298 driver); !! 1325 return ret; 3299 err = -ENOMEM; << 3300 goto out; << 3301 } << 3302 1326 3303 tsgls = alloc_cipher_test_sglists(); !! 
1327 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 3304 if (!tsgls) { !! 1328 if (alignmask) { 3305 pr_err("alg: skcipher: failed !! 1329 /* Check if alignment mask for tfm is correctly set. */ 3306 driver); !! 1330 ret = __test_skcipher(tfm, enc, template, tcount, true, 3307 err = -ENOMEM; !! 1331 alignmask + 1); 3308 goto out; !! 1332 if (ret) >> 1333 return ret; 3309 } 1334 } 3310 1335 3311 err = test_skcipher(ENCRYPT, suite, r !! 1336 return 0; 3312 if (err) << 3313 goto out; << 3314 << 3315 err = test_skcipher(DECRYPT, suite, r << 3316 if (err) << 3317 goto out; << 3318 << 3319 err = test_skcipher_vs_generic_impl(d << 3320 out: << 3321 free_cipher_test_sglists(tsgls); << 3322 skcipher_request_free(req); << 3323 crypto_free_skcipher(tfm); << 3324 return err; << 3325 } 1337 } 3326 1338 3327 static int test_comp(struct crypto_comp *tfm, 1339 static int test_comp(struct crypto_comp *tfm, 3328 const struct comp_testve 1340 const struct comp_testvec *ctemplate, 3329 const struct comp_testve 1341 const struct comp_testvec *dtemplate, 3330 int ctcount, int dtcount 1342 int ctcount, int dtcount) 3331 { 1343 { 3332 const char *algo = crypto_tfm_alg_dri 1344 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 3333 char *output, *decomp_output; << 3334 unsigned int i; 1345 unsigned int i; >> 1346 char result[COMP_BUF_SIZE]; 3335 int ret; 1347 int ret; 3336 1348 3337 output = kmalloc(COMP_BUF_SIZE, GFP_K << 3338 if (!output) << 3339 return -ENOMEM; << 3340 << 3341 decomp_output = kmalloc(COMP_BUF_SIZE << 3342 if (!decomp_output) { << 3343 kfree(output); << 3344 return -ENOMEM; << 3345 } << 3346 << 3347 for (i = 0; i < ctcount; i++) { 1349 for (i = 0; i < ctcount; i++) { 3348 int ilen; 1350 int ilen; 3349 unsigned int dlen = COMP_BUF_ 1351 unsigned int dlen = COMP_BUF_SIZE; 3350 1352 3351 memset(output, 0, COMP_BUF_SI !! 1353 memset(result, 0, sizeof (result)); 3352 memset(decomp_output, 0, COMP << 3353 1354 3354 ilen = ctemplate[i].inlen; 1355 ilen = ctemplate[i].inlen; 3355 ret = crypto_comp_compress(tf 1356 ret = crypto_comp_compress(tfm, ctemplate[i].input, 3356 il !! 1357 ilen, result, &dlen); 3357 if (ret) { 1358 if (ret) { 3358 printk(KERN_ERR "alg: 1359 printk(KERN_ERR "alg: comp: compression failed " 3359 "on test %d fo 1360 "on test %d for %s: ret=%d\n", i + 1, algo, 3360 -ret); 1361 -ret); 3361 goto out; 1362 goto out; 3362 } 1363 } 3363 1364 3364 ilen = dlen; !! 1365 if (dlen != ctemplate[i].outlen) { 3365 dlen = COMP_BUF_SIZE; << 3366 ret = crypto_comp_decompress( << 3367 << 3368 if (ret) { << 3369 pr_err("alg: comp: co << 3370 i + 1, algo, - << 3371 goto out; << 3372 } << 3373 << 3374 if (dlen != ctemplate[i].inle << 3375 printk(KERN_ERR "alg: 1366 printk(KERN_ERR "alg: comp: Compression test %d " 3376 "failed for %s 1367 "failed for %s: output len = %d\n", i + 1, algo, 3377 dlen); 1368 dlen); 3378 ret = -EINVAL; 1369 ret = -EINVAL; 3379 goto out; 1370 goto out; 3380 } 1371 } 3381 1372 3382 if (memcmp(decomp_output, cte !! 1373 if (memcmp(result, ctemplate[i].output, dlen)) { 3383 ctemplate[i].inlen !! 1374 printk(KERN_ERR "alg: comp: Compression test %d " 3384 pr_err("alg: comp: co !! 1375 "failed for %s\n", i + 1, algo); 3385 i + 1, algo); !! 
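/*
 * The skcipher tests above all drive the asynchronous cipher API
 * synchronously through crypto_init_wait()/crypto_wait_req().  Below is a
 * minimal sketch of that same request/wait pattern in a standalone caller;
 * the "cbc(aes)" name, the single linear (kmalloc'd) buffer and the error
 * handling are illustrative assumptions, not part of testmgr itself.
 */
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int example_cbc_aes_encrypt(u8 *buf, unsigned int len,
                                   const u8 *key, unsigned int keylen, u8 *iv)
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        struct crypto_wait wait;
        int err;

        tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        crypto_init_wait(&wait);
        sg_init_one(&sg, buf, len);     /* len assumed a multiple of the block size */
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, len, iv);

        /* crypto_wait_req() sleeps until an -EINPROGRESS request completes. */
        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}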
1376 hexdump(result, dlen); 3386 hexdump(decomp_output << 3387 ret = -EINVAL; 1377 ret = -EINVAL; 3388 goto out; 1378 goto out; 3389 } 1379 } 3390 } 1380 } 3391 1381 3392 for (i = 0; i < dtcount; i++) { 1382 for (i = 0; i < dtcount; i++) { 3393 int ilen; 1383 int ilen; 3394 unsigned int dlen = COMP_BUF_ 1384 unsigned int dlen = COMP_BUF_SIZE; 3395 1385 3396 memset(decomp_output, 0, COMP !! 1386 memset(result, 0, sizeof (result)); 3397 1387 3398 ilen = dtemplate[i].inlen; 1388 ilen = dtemplate[i].inlen; 3399 ret = crypto_comp_decompress( 1389 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 3400 !! 1390 ilen, result, &dlen); 3401 if (ret) { 1391 if (ret) { 3402 printk(KERN_ERR "alg: 1392 printk(KERN_ERR "alg: comp: decompression failed " 3403 "on test %d fo 1393 "on test %d for %s: ret=%d\n", i + 1, algo, 3404 -ret); 1394 -ret); 3405 goto out; 1395 goto out; 3406 } 1396 } 3407 1397 3408 if (dlen != dtemplate[i].outl 1398 if (dlen != dtemplate[i].outlen) { 3409 printk(KERN_ERR "alg: 1399 printk(KERN_ERR "alg: comp: Decompression test %d " 3410 "failed for %s 1400 "failed for %s: output len = %d\n", i + 1, algo, 3411 dlen); 1401 dlen); 3412 ret = -EINVAL; 1402 ret = -EINVAL; 3413 goto out; 1403 goto out; 3414 } 1404 } 3415 1405 3416 if (memcmp(decomp_output, dte !! 1406 if (memcmp(result, dtemplate[i].output, dlen)) { 3417 printk(KERN_ERR "alg: 1407 printk(KERN_ERR "alg: comp: Decompression test %d " 3418 "failed for %s 1408 "failed for %s\n", i + 1, algo); 3419 hexdump(decomp_output !! 1409 hexdump(result, dlen); 3420 ret = -EINVAL; 1410 ret = -EINVAL; 3421 goto out; 1411 goto out; 3422 } 1412 } 3423 } 1413 } 3424 1414 3425 ret = 0; 1415 ret = 0; 3426 1416 3427 out: 1417 out: 3428 kfree(decomp_output); << 3429 kfree(output); << 3430 return ret; 1418 return ret; 3431 } 1419 } 3432 1420 3433 static int test_acomp(struct crypto_acomp *tf 1421 static int test_acomp(struct crypto_acomp *tfm, 3434 const struct comp_testv 1422 const struct comp_testvec *ctemplate, 3435 const struct comp_testv 1423 const struct comp_testvec *dtemplate, 3436 int ctcount, int dtcoun 1424 int ctcount, int dtcount) 3437 { 1425 { 3438 const char *algo = crypto_tfm_alg_dri 1426 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm)); 3439 unsigned int i; 1427 unsigned int i; 3440 char *output, *decomp_out; 1428 char *output, *decomp_out; 3441 int ret; 1429 int ret; 3442 struct scatterlist src, dst; 1430 struct scatterlist src, dst; 3443 struct acomp_req *req; 1431 struct acomp_req *req; 3444 struct crypto_wait wait; 1432 struct crypto_wait wait; 3445 1433 3446 output = kmalloc(COMP_BUF_SIZE, GFP_K 1434 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 3447 if (!output) 1435 if (!output) 3448 return -ENOMEM; 1436 return -ENOMEM; 3449 1437 3450 decomp_out = kmalloc(COMP_BUF_SIZE, G 1438 decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 3451 if (!decomp_out) { 1439 if (!decomp_out) { 3452 kfree(output); 1440 kfree(output); 3453 return -ENOMEM; 1441 return -ENOMEM; 3454 } 1442 } 3455 1443 3456 for (i = 0; i < ctcount; i++) { 1444 for (i = 0; i < ctcount; i++) { 3457 unsigned int dlen = COMP_BUF_ 1445 unsigned int dlen = COMP_BUF_SIZE; 3458 int ilen = ctemplate[i].inlen 1446 int ilen = ctemplate[i].inlen; 3459 void *input_vec; 1447 void *input_vec; 3460 1448 3461 input_vec = kmemdup(ctemplate 1449 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL); 3462 if (!input_vec) { 1450 if (!input_vec) { 3463 ret = -ENOMEM; 1451 ret = -ENOMEM; 3464 goto out; 1452 goto out; 3465 } 1453 } 3466 1454 3467 memset(output, 0, 
dlen); 1455 memset(output, 0, dlen); 3468 crypto_init_wait(&wait); 1456 crypto_init_wait(&wait); 3469 sg_init_one(&src, input_vec, 1457 sg_init_one(&src, input_vec, ilen); 3470 sg_init_one(&dst, output, dle 1458 sg_init_one(&dst, output, dlen); 3471 1459 3472 req = acomp_request_alloc(tfm 1460 req = acomp_request_alloc(tfm); 3473 if (!req) { 1461 if (!req) { 3474 pr_err("alg: acomp: r 1462 pr_err("alg: acomp: request alloc failed for %s\n", 3475 algo); 1463 algo); 3476 kfree(input_vec); 1464 kfree(input_vec); 3477 ret = -ENOMEM; 1465 ret = -ENOMEM; 3478 goto out; 1466 goto out; 3479 } 1467 } 3480 1468 3481 acomp_request_set_params(req, 1469 acomp_request_set_params(req, &src, &dst, ilen, dlen); 3482 acomp_request_set_callback(re 1470 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 3483 cr 1471 crypto_req_done, &wait); 3484 1472 3485 ret = crypto_wait_req(crypto_ 1473 ret = crypto_wait_req(crypto_acomp_compress(req), &wait); 3486 if (ret) { 1474 if (ret) { 3487 pr_err("alg: acomp: c 1475 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 3488 i + 1, algo, - 1476 i + 1, algo, -ret); 3489 kfree(input_vec); 1477 kfree(input_vec); 3490 acomp_request_free(re 1478 acomp_request_free(req); 3491 goto out; 1479 goto out; 3492 } 1480 } 3493 1481 3494 ilen = req->dlen; 1482 ilen = req->dlen; 3495 dlen = COMP_BUF_SIZE; 1483 dlen = COMP_BUF_SIZE; 3496 sg_init_one(&src, output, ile 1484 sg_init_one(&src, output, ilen); 3497 sg_init_one(&dst, decomp_out, 1485 sg_init_one(&dst, decomp_out, dlen); 3498 crypto_init_wait(&wait); 1486 crypto_init_wait(&wait); 3499 acomp_request_set_params(req, 1487 acomp_request_set_params(req, &src, &dst, ilen, dlen); 3500 1488 3501 ret = crypto_wait_req(crypto_ 1489 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait); 3502 if (ret) { 1490 if (ret) { 3503 pr_err("alg: acomp: c 1491 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 3504 i + 1, algo, - 1492 i + 1, algo, -ret); 3505 kfree(input_vec); 1493 kfree(input_vec); 3506 acomp_request_free(re 1494 acomp_request_free(req); 3507 goto out; 1495 goto out; 3508 } 1496 } 3509 1497 3510 if (req->dlen != ctemplate[i] 1498 if (req->dlen != ctemplate[i].inlen) { 3511 pr_err("alg: acomp: C 1499 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n", 3512 i + 1, algo, r 1500 i + 1, algo, req->dlen); 3513 ret = -EINVAL; 1501 ret = -EINVAL; 3514 kfree(input_vec); 1502 kfree(input_vec); 3515 acomp_request_free(re 1503 acomp_request_free(req); 3516 goto out; 1504 goto out; 3517 } 1505 } 3518 1506 3519 if (memcmp(input_vec, decomp_ 1507 if (memcmp(input_vec, decomp_out, req->dlen)) { 3520 pr_err("alg: acomp: C 1508 pr_err("alg: acomp: Compression test %d failed for %s\n", 3521 i + 1, algo); 1509 i + 1, algo); 3522 hexdump(output, req-> 1510 hexdump(output, req->dlen); 3523 ret = -EINVAL; 1511 ret = -EINVAL; 3524 kfree(input_vec); 1512 kfree(input_vec); 3525 acomp_request_free(re 1513 acomp_request_free(req); 3526 goto out; 1514 goto out; 3527 } 1515 } 3528 1516 3529 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 3530 crypto_init_wait(&wait); << 3531 sg_init_one(&src, input_vec, << 3532 acomp_request_set_params(req, << 3533 << 3534 ret = crypto_wait_req(crypto_ << 3535 if (ret) { << 3536 pr_err("alg: acomp: c << 3537 i + 1, algo, - << 3538 kfree(input_vec); << 3539 acomp_request_free(re << 3540 goto out; << 3541 } << 3542 #endif << 3543 << 3544 kfree(input_vec); 1517 kfree(input_vec); 3545 acomp_request_free(req); 1518 acomp_request_free(req); 3546 } 1519 } 3547 1520 
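/*
 * test_acomp() above exercises the asynchronous compression API with
 * scatterlists and the same wait pattern.  A minimal one-shot compression
 * sketch follows; "deflate", the linear buffers and the caller-supplied
 * output size are illustrative assumptions.
 */
#include <crypto/acompress.h>
#include <linux/scatterlist.h>

static int example_compress(const void *src, unsigned int slen,
                            void *dst, unsigned int *dlenp)
{
        struct crypto_acomp *tfm;
        struct acomp_req *req;
        struct scatterlist sg_src, sg_dst;
        struct crypto_wait wait;
        int err;

        tfm = crypto_alloc_acomp("deflate", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        req = acomp_request_alloc(tfm);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        crypto_init_wait(&wait);
        sg_init_one(&sg_src, src, slen);
        sg_init_one(&sg_dst, dst, *dlenp);
        acomp_request_set_params(req, &sg_src, &sg_dst, slen, *dlenp);
        acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   crypto_req_done, &wait);

        err = crypto_wait_req(crypto_acomp_compress(req), &wait);
        if (!err)
                *dlenp = req->dlen;     /* actual compressed length */

        acomp_request_free(req);
out_free_tfm:
        crypto_free_acomp(tfm);
        return err;
}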
3548 for (i = 0; i < dtcount; i++) { 1521 for (i = 0; i < dtcount; i++) { 3549 unsigned int dlen = COMP_BUF_ 1522 unsigned int dlen = COMP_BUF_SIZE; 3550 int ilen = dtemplate[i].inlen 1523 int ilen = dtemplate[i].inlen; 3551 void *input_vec; 1524 void *input_vec; 3552 1525 3553 input_vec = kmemdup(dtemplate 1526 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL); 3554 if (!input_vec) { 1527 if (!input_vec) { 3555 ret = -ENOMEM; 1528 ret = -ENOMEM; 3556 goto out; 1529 goto out; 3557 } 1530 } 3558 1531 3559 memset(output, 0, dlen); 1532 memset(output, 0, dlen); 3560 crypto_init_wait(&wait); 1533 crypto_init_wait(&wait); 3561 sg_init_one(&src, input_vec, 1534 sg_init_one(&src, input_vec, ilen); 3562 sg_init_one(&dst, output, dle 1535 sg_init_one(&dst, output, dlen); 3563 1536 3564 req = acomp_request_alloc(tfm 1537 req = acomp_request_alloc(tfm); 3565 if (!req) { 1538 if (!req) { 3566 pr_err("alg: acomp: r 1539 pr_err("alg: acomp: request alloc failed for %s\n", 3567 algo); 1540 algo); 3568 kfree(input_vec); 1541 kfree(input_vec); 3569 ret = -ENOMEM; 1542 ret = -ENOMEM; 3570 goto out; 1543 goto out; 3571 } 1544 } 3572 1545 3573 acomp_request_set_params(req, 1546 acomp_request_set_params(req, &src, &dst, ilen, dlen); 3574 acomp_request_set_callback(re 1547 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 3575 cr 1548 crypto_req_done, &wait); 3576 1549 3577 ret = crypto_wait_req(crypto_ 1550 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait); 3578 if (ret) { 1551 if (ret) { 3579 pr_err("alg: acomp: d 1552 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n", 3580 i + 1, algo, - 1553 i + 1, algo, -ret); 3581 kfree(input_vec); 1554 kfree(input_vec); 3582 acomp_request_free(re 1555 acomp_request_free(req); 3583 goto out; 1556 goto out; 3584 } 1557 } 3585 1558 3586 if (req->dlen != dtemplate[i] 1559 if (req->dlen != dtemplate[i].outlen) { 3587 pr_err("alg: acomp: D 1560 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n", 3588 i + 1, algo, r 1561 i + 1, algo, req->dlen); 3589 ret = -EINVAL; 1562 ret = -EINVAL; 3590 kfree(input_vec); 1563 kfree(input_vec); 3591 acomp_request_free(re 1564 acomp_request_free(req); 3592 goto out; 1565 goto out; 3593 } 1566 } 3594 1567 3595 if (memcmp(output, dtemplate[ 1568 if (memcmp(output, dtemplate[i].output, req->dlen)) { 3596 pr_err("alg: acomp: D 1569 pr_err("alg: acomp: Decompression test %d failed for %s\n", 3597 i + 1, algo); 1570 i + 1, algo); 3598 hexdump(output, req-> 1571 hexdump(output, req->dlen); 3599 ret = -EINVAL; 1572 ret = -EINVAL; 3600 kfree(input_vec); 1573 kfree(input_vec); 3601 acomp_request_free(re 1574 acomp_request_free(req); 3602 goto out; 1575 goto out; 3603 } 1576 } 3604 1577 3605 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 3606 crypto_init_wait(&wait); << 3607 acomp_request_set_params(req, << 3608 << 3609 ret = crypto_wait_req(crypto_ << 3610 if (ret) { << 3611 pr_err("alg: acomp: d << 3612 i + 1, algo, - << 3613 kfree(input_vec); << 3614 acomp_request_free(re << 3615 goto out; << 3616 } << 3617 #endif << 3618 << 3619 kfree(input_vec); 1578 kfree(input_vec); 3620 acomp_request_free(req); 1579 acomp_request_free(req); 3621 } 1580 } 3622 1581 3623 ret = 0; 1582 ret = 0; 3624 1583 3625 out: 1584 out: 3626 kfree(decomp_out); 1585 kfree(decomp_out); 3627 kfree(output); 1586 kfree(output); 3628 return ret; 1587 return ret; 3629 } 1588 } 3630 1589 3631 static int test_cprng(struct crypto_rng *tfm, 1590 static int test_cprng(struct crypto_rng *tfm, 3632 const struct cprng_test 
1591 const struct cprng_testvec *template, 3633 unsigned int tcount) 1592 unsigned int tcount) 3634 { 1593 { 3635 const char *algo = crypto_tfm_alg_dri 1594 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 3636 int err = 0, i, j, seedsize; 1595 int err = 0, i, j, seedsize; 3637 u8 *seed; 1596 u8 *seed; 3638 char result[32]; 1597 char result[32]; 3639 1598 3640 seedsize = crypto_rng_seedsize(tfm); 1599 seedsize = crypto_rng_seedsize(tfm); 3641 1600 3642 seed = kmalloc(seedsize, GFP_KERNEL); 1601 seed = kmalloc(seedsize, GFP_KERNEL); 3643 if (!seed) { 1602 if (!seed) { 3644 printk(KERN_ERR "alg: cprng: 1603 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 3645 "for %s\n", algo); 1604 "for %s\n", algo); 3646 return -ENOMEM; 1605 return -ENOMEM; 3647 } 1606 } 3648 1607 3649 for (i = 0; i < tcount; i++) { 1608 for (i = 0; i < tcount; i++) { 3650 memset(result, 0, 32); 1609 memset(result, 0, 32); 3651 1610 3652 memcpy(seed, template[i].v, t 1611 memcpy(seed, template[i].v, template[i].vlen); 3653 memcpy(seed + template[i].vle 1612 memcpy(seed + template[i].vlen, template[i].key, 3654 template[i].klen); 1613 template[i].klen); 3655 memcpy(seed + template[i].vle 1614 memcpy(seed + template[i].vlen + template[i].klen, 3656 template[i].dt, templa 1615 template[i].dt, template[i].dtlen); 3657 1616 3658 err = crypto_rng_reset(tfm, s 1617 err = crypto_rng_reset(tfm, seed, seedsize); 3659 if (err) { 1618 if (err) { 3660 printk(KERN_ERR "alg: 1619 printk(KERN_ERR "alg: cprng: Failed to reset rng " 3661 "for %s\n", al 1620 "for %s\n", algo); 3662 goto out; 1621 goto out; 3663 } 1622 } 3664 1623 3665 for (j = 0; j < template[i].l 1624 for (j = 0; j < template[i].loops; j++) { 3666 err = crypto_rng_get_ 1625 err = crypto_rng_get_bytes(tfm, result, 3667 1626 template[i].rlen); 3668 if (err < 0) { 1627 if (err < 0) { 3669 printk(KERN_E 1628 printk(KERN_ERR "alg: cprng: Failed to obtain " 3670 "the c 1629 "the correct amount of random data for " 3671 "%s (r 1630 "%s (requested %d)\n", algo, 3672 templa 1631 template[i].rlen); 3673 goto out; 1632 goto out; 3674 } 1633 } 3675 } 1634 } 3676 1635 3677 err = memcmp(result, template 1636 err = memcmp(result, template[i].result, 3678 template[i].rlen 1637 template[i].rlen); 3679 if (err) { 1638 if (err) { 3680 printk(KERN_ERR "alg: 1639 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 3681 i, algo); 1640 i, algo); 3682 hexdump(result, templ 1641 hexdump(result, template[i].rlen); 3683 err = -EINVAL; 1642 err = -EINVAL; 3684 goto out; 1643 goto out; 3685 } 1644 } 3686 } 1645 } 3687 1646 3688 out: 1647 out: 3689 kfree(seed); 1648 kfree(seed); 3690 return err; 1649 return err; 3691 } 1650 } 3692 1651 >> 1652 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, >> 1653 u32 type, u32 mask) >> 1654 { >> 1655 struct crypto_aead *tfm; >> 1656 int err = 0; >> 1657 >> 1658 tfm = crypto_alloc_aead(driver, type, mask); >> 1659 if (IS_ERR(tfm)) { >> 1660 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " >> 1661 "%ld\n", driver, PTR_ERR(tfm)); >> 1662 return PTR_ERR(tfm); >> 1663 } >> 1664 >> 1665 if (desc->suite.aead.enc.vecs) { >> 1666 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, >> 1667 desc->suite.aead.enc.count); >> 1668 if (err) >> 1669 goto out; >> 1670 } >> 1671 >> 1672 if (!err && desc->suite.aead.dec.vecs) >> 1673 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, >> 1674 desc->suite.aead.dec.count); >> 1675 >> 1676 out: >> 1677 crypto_free_aead(tfm); >> 1678 return err; >> 1679 
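/*
 * test_cprng() above seeds a deterministic RNG from the test vector and then
 * pulls fixed-size blocks with crypto_rng_get_bytes().  For comparison, a
 * minimal sketch of ordinary (self-seeded) use of the same API; the "stdrng"
 * name and the NULL seed are illustrative assumptions.
 */
#include <crypto/rng.h>

static int example_get_random(u8 *out, unsigned int len)
{
        struct crypto_rng *rng;
        int err;

        rng = crypto_alloc_rng("stdrng", 0, 0); /* default in-kernel DRBG */
        if (IS_ERR(rng))
                return PTR_ERR(rng);

        /* A NULL seed asks the core to generate a fresh seed itself. */
        err = crypto_rng_reset(rng, NULL, crypto_rng_seedsize(rng));
        if (!err)
                err = crypto_rng_get_bytes(rng, out, len);

        crypto_free_rng(rng);
        return err;
}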
} >> 1680 3693 static int alg_test_cipher(const struct alg_t 1681 static int alg_test_cipher(const struct alg_test_desc *desc, 3694 const char *driver 1682 const char *driver, u32 type, u32 mask) 3695 { 1683 { 3696 const struct cipher_test_suite *suite << 3697 struct crypto_cipher *tfm; 1684 struct crypto_cipher *tfm; 3698 int err; !! 1685 int err = 0; 3699 1686 3700 tfm = crypto_alloc_cipher(driver, typ 1687 tfm = crypto_alloc_cipher(driver, type, mask); 3701 if (IS_ERR(tfm)) { 1688 if (IS_ERR(tfm)) { 3702 if (PTR_ERR(tfm) == -ENOENT) << 3703 return 0; << 3704 printk(KERN_ERR "alg: cipher: 1689 printk(KERN_ERR "alg: cipher: Failed to load transform for " 3705 "%s: %ld\n", driver, P 1690 "%s: %ld\n", driver, PTR_ERR(tfm)); 3706 return PTR_ERR(tfm); 1691 return PTR_ERR(tfm); 3707 } 1692 } 3708 1693 3709 err = test_cipher(tfm, ENCRYPT, suite !! 1694 if (desc->suite.cipher.enc.vecs) { 3710 if (!err) !! 1695 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 3711 err = test_cipher(tfm, DECRYP !! 1696 desc->suite.cipher.enc.count); >> 1697 if (err) >> 1698 goto out; >> 1699 } 3712 1700 >> 1701 if (desc->suite.cipher.dec.vecs) >> 1702 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, >> 1703 desc->suite.cipher.dec.count); >> 1704 >> 1705 out: 3713 crypto_free_cipher(tfm); 1706 crypto_free_cipher(tfm); 3714 return err; 1707 return err; 3715 } 1708 } 3716 1709 >> 1710 static int alg_test_skcipher(const struct alg_test_desc *desc, >> 1711 const char *driver, u32 type, u32 mask) >> 1712 { >> 1713 struct crypto_skcipher *tfm; >> 1714 int err = 0; >> 1715 >> 1716 tfm = crypto_alloc_skcipher(driver, type, mask); >> 1717 if (IS_ERR(tfm)) { >> 1718 printk(KERN_ERR "alg: skcipher: Failed to load transform for " >> 1719 "%s: %ld\n", driver, PTR_ERR(tfm)); >> 1720 return PTR_ERR(tfm); >> 1721 } >> 1722 >> 1723 if (desc->suite.cipher.enc.vecs) { >> 1724 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, >> 1725 desc->suite.cipher.enc.count); >> 1726 if (err) >> 1727 goto out; >> 1728 } >> 1729 >> 1730 if (desc->suite.cipher.dec.vecs) >> 1731 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, >> 1732 desc->suite.cipher.dec.count); >> 1733 >> 1734 out: >> 1735 crypto_free_skcipher(tfm); >> 1736 return err; >> 1737 } >> 1738 3717 static int alg_test_comp(const struct alg_tes 1739 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 3718 u32 type, u32 mask) 1740 u32 type, u32 mask) 3719 { 1741 { 3720 struct crypto_comp *comp; 1742 struct crypto_comp *comp; 3721 struct crypto_acomp *acomp; 1743 struct crypto_acomp *acomp; 3722 int err; 1744 int err; 3723 u32 algo_type = type & CRYPTO_ALG_TYP 1745 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK; 3724 1746 3725 if (algo_type == CRYPTO_ALG_TYPE_ACOM 1747 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) { 3726 acomp = crypto_alloc_acomp(dr 1748 acomp = crypto_alloc_acomp(driver, type, mask); 3727 if (IS_ERR(acomp)) { 1749 if (IS_ERR(acomp)) { 3728 if (PTR_ERR(acomp) == << 3729 return 0; << 3730 pr_err("alg: acomp: F 1750 pr_err("alg: acomp: Failed to load transform for %s: %ld\n", 3731 driver, PTR_ER 1751 driver, PTR_ERR(acomp)); 3732 return PTR_ERR(acomp) 1752 return PTR_ERR(acomp); 3733 } 1753 } 3734 err = test_acomp(acomp, desc- 1754 err = test_acomp(acomp, desc->suite.comp.comp.vecs, 3735 desc->suite. 1755 desc->suite.comp.decomp.vecs, 3736 desc->suite. 1756 desc->suite.comp.comp.count, 3737 desc->suite. 
1757 desc->suite.comp.decomp.count); 3738 crypto_free_acomp(acomp); 1758 crypto_free_acomp(acomp); 3739 } else { 1759 } else { 3740 comp = crypto_alloc_comp(driv 1760 comp = crypto_alloc_comp(driver, type, mask); 3741 if (IS_ERR(comp)) { 1761 if (IS_ERR(comp)) { 3742 if (PTR_ERR(comp) == << 3743 return 0; << 3744 pr_err("alg: comp: Fa 1762 pr_err("alg: comp: Failed to load transform for %s: %ld\n", 3745 driver, PTR_ER 1763 driver, PTR_ERR(comp)); 3746 return PTR_ERR(comp); 1764 return PTR_ERR(comp); 3747 } 1765 } 3748 1766 3749 err = test_comp(comp, desc->s 1767 err = test_comp(comp, desc->suite.comp.comp.vecs, 3750 desc->suite.c 1768 desc->suite.comp.decomp.vecs, 3751 desc->suite.c 1769 desc->suite.comp.comp.count, 3752 desc->suite.c 1770 desc->suite.comp.decomp.count); 3753 1771 3754 crypto_free_comp(comp); 1772 crypto_free_comp(comp); 3755 } 1773 } 3756 return err; 1774 return err; 3757 } 1775 } 3758 1776 >> 1777 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, >> 1778 u32 type, u32 mask) >> 1779 { >> 1780 struct crypto_ahash *tfm; >> 1781 int err; >> 1782 >> 1783 tfm = crypto_alloc_ahash(driver, type, mask); >> 1784 if (IS_ERR(tfm)) { >> 1785 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " >> 1786 "%ld\n", driver, PTR_ERR(tfm)); >> 1787 return PTR_ERR(tfm); >> 1788 } >> 1789 >> 1790 err = test_hash(tfm, desc->suite.hash.vecs, >> 1791 desc->suite.hash.count, true); >> 1792 if (!err) >> 1793 err = test_hash(tfm, desc->suite.hash.vecs, >> 1794 desc->suite.hash.count, false); >> 1795 >> 1796 crypto_free_ahash(tfm); >> 1797 return err; >> 1798 } >> 1799 3759 static int alg_test_crc32c(const struct alg_t 1800 static int alg_test_crc32c(const struct alg_test_desc *desc, 3760 const char *driver 1801 const char *driver, u32 type, u32 mask) 3761 { 1802 { 3762 struct crypto_shash *tfm; 1803 struct crypto_shash *tfm; 3763 __le32 val; !! 1804 u32 val; 3764 int err; 1805 int err; 3765 1806 3766 err = alg_test_hash(desc, driver, typ 1807 err = alg_test_hash(desc, driver, type, mask); 3767 if (err) 1808 if (err) 3768 return err; !! 1809 goto out; 3769 1810 3770 tfm = crypto_alloc_shash(driver, type 1811 tfm = crypto_alloc_shash(driver, type, mask); 3771 if (IS_ERR(tfm)) { 1812 if (IS_ERR(tfm)) { 3772 if (PTR_ERR(tfm) == -ENOENT) << 3773 /* << 3774 * This crc32c implem << 3775 * ahash API, not the << 3776 * of the test is not << 3777 */ << 3778 return 0; << 3779 } << 3780 printk(KERN_ERR "alg: crc32c: 1813 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 3781 "%ld\n", driver, PTR_E 1814 "%ld\n", driver, PTR_ERR(tfm)); 3782 return PTR_ERR(tfm); !! 1815 err = PTR_ERR(tfm); >> 1816 goto out; 3783 } 1817 } 3784 driver = crypto_shash_driver_name(tfm << 3785 1818 3786 do { 1819 do { 3787 SHASH_DESC_ON_STACK(shash, tf 1820 SHASH_DESC_ON_STACK(shash, tfm); 3788 u32 *ctx = (u32 *)shash_desc_ 1821 u32 *ctx = (u32 *)shash_desc_ctx(shash); 3789 1822 3790 shash->tfm = tfm; 1823 shash->tfm = tfm; >> 1824 shash->flags = 0; 3791 1825 3792 *ctx = 420553207; !! 1826 *ctx = le32_to_cpu(420553207); 3793 err = crypto_shash_final(shas 1827 err = crypto_shash_final(shash, (u8 *)&val); 3794 if (err) { 1828 if (err) { 3795 printk(KERN_ERR "alg: 1829 printk(KERN_ERR "alg: crc32c: Operation failed for " 3796 "%s: %d\n", dr 1830 "%s: %d\n", driver, err); 3797 break; 1831 break; 3798 } 1832 } 3799 1833 3800 if (val != cpu_to_le32(~42055 !! 1834 if (val != ~420553207) { 3801 pr_err("alg: crc32c: !! 
1835 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 3802 driver, le32_t !! 1836 "%d\n", driver, val); 3803 err = -EINVAL; 1837 err = -EINVAL; 3804 } 1838 } 3805 } while (0); 1839 } while (0); 3806 1840 3807 crypto_free_shash(tfm); 1841 crypto_free_shash(tfm); 3808 1842 >> 1843 out: 3809 return err; 1844 return err; 3810 } 1845 } 3811 1846 3812 static int alg_test_cprng(const struct alg_te 1847 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 3813 u32 type, u32 mask) 1848 u32 type, u32 mask) 3814 { 1849 { 3815 struct crypto_rng *rng; 1850 struct crypto_rng *rng; 3816 int err; 1851 int err; 3817 1852 3818 rng = crypto_alloc_rng(driver, type, 1853 rng = crypto_alloc_rng(driver, type, mask); 3819 if (IS_ERR(rng)) { 1854 if (IS_ERR(rng)) { 3820 if (PTR_ERR(rng) == -ENOENT) << 3821 return 0; << 3822 printk(KERN_ERR "alg: cprng: 1855 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 3823 "%ld\n", driver, PTR_E 1856 "%ld\n", driver, PTR_ERR(rng)); 3824 return PTR_ERR(rng); 1857 return PTR_ERR(rng); 3825 } 1858 } 3826 1859 3827 err = test_cprng(rng, desc->suite.cpr 1860 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 3828 1861 3829 crypto_free_rng(rng); 1862 crypto_free_rng(rng); 3830 1863 3831 return err; 1864 return err; 3832 } 1865 } 3833 1866 3834 1867 3835 static int drbg_cavs_test(const struct drbg_t 1868 static int drbg_cavs_test(const struct drbg_testvec *test, int pr, 3836 const char *driver, 1869 const char *driver, u32 type, u32 mask) 3837 { 1870 { 3838 int ret = -EAGAIN; 1871 int ret = -EAGAIN; 3839 struct crypto_rng *drng; 1872 struct crypto_rng *drng; 3840 struct drbg_test_data test_data; 1873 struct drbg_test_data test_data; 3841 struct drbg_string addtl, pers, teste 1874 struct drbg_string addtl, pers, testentropy; 3842 unsigned char *buf = kzalloc(test->ex 1875 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 3843 1876 3844 if (!buf) 1877 if (!buf) 3845 return -ENOMEM; 1878 return -ENOMEM; 3846 1879 3847 drng = crypto_alloc_rng(driver, type, 1880 drng = crypto_alloc_rng(driver, type, mask); 3848 if (IS_ERR(drng)) { 1881 if (IS_ERR(drng)) { 3849 kfree_sensitive(buf); << 3850 if (PTR_ERR(drng) == -ENOENT) << 3851 return 0; << 3852 printk(KERN_ERR "alg: drbg: c 1882 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 3853 "%s\n", driver); 1883 "%s\n", driver); 3854 return PTR_ERR(drng); !! 
1884 kzfree(buf); >> 1885 return -ENOMEM; 3855 } 1886 } 3856 1887 3857 test_data.testentropy = &testentropy; 1888 test_data.testentropy = &testentropy; 3858 drbg_string_fill(&testentropy, test-> 1889 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 3859 drbg_string_fill(&pers, test->pers, t 1890 drbg_string_fill(&pers, test->pers, test->perslen); 3860 ret = crypto_drbg_reset_test(drng, &p 1891 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 3861 if (ret) { 1892 if (ret) { 3862 printk(KERN_ERR "alg: drbg: F 1893 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 3863 goto outbuf; 1894 goto outbuf; 3864 } 1895 } 3865 1896 3866 drbg_string_fill(&addtl, test->addtla 1897 drbg_string_fill(&addtl, test->addtla, test->addtllen); 3867 if (pr) { 1898 if (pr) { 3868 drbg_string_fill(&testentropy 1899 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 3869 ret = crypto_drbg_get_bytes_a 1900 ret = crypto_drbg_get_bytes_addtl_test(drng, 3870 buf, test->expectedle 1901 buf, test->expectedlen, &addtl, &test_data); 3871 } else { 1902 } else { 3872 ret = crypto_drbg_get_bytes_a 1903 ret = crypto_drbg_get_bytes_addtl(drng, 3873 buf, test->expectedle 1904 buf, test->expectedlen, &addtl); 3874 } 1905 } 3875 if (ret < 0) { 1906 if (ret < 0) { 3876 printk(KERN_ERR "alg: drbg: c 1907 printk(KERN_ERR "alg: drbg: could not obtain random data for " 3877 "driver %s\n", driver) 1908 "driver %s\n", driver); 3878 goto outbuf; 1909 goto outbuf; 3879 } 1910 } 3880 1911 3881 drbg_string_fill(&addtl, test->addtlb 1912 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 3882 if (pr) { 1913 if (pr) { 3883 drbg_string_fill(&testentropy 1914 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 3884 ret = crypto_drbg_get_bytes_a 1915 ret = crypto_drbg_get_bytes_addtl_test(drng, 3885 buf, test->expectedle 1916 buf, test->expectedlen, &addtl, &test_data); 3886 } else { 1917 } else { 3887 ret = crypto_drbg_get_bytes_a 1918 ret = crypto_drbg_get_bytes_addtl(drng, 3888 buf, test->expectedle 1919 buf, test->expectedlen, &addtl); 3889 } 1920 } 3890 if (ret < 0) { 1921 if (ret < 0) { 3891 printk(KERN_ERR "alg: drbg: c 1922 printk(KERN_ERR "alg: drbg: could not obtain random data for " 3892 "driver %s\n", driver) 1923 "driver %s\n", driver); 3893 goto outbuf; 1924 goto outbuf; 3894 } 1925 } 3895 1926 3896 ret = memcmp(test->expected, buf, tes 1927 ret = memcmp(test->expected, buf, test->expectedlen); 3897 1928 3898 outbuf: 1929 outbuf: 3899 crypto_free_rng(drng); 1930 crypto_free_rng(drng); 3900 kfree_sensitive(buf); !! 
1931 kzfree(buf); 3901 return ret; 1932 return ret; 3902 } 1933 } 3903 1934 3904 1935 3905 static int alg_test_drbg(const struct alg_tes 1936 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 3906 u32 type, u32 mask) 1937 u32 type, u32 mask) 3907 { 1938 { 3908 int err = 0; 1939 int err = 0; 3909 int pr = 0; 1940 int pr = 0; 3910 int i = 0; 1941 int i = 0; 3911 const struct drbg_testvec *template = 1942 const struct drbg_testvec *template = desc->suite.drbg.vecs; 3912 unsigned int tcount = desc->suite.drb 1943 unsigned int tcount = desc->suite.drbg.count; 3913 1944 3914 if (0 == memcmp(driver, "drbg_pr_", 8 1945 if (0 == memcmp(driver, "drbg_pr_", 8)) 3915 pr = 1; 1946 pr = 1; 3916 1947 3917 for (i = 0; i < tcount; i++) { 1948 for (i = 0; i < tcount; i++) { 3918 err = drbg_cavs_test(&templat 1949 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 3919 if (err) { 1950 if (err) { 3920 printk(KERN_ERR "alg: 1951 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 3921 i, driver); 1952 i, driver); 3922 err = -EINVAL; 1953 err = -EINVAL; 3923 break; 1954 break; 3924 } 1955 } 3925 } 1956 } 3926 return err; 1957 return err; 3927 1958 3928 } 1959 } 3929 1960 3930 static int do_test_kpp(struct crypto_kpp *tfm 1961 static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec, 3931 const char *alg) 1962 const char *alg) 3932 { 1963 { 3933 struct kpp_request *req; 1964 struct kpp_request *req; 3934 void *input_buf = NULL; 1965 void *input_buf = NULL; 3935 void *output_buf = NULL; 1966 void *output_buf = NULL; 3936 void *a_public = NULL; 1967 void *a_public = NULL; 3937 void *a_ss = NULL; 1968 void *a_ss = NULL; 3938 void *shared_secret = NULL; 1969 void *shared_secret = NULL; 3939 struct crypto_wait wait; 1970 struct crypto_wait wait; 3940 unsigned int out_len_max; 1971 unsigned int out_len_max; 3941 int err = -ENOMEM; 1972 int err = -ENOMEM; 3942 struct scatterlist src, dst; 1973 struct scatterlist src, dst; 3943 1974 3944 req = kpp_request_alloc(tfm, GFP_KERN 1975 req = kpp_request_alloc(tfm, GFP_KERNEL); 3945 if (!req) 1976 if (!req) 3946 return err; 1977 return err; 3947 1978 3948 crypto_init_wait(&wait); 1979 crypto_init_wait(&wait); 3949 1980 3950 err = crypto_kpp_set_secret(tfm, vec- 1981 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size); 3951 if (err < 0) 1982 if (err < 0) 3952 goto free_req; 1983 goto free_req; 3953 1984 3954 out_len_max = crypto_kpp_maxsize(tfm) 1985 out_len_max = crypto_kpp_maxsize(tfm); 3955 output_buf = kzalloc(out_len_max, GFP 1986 output_buf = kzalloc(out_len_max, GFP_KERNEL); 3956 if (!output_buf) { 1987 if (!output_buf) { 3957 err = -ENOMEM; 1988 err = -ENOMEM; 3958 goto free_req; 1989 goto free_req; 3959 } 1990 } 3960 1991 3961 /* Use appropriate parameter as base 1992 /* Use appropriate parameter as base */ 3962 kpp_request_set_input(req, NULL, 0); 1993 kpp_request_set_input(req, NULL, 0); 3963 sg_init_one(&dst, output_buf, out_len 1994 sg_init_one(&dst, output_buf, out_len_max); 3964 kpp_request_set_output(req, &dst, out 1995 kpp_request_set_output(req, &dst, out_len_max); 3965 kpp_request_set_callback(req, CRYPTO_ 1996 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 3966 crypto_req_d 1997 crypto_req_done, &wait); 3967 1998 3968 /* Compute party A's public key */ 1999 /* Compute party A's public key */ 3969 err = crypto_wait_req(crypto_kpp_gene 2000 err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait); 3970 if (err) { 2001 if (err) { 3971 pr_err("alg: %s: Party A: gen 2002 
pr_err("alg: %s: Party A: generate public key test failed. err %d\n", 3972 alg, err); 2003 alg, err); 3973 goto free_output; 2004 goto free_output; 3974 } 2005 } 3975 2006 3976 if (vec->genkey) { 2007 if (vec->genkey) { 3977 /* Save party A's public key 2008 /* Save party A's public key */ 3978 a_public = kmemdup(sg_virt(re !! 2009 a_public = kzalloc(out_len_max, GFP_KERNEL); 3979 if (!a_public) { 2010 if (!a_public) { 3980 err = -ENOMEM; 2011 err = -ENOMEM; 3981 goto free_output; 2012 goto free_output; 3982 } 2013 } >> 2014 memcpy(a_public, sg_virt(req->dst), out_len_max); 3983 } else { 2015 } else { 3984 /* Verify calculated public k 2016 /* Verify calculated public key */ 3985 if (memcmp(vec->expected_a_pu 2017 if (memcmp(vec->expected_a_public, sg_virt(req->dst), 3986 vec->expected_a_pu 2018 vec->expected_a_public_size)) { 3987 pr_err("alg: %s: Part 2019 pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n", 3988 alg); 2020 alg); 3989 err = -EINVAL; 2021 err = -EINVAL; 3990 goto free_output; 2022 goto free_output; 3991 } 2023 } 3992 } 2024 } 3993 2025 3994 /* Calculate shared secret key by usi 2026 /* Calculate shared secret key by using counter part (b) public key. */ 3995 input_buf = kmemdup(vec->b_public, ve !! 2027 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL); 3996 if (!input_buf) { 2028 if (!input_buf) { 3997 err = -ENOMEM; 2029 err = -ENOMEM; 3998 goto free_output; 2030 goto free_output; 3999 } 2031 } 4000 2032 >> 2033 memcpy(input_buf, vec->b_public, vec->b_public_size); 4001 sg_init_one(&src, input_buf, vec->b_p 2034 sg_init_one(&src, input_buf, vec->b_public_size); 4002 sg_init_one(&dst, output_buf, out_len 2035 sg_init_one(&dst, output_buf, out_len_max); 4003 kpp_request_set_input(req, &src, vec- 2036 kpp_request_set_input(req, &src, vec->b_public_size); 4004 kpp_request_set_output(req, &dst, out 2037 kpp_request_set_output(req, &dst, out_len_max); 4005 kpp_request_set_callback(req, CRYPTO_ 2038 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 4006 crypto_req_d 2039 crypto_req_done, &wait); 4007 err = crypto_wait_req(crypto_kpp_comp 2040 err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait); 4008 if (err) { 2041 if (err) { 4009 pr_err("alg: %s: Party A: com 2042 pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n", 4010 alg, err); 2043 alg, err); 4011 goto free_all; 2044 goto free_all; 4012 } 2045 } 4013 2046 4014 if (vec->genkey) { 2047 if (vec->genkey) { 4015 /* Save the shared secret obt 2048 /* Save the shared secret obtained by party A */ 4016 a_ss = kmemdup(sg_virt(req->d !! 2049 a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL); 4017 if (!a_ss) { 2050 if (!a_ss) { 4018 err = -ENOMEM; 2051 err = -ENOMEM; 4019 goto free_all; 2052 goto free_all; 4020 } 2053 } >> 2054 memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size); 4021 2055 4022 /* 2056 /* 4023 * Calculate party B's shared 2057 * Calculate party B's shared secret by using party A's 4024 * public key. 2058 * public key. 
4025 */ 2059 */ 4026 err = crypto_kpp_set_secret(t 2060 err = crypto_kpp_set_secret(tfm, vec->b_secret, 4027 v 2061 vec->b_secret_size); 4028 if (err < 0) 2062 if (err < 0) 4029 goto free_all; 2063 goto free_all; 4030 2064 4031 sg_init_one(&src, a_public, v 2065 sg_init_one(&src, a_public, vec->expected_a_public_size); 4032 sg_init_one(&dst, output_buf, 2066 sg_init_one(&dst, output_buf, out_len_max); 4033 kpp_request_set_input(req, &s 2067 kpp_request_set_input(req, &src, vec->expected_a_public_size); 4034 kpp_request_set_output(req, & 2068 kpp_request_set_output(req, &dst, out_len_max); 4035 kpp_request_set_callback(req, 2069 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 4036 cryp 2070 crypto_req_done, &wait); 4037 err = crypto_wait_req(crypto_ 2071 err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), 4038 &wait); 2072 &wait); 4039 if (err) { 2073 if (err) { 4040 pr_err("alg: %s: Part 2074 pr_err("alg: %s: Party B: compute shared secret failed. err %d\n", 4041 alg, err); 2075 alg, err); 4042 goto free_all; 2076 goto free_all; 4043 } 2077 } 4044 2078 4045 shared_secret = a_ss; 2079 shared_secret = a_ss; 4046 } else { 2080 } else { 4047 shared_secret = (void *)vec-> 2081 shared_secret = (void *)vec->expected_ss; 4048 } 2082 } 4049 2083 4050 /* 2084 /* 4051 * verify shared secret from which th 2085 * verify shared secret from which the user will derive 4052 * secret key by executing whatever h 2086 * secret key by executing whatever hash it has chosen 4053 */ 2087 */ 4054 if (memcmp(shared_secret, sg_virt(req 2088 if (memcmp(shared_secret, sg_virt(req->dst), 4055 vec->expected_ss_size)) { 2089 vec->expected_ss_size)) { 4056 pr_err("alg: %s: compute shar 2090 pr_err("alg: %s: compute shared secret test failed. Invalid output\n", 4057 alg); 2091 alg); 4058 err = -EINVAL; 2092 err = -EINVAL; 4059 } 2093 } 4060 2094 4061 free_all: 2095 free_all: 4062 kfree(a_ss); 2096 kfree(a_ss); 4063 kfree(input_buf); 2097 kfree(input_buf); 4064 free_output: 2098 free_output: 4065 kfree(a_public); 2099 kfree(a_public); 4066 kfree(output_buf); 2100 kfree(output_buf); 4067 free_req: 2101 free_req: 4068 kpp_request_free(req); 2102 kpp_request_free(req); 4069 return err; 2103 return err; 4070 } 2104 } 4071 2105 4072 static int test_kpp(struct crypto_kpp *tfm, c 2106 static int test_kpp(struct crypto_kpp *tfm, const char *alg, 4073 const struct kpp_testvec 2107 const struct kpp_testvec *vecs, unsigned int tcount) 4074 { 2108 { 4075 int ret, i; 2109 int ret, i; 4076 2110 4077 for (i = 0; i < tcount; i++) { 2111 for (i = 0; i < tcount; i++) { 4078 ret = do_test_kpp(tfm, vecs++ 2112 ret = do_test_kpp(tfm, vecs++, alg); 4079 if (ret) { 2113 if (ret) { 4080 pr_err("alg: %s: test 2114 pr_err("alg: %s: test failed on vector %d, err=%d\n", 4081 alg, i + 1, re 2115 alg, i + 1, ret); 4082 return ret; 2116 return ret; 4083 } 2117 } 4084 } 2118 } 4085 return 0; 2119 return 0; 4086 } 2120 } 4087 2121 4088 static int alg_test_kpp(const struct alg_test 2122 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver, 4089 u32 type, u32 mask) 2123 u32 type, u32 mask) 4090 { 2124 { 4091 struct crypto_kpp *tfm; 2125 struct crypto_kpp *tfm; 4092 int err = 0; 2126 int err = 0; 4093 2127 4094 tfm = crypto_alloc_kpp(driver, type, 2128 tfm = crypto_alloc_kpp(driver, type, mask); 4095 if (IS_ERR(tfm)) { 2129 if (IS_ERR(tfm)) { 4096 if (PTR_ERR(tfm) == -ENOENT) << 4097 return 0; << 4098 pr_err("alg: kpp: Failed to l 2130 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n", 4099 driver, PTR_ERR(tfm)); 
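/*
 * do_test_kpp() above runs both sides of a key-agreement exchange.  The
 * consumer-side half of that flow (derive a shared secret from a peer's
 * public key) reduces to the sketch below; "dh" is an illustrative choice,
 * and the secret buffer is assumed to be packed already in the format
 * crypto_kpp_set_secret() expects (e.g. by crypto_dh_encode_key()).
 */
#include <crypto/kpp.h>
#include <linux/scatterlist.h>

static int example_kpp_shared_secret(const void *secret, unsigned int secret_len,
                                     void *peer_pub, unsigned int pub_len,
                                     void *ss, unsigned int ss_len)
{
        struct crypto_kpp *tfm;
        struct kpp_request *req;
        struct scatterlist src, dst;
        struct crypto_wait wait;
        int err;

        tfm = crypto_alloc_kpp("dh", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_kpp_set_secret(tfm, secret, secret_len);
        if (err)
                goto out_free_tfm;

        req = kpp_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        crypto_init_wait(&wait);
        sg_init_one(&src, peer_pub, pub_len);
        sg_init_one(&dst, ss, ss_len);  /* ss_len assumed >= crypto_kpp_maxsize(tfm) */
        kpp_request_set_input(req, &src, pub_len);
        kpp_request_set_output(req, &dst, ss_len);
        kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                 crypto_req_done, &wait);

        err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);

        kpp_request_free(req);
out_free_tfm:
        crypto_free_kpp(tfm);
        return err;
}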
2131 driver, PTR_ERR(tfm)); 4100 return PTR_ERR(tfm); 2132 return PTR_ERR(tfm); 4101 } 2133 } 4102 if (desc->suite.kpp.vecs) 2134 if (desc->suite.kpp.vecs) 4103 err = test_kpp(tfm, desc->alg 2135 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs, 4104 desc->suite.kp 2136 desc->suite.kpp.count); 4105 2137 4106 crypto_free_kpp(tfm); 2138 crypto_free_kpp(tfm); 4107 return err; 2139 return err; 4108 } 2140 } 4109 2141 4110 static u8 *test_pack_u32(u8 *dst, u32 val) << 4111 { << 4112 memcpy(dst, &val, sizeof(val)); << 4113 return dst + sizeof(val); << 4114 } << 4115 << 4116 static int test_akcipher_one(struct crypto_ak 2142 static int test_akcipher_one(struct crypto_akcipher *tfm, 4117 const struct akc 2143 const struct akcipher_testvec *vecs) 4118 { 2144 { 4119 char *xbuf[XBUFSIZE]; 2145 char *xbuf[XBUFSIZE]; 4120 struct akcipher_request *req; 2146 struct akcipher_request *req; 4121 void *outbuf_enc = NULL; 2147 void *outbuf_enc = NULL; 4122 void *outbuf_dec = NULL; 2148 void *outbuf_dec = NULL; 4123 struct crypto_wait wait; 2149 struct crypto_wait wait; 4124 unsigned int out_len_max, out_len = 0 2150 unsigned int out_len_max, out_len = 0; 4125 int err = -ENOMEM; 2151 int err = -ENOMEM; 4126 struct scatterlist src, dst, src_tab[ !! 2152 struct scatterlist src, dst, src_tab[2]; 4127 const char *m, *c; << 4128 unsigned int m_size, c_size; << 4129 const char *op; << 4130 u8 *key, *ptr; << 4131 2153 4132 if (testmgr_alloc_buf(xbuf)) 2154 if (testmgr_alloc_buf(xbuf)) 4133 return err; 2155 return err; 4134 2156 4135 req = akcipher_request_alloc(tfm, GFP 2157 req = akcipher_request_alloc(tfm, GFP_KERNEL); 4136 if (!req) 2158 if (!req) 4137 goto free_xbuf; 2159 goto free_xbuf; 4138 2160 4139 crypto_init_wait(&wait); 2161 crypto_init_wait(&wait); 4140 2162 4141 key = kmalloc(vecs->key_len + sizeof( << 4142 GFP_KERNEL); << 4143 if (!key) << 4144 goto free_req; << 4145 memcpy(key, vecs->key, vecs->key_len) << 4146 ptr = key + vecs->key_len; << 4147 ptr = test_pack_u32(ptr, vecs->algo); << 4148 ptr = test_pack_u32(ptr, vecs->param_ << 4149 memcpy(ptr, vecs->params, vecs->param << 4150 << 4151 if (vecs->public_key_vec) 2163 if (vecs->public_key_vec) 4152 err = crypto_akcipher_set_pub !! 2164 err = crypto_akcipher_set_pub_key(tfm, vecs->key, >> 2165 vecs->key_len); 4153 else 2166 else 4154 err = crypto_akcipher_set_pri !! 2167 err = crypto_akcipher_set_priv_key(tfm, vecs->key, >> 2168 vecs->key_len); 4155 if (err) 2169 if (err) 4156 goto free_key; !! 2170 goto free_req; 4157 2171 4158 /* << 4159 * First run test which do not requir << 4160 * encrypt or verify. << 4161 */ << 4162 err = -ENOMEM; 2172 err = -ENOMEM; 4163 out_len_max = crypto_akcipher_maxsize 2173 out_len_max = crypto_akcipher_maxsize(tfm); 4164 outbuf_enc = kzalloc(out_len_max, GFP 2174 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 4165 if (!outbuf_enc) 2175 if (!outbuf_enc) 4166 goto free_key; !! 2176 goto free_req; 4167 << 4168 if (!vecs->siggen_sigver_test) { << 4169 m = vecs->m; << 4170 m_size = vecs->m_size; << 4171 c = vecs->c; << 4172 c_size = vecs->c_size; << 4173 op = "encrypt"; << 4174 } else { << 4175 /* Swap args so we could keep << 4176 * in vecs->m, and cooked sig << 4177 */ << 4178 m = vecs->c; /* signature */ << 4179 m_size = vecs->c_size; << 4180 c = vecs->m; /* digest */ << 4181 c_size = vecs->m_size; << 4182 op = "verify"; << 4183 } << 4184 2177 4185 err = -E2BIG; !! 
2178 if (WARN_ON(vecs->m_size > PAGE_SIZE)) 4186 if (WARN_ON(m_size > PAGE_SIZE)) << 4187 goto free_all; 2179 goto free_all; 4188 memcpy(xbuf[0], m, m_size); << 4189 2180 4190 sg_init_table(src_tab, 3); !! 2181 memcpy(xbuf[0], vecs->m, vecs->m_size); >> 2182 >> 2183 sg_init_table(src_tab, 2); 4191 sg_set_buf(&src_tab[0], xbuf[0], 8); 2184 sg_set_buf(&src_tab[0], xbuf[0], 8); 4192 sg_set_buf(&src_tab[1], xbuf[0] + 8, !! 2185 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8); 4193 if (vecs->siggen_sigver_test) { !! 2186 sg_init_one(&dst, outbuf_enc, out_len_max); 4194 if (WARN_ON(c_size > PAGE_SIZ !! 2187 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size, 4195 goto free_all; !! 2188 out_len_max); 4196 memcpy(xbuf[1], c, c_size); << 4197 sg_set_buf(&src_tab[2], xbuf[ << 4198 akcipher_request_set_crypt(re << 4199 } else { << 4200 sg_init_one(&dst, outbuf_enc, << 4201 akcipher_request_set_crypt(re << 4202 ou << 4203 } << 4204 akcipher_request_set_callback(req, CR 2189 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 4205 crypto_ 2190 crypto_req_done, &wait); 4206 2191 4207 err = crypto_wait_req(vecs->siggen_si 2192 err = crypto_wait_req(vecs->siggen_sigver_test ? 4208 /* Run asymmetr !! 2193 /* Run asymmetric signature generation */ 4209 crypto_akcipher !! 2194 crypto_akcipher_sign(req) : 4210 /* Run asymmetr 2195 /* Run asymmetric encrypt */ 4211 crypto_akcipher 2196 crypto_akcipher_encrypt(req), &wait); 4212 if (err) { 2197 if (err) { 4213 pr_err("alg: akcipher: %s tes !! 2198 pr_err("alg: akcipher: encrypt test failed. err %d\n", err); 4214 goto free_all; 2199 goto free_all; 4215 } 2200 } 4216 if (!vecs->siggen_sigver_test && c) { !! 2201 if (req->dst_len != vecs->c_size) { 4217 if (req->dst_len != c_size) { !! 2202 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n"); 4218 pr_err("alg: akcipher !! 2203 err = -EINVAL; 4219 op); !! 2204 goto free_all; 4220 err = -EINVAL; << 4221 goto free_all; << 4222 } << 4223 /* verify that encrypted mess << 4224 if (memcmp(c, outbuf_enc, c_s << 4225 pr_err("alg: akcipher << 4226 op); << 4227 hexdump(outbuf_enc, c << 4228 err = -EINVAL; << 4229 goto free_all; << 4230 } << 4231 } 2205 } 4232 !! 2206 /* verify that encrypted message is equal to expected */ 4233 /* !! 2207 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) { 4234 * Don't invoke (decrypt or sign) tes !! 2208 pr_err("alg: akcipher: encrypt test failed. Invalid output\n"); 4235 * for vectors with only a public key !! 2209 hexdump(outbuf_enc, vecs->c_size); 4236 */ !! 2210 err = -EINVAL; >> 2211 goto free_all; >> 2212 } >> 2213 /* Don't invoke decrypt for vectors with public key */ 4237 if (vecs->public_key_vec) { 2214 if (vecs->public_key_vec) { 4238 err = 0; 2215 err = 0; 4239 goto free_all; 2216 goto free_all; 4240 } 2217 } 4241 outbuf_dec = kzalloc(out_len_max, GFP 2218 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 4242 if (!outbuf_dec) { 2219 if (!outbuf_dec) { 4243 err = -ENOMEM; 2220 err = -ENOMEM; 4244 goto free_all; 2221 goto free_all; 4245 } 2222 } 4246 2223 4247 if (!vecs->siggen_sigver_test && !c) !! 2224 if (WARN_ON(vecs->c_size > PAGE_SIZE)) 4248 c = outbuf_enc; << 4249 c_size = req->dst_len; << 4250 } << 4251 << 4252 err = -E2BIG; << 4253 op = vecs->siggen_sigver_test ? "sign << 4254 if (WARN_ON(c_size > PAGE_SIZE)) << 4255 goto free_all; 2225 goto free_all; 4256 memcpy(xbuf[0], c, c_size); << 4257 2226 4258 sg_init_one(&src, xbuf[0], c_size); !! 
2227 memcpy(xbuf[0], vecs->c, vecs->c_size); >> 2228 >> 2229 sg_init_one(&src, xbuf[0], vecs->c_size); 4259 sg_init_one(&dst, outbuf_dec, out_len 2230 sg_init_one(&dst, outbuf_dec, out_len_max); 4260 crypto_init_wait(&wait); 2231 crypto_init_wait(&wait); 4261 akcipher_request_set_crypt(req, &src, !! 2232 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max); 4262 2233 4263 err = crypto_wait_req(vecs->siggen_si 2234 err = crypto_wait_req(vecs->siggen_sigver_test ? 4264 /* Run asymmetr !! 2235 /* Run asymmetric signature verification */ 4265 crypto_akcipher !! 2236 crypto_akcipher_verify(req) : 4266 /* Run asymmetr 2237 /* Run asymmetric decrypt */ 4267 crypto_akcipher 2238 crypto_akcipher_decrypt(req), &wait); 4268 if (err) { 2239 if (err) { 4269 pr_err("alg: akcipher: %s tes !! 2240 pr_err("alg: akcipher: decrypt test failed. err %d\n", err); 4270 goto free_all; 2241 goto free_all; 4271 } 2242 } 4272 out_len = req->dst_len; 2243 out_len = req->dst_len; 4273 if (out_len < m_size) { !! 2244 if (out_len < vecs->m_size) { 4274 pr_err("alg: akcipher: %s tes !! 2245 pr_err("alg: akcipher: decrypt test failed. " 4275 op, out_len); !! 2246 "Invalid output len %u\n", out_len); 4276 err = -EINVAL; 2247 err = -EINVAL; 4277 goto free_all; 2248 goto free_all; 4278 } 2249 } 4279 /* verify that decrypted message is e 2250 /* verify that decrypted message is equal to the original msg */ 4280 if (memchr_inv(outbuf_dec, 0, out_len !! 2251 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) || 4281 memcmp(m, outbuf_dec + out_len - !! 2252 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size, 4282 pr_err("alg: akcipher: %s tes !! 2253 vecs->m_size)) { >> 2254 pr_err("alg: akcipher: decrypt test failed. Invalid output\n"); 4283 hexdump(outbuf_dec, out_len); 2255 hexdump(outbuf_dec, out_len); 4284 err = -EINVAL; 2256 err = -EINVAL; 4285 } 2257 } 4286 free_all: 2258 free_all: 4287 kfree(outbuf_dec); 2259 kfree(outbuf_dec); 4288 kfree(outbuf_enc); 2260 kfree(outbuf_enc); 4289 free_key: << 4290 kfree(key); << 4291 free_req: 2261 free_req: 4292 akcipher_request_free(req); 2262 akcipher_request_free(req); 4293 free_xbuf: 2263 free_xbuf: 4294 testmgr_free_buf(xbuf); 2264 testmgr_free_buf(xbuf); 4295 return err; 2265 return err; 4296 } 2266 } 4297 2267 4298 static int test_akcipher(struct crypto_akciph 2268 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 4299 const struct akciphe 2269 const struct akcipher_testvec *vecs, 4300 unsigned int tcount) 2270 unsigned int tcount) 4301 { 2271 { 4302 const char *algo = 2272 const char *algo = 4303 crypto_tfm_alg_driver_name(cr 2273 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm)); 4304 int ret, i; 2274 int ret, i; 4305 2275 4306 for (i = 0; i < tcount; i++) { 2276 for (i = 0; i < tcount; i++) { 4307 ret = test_akcipher_one(tfm, 2277 ret = test_akcipher_one(tfm, vecs++); 4308 if (!ret) 2278 if (!ret) 4309 continue; 2279 continue; 4310 2280 4311 pr_err("alg: akcipher: test % 2281 pr_err("alg: akcipher: test %d failed for %s, err=%d\n", 4312 i + 1, algo, ret); 2282 i + 1, algo, ret); 4313 return ret; 2283 return ret; 4314 } 2284 } 4315 return 0; 2285 return 0; 4316 } 2286 } 4317 2287 4318 static int alg_test_akcipher(const struct alg 2288 static int alg_test_akcipher(const struct alg_test_desc *desc, 4319 const char *driv 2289 const char *driver, u32 type, u32 mask) 4320 { 2290 { 4321 struct crypto_akcipher *tfm; 2291 struct crypto_akcipher *tfm; 4322 int err = 0; 2292 int err = 0; 4323 2293 4324 tfm = crypto_alloc_akcipher(driver, t 
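/*
 * test_akcipher_one() above round-trips each vector through the akcipher
 * API.  The encrypt half of that flow, taken in isolation, looks like the
 * sketch below; "rsa", the BER-encoded public key and the caller-sized
 * output buffer are illustrative assumptions.
 */
#include <crypto/akcipher.h>
#include <linux/scatterlist.h>

static int example_rsa_encrypt(const void *key, unsigned int keylen,
                               void *msg, unsigned int msg_len,
                               void *out, unsigned int out_len)
{
        struct crypto_akcipher *tfm;
        struct akcipher_request *req;
        struct scatterlist src, dst;
        struct crypto_wait wait;
        int err;

        tfm = crypto_alloc_akcipher("rsa", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_akcipher_set_pub_key(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        if (out_len < crypto_akcipher_maxsize(tfm)) {
                err = -EOVERFLOW;
                goto out_free_tfm;
        }

        req = akcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        crypto_init_wait(&wait);
        sg_init_one(&src, msg, msg_len);
        sg_init_one(&dst, out, out_len);
        akcipher_request_set_crypt(req, &src, &dst, msg_len, out_len);
        akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      crypto_req_done, &wait);

        err = crypto_wait_req(crypto_akcipher_encrypt(req), &wait);
        /* On success, req->dst_len holds the actual ciphertext length. */

        akcipher_request_free(req);
out_free_tfm:
        crypto_free_akcipher(tfm);
        return err;
}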
2294 tfm = crypto_alloc_akcipher(driver, type, mask); 4325 if (IS_ERR(tfm)) { 2295 if (IS_ERR(tfm)) { 4326 if (PTR_ERR(tfm) == -ENOENT) << 4327 return 0; << 4328 pr_err("alg: akcipher: Failed 2296 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 4329 driver, PTR_ERR(tfm)); 2297 driver, PTR_ERR(tfm)); 4330 return PTR_ERR(tfm); 2298 return PTR_ERR(tfm); 4331 } 2299 } 4332 if (desc->suite.akcipher.vecs) 2300 if (desc->suite.akcipher.vecs) 4333 err = test_akcipher(tfm, desc 2301 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 4334 desc->sui 2302 desc->suite.akcipher.count); 4335 2303 4336 crypto_free_akcipher(tfm); 2304 crypto_free_akcipher(tfm); 4337 return err; 2305 return err; 4338 } 2306 } 4339 2307 4340 static int alg_test_null(const struct alg_tes 2308 static int alg_test_null(const struct alg_test_desc *desc, 4341 const char *driv 2309 const char *driver, u32 type, u32 mask) 4342 { 2310 { 4343 return 0; 2311 return 0; 4344 } 2312 } 4345 2313 4346 #define ____VECS(tv) .vecs = tv, .count = !! 2314 #define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) } 4347 #define __VECS(tv) { ____VECS(tv) } << 4348 2315 4349 /* Please keep this list sorted by algorithm 2316 /* Please keep this list sorted by algorithm name. */ 4350 static const struct alg_test_desc alg_test_de 2317 static const struct alg_test_desc alg_test_descs[] = { 4351 { 2318 { 4352 .alg = "adiantum(xchacha12,ae << 4353 .generic_driver = "adiantum(x << 4354 .test = alg_test_skcipher, << 4355 .suite = { << 4356 .cipher = __VECS(adia << 4357 }, << 4358 }, { << 4359 .alg = "adiantum(xchacha20,ae << 4360 .generic_driver = "adiantum(x << 4361 .test = alg_test_skcipher, << 4362 .suite = { << 4363 .cipher = __VECS(adia << 4364 }, << 4365 }, { << 4366 .alg = "aegis128", << 4367 .test = alg_test_aead, << 4368 .suite = { << 4369 .aead = __VECS(aegis1 << 4370 } << 4371 }, { << 4372 .alg = "ansi_cprng", 2319 .alg = "ansi_cprng", 4373 .test = alg_test_cprng, 2320 .test = alg_test_cprng, 4374 .suite = { 2321 .suite = { 4375 .cprng = __VECS(ansi_ 2322 .cprng = __VECS(ansi_cprng_aes_tv_template) 4376 } 2323 } 4377 }, { 2324 }, { 4378 .alg = "authenc(hmac(md5),ecb 2325 .alg = "authenc(hmac(md5),ecb(cipher_null))", 4379 .test = alg_test_aead, 2326 .test = alg_test_aead, 4380 .suite = { 2327 .suite = { 4381 .aead = __VECS(hmac_m !! 2328 .aead = { >> 2329 .enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template), >> 2330 .dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template) >> 2331 } 4382 } 2332 } 4383 }, { 2333 }, { 4384 .alg = "authenc(hmac(sha1),cb 2334 .alg = "authenc(hmac(sha1),cbc(aes))", 4385 .test = alg_test_aead, 2335 .test = alg_test_aead, 4386 .fips_allowed = 1, 2336 .fips_allowed = 1, 4387 .suite = { 2337 .suite = { 4388 .aead = __VECS(hmac_s !! 2338 .aead = { >> 2339 .enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp) >> 2340 } 4389 } 2341 } 4390 }, { 2342 }, { 4391 .alg = "authenc(hmac(sha1),cb 2343 .alg = "authenc(hmac(sha1),cbc(des))", 4392 .test = alg_test_aead, 2344 .test = alg_test_aead, 4393 .suite = { 2345 .suite = { 4394 .aead = __VECS(hmac_s !! 2346 .aead = { >> 2347 .enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp) >> 2348 } 4395 } 2349 } 4396 }, { 2350 }, { 4397 .alg = "authenc(hmac(sha1),cb 2351 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 4398 .test = alg_test_aead, 2352 .test = alg_test_aead, >> 2353 .fips_allowed = 1, 4399 .suite = { 2354 .suite = { 4400 .aead = __VECS(hmac_s !! 
2355 .aead = { >> 2356 .enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp) >> 2357 } 4401 } 2358 } 4402 }, { 2359 }, { 4403 .alg = "authenc(hmac(sha1),ct 2360 .alg = "authenc(hmac(sha1),ctr(aes))", 4404 .test = alg_test_null, 2361 .test = alg_test_null, 4405 .fips_allowed = 1, 2362 .fips_allowed = 1, 4406 }, { 2363 }, { 4407 .alg = "authenc(hmac(sha1),ec 2364 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 4408 .test = alg_test_aead, 2365 .test = alg_test_aead, 4409 .suite = { 2366 .suite = { 4410 .aead = __VECS(hmac_s !! 2367 .aead = { >> 2368 .enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp), >> 2369 .dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp) >> 2370 } 4411 } 2371 } 4412 }, { 2372 }, { 4413 .alg = "authenc(hmac(sha1),rf 2373 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))", 4414 .test = alg_test_null, 2374 .test = alg_test_null, 4415 .fips_allowed = 1, 2375 .fips_allowed = 1, 4416 }, { 2376 }, { 4417 .alg = "authenc(hmac(sha224), 2377 .alg = "authenc(hmac(sha224),cbc(des))", 4418 .test = alg_test_aead, 2378 .test = alg_test_aead, 4419 .suite = { 2379 .suite = { 4420 .aead = __VECS(hmac_s !! 2380 .aead = { >> 2381 .enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp) >> 2382 } 4421 } 2383 } 4422 }, { 2384 }, { 4423 .alg = "authenc(hmac(sha224), 2385 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 4424 .test = alg_test_aead, 2386 .test = alg_test_aead, >> 2387 .fips_allowed = 1, 4425 .suite = { 2388 .suite = { 4426 .aead = __VECS(hmac_s !! 2389 .aead = { >> 2390 .enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp) >> 2391 } 4427 } 2392 } 4428 }, { 2393 }, { 4429 .alg = "authenc(hmac(sha256), 2394 .alg = "authenc(hmac(sha256),cbc(aes))", 4430 .test = alg_test_aead, 2395 .test = alg_test_aead, 4431 .fips_allowed = 1, 2396 .fips_allowed = 1, 4432 .suite = { 2397 .suite = { 4433 .aead = __VECS(hmac_s !! 2398 .aead = { >> 2399 .enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp) >> 2400 } 4434 } 2401 } 4435 }, { 2402 }, { 4436 .alg = "authenc(hmac(sha256), 2403 .alg = "authenc(hmac(sha256),cbc(des))", 4437 .test = alg_test_aead, 2404 .test = alg_test_aead, 4438 .suite = { 2405 .suite = { 4439 .aead = __VECS(hmac_s !! 2406 .aead = { >> 2407 .enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp) >> 2408 } 4440 } 2409 } 4441 }, { 2410 }, { 4442 .alg = "authenc(hmac(sha256), 2411 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 4443 .test = alg_test_aead, 2412 .test = alg_test_aead, >> 2413 .fips_allowed = 1, 4444 .suite = { 2414 .suite = { 4445 .aead = __VECS(hmac_s !! 2415 .aead = { >> 2416 .enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp) >> 2417 } 4446 } 2418 } 4447 }, { 2419 }, { 4448 .alg = "authenc(hmac(sha256), 2420 .alg = "authenc(hmac(sha256),ctr(aes))", 4449 .test = alg_test_null, 2421 .test = alg_test_null, 4450 .fips_allowed = 1, 2422 .fips_allowed = 1, 4451 }, { 2423 }, { 4452 .alg = "authenc(hmac(sha256), 2424 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))", 4453 .test = alg_test_null, 2425 .test = alg_test_null, 4454 .fips_allowed = 1, 2426 .fips_allowed = 1, 4455 }, { 2427 }, { 4456 .alg = "authenc(hmac(sha384), 2428 .alg = "authenc(hmac(sha384),cbc(des))", 4457 .test = alg_test_aead, 2429 .test = alg_test_aead, 4458 .suite = { 2430 .suite = { 4459 .aead = __VECS(hmac_s !! 
2431 .aead = { >> 2432 .enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp) >> 2433 } 4460 } 2434 } 4461 }, { 2435 }, { 4462 .alg = "authenc(hmac(sha384), 2436 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 4463 .test = alg_test_aead, 2437 .test = alg_test_aead, >> 2438 .fips_allowed = 1, 4464 .suite = { 2439 .suite = { 4465 .aead = __VECS(hmac_s !! 2440 .aead = { >> 2441 .enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp) >> 2442 } 4466 } 2443 } 4467 }, { 2444 }, { 4468 .alg = "authenc(hmac(sha384), 2445 .alg = "authenc(hmac(sha384),ctr(aes))", 4469 .test = alg_test_null, 2446 .test = alg_test_null, 4470 .fips_allowed = 1, 2447 .fips_allowed = 1, 4471 }, { 2448 }, { 4472 .alg = "authenc(hmac(sha384), 2449 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))", 4473 .test = alg_test_null, 2450 .test = alg_test_null, 4474 .fips_allowed = 1, 2451 .fips_allowed = 1, 4475 }, { 2452 }, { 4476 .alg = "authenc(hmac(sha512), 2453 .alg = "authenc(hmac(sha512),cbc(aes))", 4477 .fips_allowed = 1, 2454 .fips_allowed = 1, 4478 .test = alg_test_aead, 2455 .test = alg_test_aead, 4479 .suite = { 2456 .suite = { 4480 .aead = __VECS(hmac_s !! 2457 .aead = { >> 2458 .enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp) >> 2459 } 4481 } 2460 } 4482 }, { 2461 }, { 4483 .alg = "authenc(hmac(sha512), 2462 .alg = "authenc(hmac(sha512),cbc(des))", 4484 .test = alg_test_aead, 2463 .test = alg_test_aead, 4485 .suite = { 2464 .suite = { 4486 .aead = __VECS(hmac_s !! 2465 .aead = { >> 2466 .enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp) >> 2467 } 4487 } 2468 } 4488 }, { 2469 }, { 4489 .alg = "authenc(hmac(sha512), 2470 .alg = "authenc(hmac(sha512),cbc(des3_ede))", 4490 .test = alg_test_aead, 2471 .test = alg_test_aead, >> 2472 .fips_allowed = 1, 4491 .suite = { 2473 .suite = { 4492 .aead = __VECS(hmac_s !! 2474 .aead = { >> 2475 .enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp) >> 2476 } 4493 } 2477 } 4494 }, { 2478 }, { 4495 .alg = "authenc(hmac(sha512), 2479 .alg = "authenc(hmac(sha512),ctr(aes))", 4496 .test = alg_test_null, 2480 .test = alg_test_null, 4497 .fips_allowed = 1, 2481 .fips_allowed = 1, 4498 }, { 2482 }, { 4499 .alg = "authenc(hmac(sha512), 2483 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))", 4500 .test = alg_test_null, 2484 .test = alg_test_null, 4501 .fips_allowed = 1, 2485 .fips_allowed = 1, 4502 }, { 2486 }, { 4503 .alg = "blake2b-160", << 4504 .test = alg_test_hash, << 4505 .fips_allowed = 0, << 4506 .suite = { << 4507 .hash = __VECS(blake2 << 4508 } << 4509 }, { << 4510 .alg = "blake2b-256", << 4511 .test = alg_test_hash, << 4512 .fips_allowed = 0, << 4513 .suite = { << 4514 .hash = __VECS(blake2 << 4515 } << 4516 }, { << 4517 .alg = "blake2b-384", << 4518 .test = alg_test_hash, << 4519 .fips_allowed = 0, << 4520 .suite = { << 4521 .hash = __VECS(blake2 << 4522 } << 4523 }, { << 4524 .alg = "blake2b-512", << 4525 .test = alg_test_hash, << 4526 .fips_allowed = 0, << 4527 .suite = { << 4528 .hash = __VECS(blake2 << 4529 } << 4530 }, { << 4531 .alg = "cbc(aes)", 2487 .alg = "cbc(aes)", 4532 .test = alg_test_skcipher, 2488 .test = alg_test_skcipher, 4533 .fips_allowed = 1, 2489 .fips_allowed = 1, 4534 .suite = { 2490 .suite = { 4535 .cipher = __VECS(aes_ !! 2491 .cipher = { 4536 }, !! 2492 .enc = __VECS(aes_cbc_enc_tv_template), >> 2493 .dec = __VECS(aes_cbc_dec_tv_template) >> 2494 } >> 2495 } 4537 }, { 2496 }, { 4538 .alg = "cbc(anubis)", 2497 .alg = "cbc(anubis)", 4539 .test = alg_test_skcipher, 2498 .test = alg_test_skcipher, 4540 .suite = { 2499 .suite = { 4541 .cipher = __VECS(anub !! 
2500 .cipher = { 4542 }, !! 2501 .enc = __VECS(anubis_cbc_enc_tv_template), 4543 }, { !! 2502 .dec = __VECS(anubis_cbc_dec_tv_template) 4544 .alg = "cbc(aria)", !! 2503 } 4545 .test = alg_test_skcipher, !! 2504 } 4546 .suite = { << 4547 .cipher = __VECS(aria << 4548 }, << 4549 }, { 2505 }, { 4550 .alg = "cbc(blowfish)", 2506 .alg = "cbc(blowfish)", 4551 .test = alg_test_skcipher, 2507 .test = alg_test_skcipher, 4552 .suite = { 2508 .suite = { 4553 .cipher = __VECS(bf_c !! 2509 .cipher = { 4554 }, !! 2510 .enc = __VECS(bf_cbc_enc_tv_template), >> 2511 .dec = __VECS(bf_cbc_dec_tv_template) >> 2512 } >> 2513 } 4555 }, { 2514 }, { 4556 .alg = "cbc(camellia)", 2515 .alg = "cbc(camellia)", 4557 .test = alg_test_skcipher, 2516 .test = alg_test_skcipher, 4558 .suite = { 2517 .suite = { 4559 .cipher = __VECS(came !! 2518 .cipher = { 4560 }, !! 2519 .enc = __VECS(camellia_cbc_enc_tv_template), >> 2520 .dec = __VECS(camellia_cbc_dec_tv_template) >> 2521 } >> 2522 } 4561 }, { 2523 }, { 4562 .alg = "cbc(cast5)", 2524 .alg = "cbc(cast5)", 4563 .test = alg_test_skcipher, 2525 .test = alg_test_skcipher, 4564 .suite = { 2526 .suite = { 4565 .cipher = __VECS(cast !! 2527 .cipher = { 4566 }, !! 2528 .enc = __VECS(cast5_cbc_enc_tv_template), >> 2529 .dec = __VECS(cast5_cbc_dec_tv_template) >> 2530 } >> 2531 } 4567 }, { 2532 }, { 4568 .alg = "cbc(cast6)", 2533 .alg = "cbc(cast6)", 4569 .test = alg_test_skcipher, 2534 .test = alg_test_skcipher, 4570 .suite = { 2535 .suite = { 4571 .cipher = __VECS(cast !! 2536 .cipher = { 4572 }, !! 2537 .enc = __VECS(cast6_cbc_enc_tv_template), >> 2538 .dec = __VECS(cast6_cbc_dec_tv_template) >> 2539 } >> 2540 } 4573 }, { 2541 }, { 4574 .alg = "cbc(des)", 2542 .alg = "cbc(des)", 4575 .test = alg_test_skcipher, 2543 .test = alg_test_skcipher, 4576 .suite = { 2544 .suite = { 4577 .cipher = __VECS(des_ !! 2545 .cipher = { 4578 }, !! 2546 .enc = __VECS(des_cbc_enc_tv_template), >> 2547 .dec = __VECS(des_cbc_dec_tv_template) >> 2548 } >> 2549 } 4579 }, { 2550 }, { 4580 .alg = "cbc(des3_ede)", 2551 .alg = "cbc(des3_ede)", 4581 .test = alg_test_skcipher, 2552 .test = alg_test_skcipher, 4582 .suite = { << 4583 .cipher = __VECS(des3 << 4584 }, << 4585 }, { << 4586 /* Same as cbc(aes) except th << 4587 * hardware secure memory whi << 4588 */ << 4589 .alg = "cbc(paes)", << 4590 .test = alg_test_null, << 4591 .fips_allowed = 1, 2553 .fips_allowed = 1, 4592 }, { << 4593 /* Same as cbc(sm4) except th << 4594 * hardware secure memory whi << 4595 */ << 4596 .alg = "cbc(psm4)", << 4597 .test = alg_test_null, << 4598 }, { << 4599 .alg = "cbc(serpent)", << 4600 .test = alg_test_skcipher, << 4601 .suite = { 2554 .suite = { 4602 .cipher = __VECS(serp !! 2555 .cipher = { 4603 }, !! 2556 .enc = __VECS(des3_ede_cbc_enc_tv_template), >> 2557 .dec = __VECS(des3_ede_cbc_dec_tv_template) >> 2558 } >> 2559 } 4604 }, { 2560 }, { 4605 .alg = "cbc(sm4)", !! 2561 .alg = "cbc(serpent)", 4606 .test = alg_test_skcipher, 2562 .test = alg_test_skcipher, 4607 .suite = { 2563 .suite = { 4608 .cipher = __VECS(sm4_ !! 2564 .cipher = { >> 2565 .enc = __VECS(serpent_cbc_enc_tv_template), >> 2566 .dec = __VECS(serpent_cbc_dec_tv_template) >> 2567 } 4609 } 2568 } 4610 }, { 2569 }, { 4611 .alg = "cbc(twofish)", 2570 .alg = "cbc(twofish)", 4612 .test = alg_test_skcipher, 2571 .test = alg_test_skcipher, 4613 .suite = { 2572 .suite = { 4614 .cipher = __VECS(tf_c !! 2573 .cipher = { 4615 }, !! 2574 .enc = __VECS(tf_cbc_enc_tv_template), 4616 }, { !! 
2575 .dec = __VECS(tf_cbc_dec_tv_template) 4617 #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390) !! 2576 } 4618 .alg = "cbc-paes-s390", << 4619 .fips_allowed = 1, << 4620 .test = alg_test_skcipher, << 4621 .suite = { << 4622 .cipher = __VECS(aes_ << 4623 } 2577 } 4624 }, { 2578 }, { 4625 #endif << 4626 .alg = "cbcmac(aes)", 2579 .alg = "cbcmac(aes)", >> 2580 .fips_allowed = 1, 4627 .test = alg_test_hash, 2581 .test = alg_test_hash, 4628 .suite = { 2582 .suite = { 4629 .hash = __VECS(aes_cb 2583 .hash = __VECS(aes_cbcmac_tv_template) 4630 } 2584 } 4631 }, { 2585 }, { 4632 .alg = "cbcmac(sm4)", << 4633 .test = alg_test_hash, << 4634 .suite = { << 4635 .hash = __VECS(sm4_cb << 4636 } << 4637 }, { << 4638 .alg = "ccm(aes)", 2586 .alg = "ccm(aes)", 4639 .generic_driver = "ccm_base(c << 4640 .test = alg_test_aead, 2587 .test = alg_test_aead, 4641 .fips_allowed = 1, 2588 .fips_allowed = 1, 4642 .suite = { 2589 .suite = { 4643 .aead = { 2590 .aead = { 4644 ____VECS(aes_ !! 2591 .enc = __VECS(aes_ccm_enc_tv_template), 4645 .einval_allow !! 2592 .dec = __VECS(aes_ccm_dec_tv_template) 4646 } << 4647 } << 4648 }, { << 4649 .alg = "ccm(sm4)", << 4650 .generic_driver = "ccm_base(c << 4651 .test = alg_test_aead, << 4652 .suite = { << 4653 .aead = { << 4654 ____VECS(sm4_ << 4655 .einval_allow << 4656 } 2593 } 4657 } 2594 } 4658 }, { 2595 }, { 4659 .alg = "chacha20", 2596 .alg = "chacha20", 4660 .test = alg_test_skcipher, 2597 .test = alg_test_skcipher, 4661 .suite = { 2598 .suite = { 4662 .cipher = __VECS(chac !! 2599 .cipher = { 4663 }, !! 2600 .enc = __VECS(chacha20_enc_tv_template), >> 2601 .dec = __VECS(chacha20_enc_tv_template), >> 2602 } >> 2603 } 4664 }, { 2604 }, { 4665 .alg = "cmac(aes)", 2605 .alg = "cmac(aes)", 4666 .fips_allowed = 1, 2606 .fips_allowed = 1, 4667 .test = alg_test_hash, 2607 .test = alg_test_hash, 4668 .suite = { 2608 .suite = { 4669 .hash = __VECS(aes_cm 2609 .hash = __VECS(aes_cmac128_tv_template) 4670 } 2610 } 4671 }, { 2611 }, { 4672 .alg = "cmac(camellia)", << 4673 .test = alg_test_hash, << 4674 .suite = { << 4675 .hash = __VECS(camell << 4676 } << 4677 }, { << 4678 .alg = "cmac(des3_ede)", 2612 .alg = "cmac(des3_ede)", >> 2613 .fips_allowed = 1, 4679 .test = alg_test_hash, 2614 .test = alg_test_hash, 4680 .suite = { 2615 .suite = { 4681 .hash = __VECS(des3_e 2616 .hash = __VECS(des3_ede_cmac64_tv_template) 4682 } 2617 } 4683 }, { 2618 }, { 4684 .alg = "cmac(sm4)", << 4685 .test = alg_test_hash, << 4686 .suite = { << 4687 .hash = __VECS(sm4_cm << 4688 } << 4689 }, { << 4690 .alg = "compress_null", 2619 .alg = "compress_null", 4691 .test = alg_test_null, 2620 .test = alg_test_null, 4692 }, { 2621 }, { 4693 .alg = "crc32", 2622 .alg = "crc32", 4694 .test = alg_test_hash, 2623 .test = alg_test_hash, 4695 .fips_allowed = 1, << 4696 .suite = { 2624 .suite = { 4697 .hash = __VECS(crc32_ 2625 .hash = __VECS(crc32_tv_template) 4698 } 2626 } 4699 }, { 2627 }, { 4700 .alg = "crc32c", 2628 .alg = "crc32c", 4701 .test = alg_test_crc32c, 2629 .test = alg_test_crc32c, 4702 .fips_allowed = 1, 2630 .fips_allowed = 1, 4703 .suite = { 2631 .suite = { 4704 .hash = __VECS(crc32c 2632 .hash = __VECS(crc32c_tv_template) 4705 } 2633 } 4706 }, { 2634 }, { 4707 .alg = "crc64-rocksoft", << 4708 .test = alg_test_hash, << 4709 .fips_allowed = 1, << 4710 .suite = { << 4711 .hash = __VECS(crc64_ << 4712 } << 4713 }, { << 4714 .alg = "crct10dif", 2635 .alg = "crct10dif", 4715 .test = alg_test_hash, 2636 .test = alg_test_hash, 4716 .fips_allowed = 1, 2637 .fips_allowed = 1, 4717 .suite = { 2638 .suite = { 4718 
.hash = __VECS(crct10 2639 .hash = __VECS(crct10dif_tv_template) 4719 } 2640 } 4720 }, { 2641 }, { 4721 .alg = "ctr(aes)", 2642 .alg = "ctr(aes)", 4722 .test = alg_test_skcipher, 2643 .test = alg_test_skcipher, 4723 .fips_allowed = 1, 2644 .fips_allowed = 1, 4724 .suite = { 2645 .suite = { 4725 .cipher = __VECS(aes_ !! 2646 .cipher = { 4726 } !! 2647 .enc = __VECS(aes_ctr_enc_tv_template), 4727 }, { !! 2648 .dec = __VECS(aes_ctr_dec_tv_template) 4728 .alg = "ctr(aria)", !! 2649 } 4729 .test = alg_test_skcipher, << 4730 .suite = { << 4731 .cipher = __VECS(aria << 4732 } 2650 } 4733 }, { 2651 }, { 4734 .alg = "ctr(blowfish)", 2652 .alg = "ctr(blowfish)", 4735 .test = alg_test_skcipher, 2653 .test = alg_test_skcipher, 4736 .suite = { 2654 .suite = { 4737 .cipher = __VECS(bf_c !! 2655 .cipher = { >> 2656 .enc = __VECS(bf_ctr_enc_tv_template), >> 2657 .dec = __VECS(bf_ctr_dec_tv_template) >> 2658 } 4738 } 2659 } 4739 }, { 2660 }, { 4740 .alg = "ctr(camellia)", 2661 .alg = "ctr(camellia)", 4741 .test = alg_test_skcipher, 2662 .test = alg_test_skcipher, 4742 .suite = { 2663 .suite = { 4743 .cipher = __VECS(came !! 2664 .cipher = { >> 2665 .enc = __VECS(camellia_ctr_enc_tv_template), >> 2666 .dec = __VECS(camellia_ctr_dec_tv_template) >> 2667 } 4744 } 2668 } 4745 }, { 2669 }, { 4746 .alg = "ctr(cast5)", 2670 .alg = "ctr(cast5)", 4747 .test = alg_test_skcipher, 2671 .test = alg_test_skcipher, 4748 .suite = { 2672 .suite = { 4749 .cipher = __VECS(cast !! 2673 .cipher = { >> 2674 .enc = __VECS(cast5_ctr_enc_tv_template), >> 2675 .dec = __VECS(cast5_ctr_dec_tv_template) >> 2676 } 4750 } 2677 } 4751 }, { 2678 }, { 4752 .alg = "ctr(cast6)", 2679 .alg = "ctr(cast6)", 4753 .test = alg_test_skcipher, 2680 .test = alg_test_skcipher, 4754 .suite = { 2681 .suite = { 4755 .cipher = __VECS(cast !! 2682 .cipher = { >> 2683 .enc = __VECS(cast6_ctr_enc_tv_template), >> 2684 .dec = __VECS(cast6_ctr_dec_tv_template) >> 2685 } 4756 } 2686 } 4757 }, { 2687 }, { 4758 .alg = "ctr(des)", 2688 .alg = "ctr(des)", 4759 .test = alg_test_skcipher, 2689 .test = alg_test_skcipher, 4760 .suite = { 2690 .suite = { 4761 .cipher = __VECS(des_ !! 2691 .cipher = { >> 2692 .enc = __VECS(des_ctr_enc_tv_template), >> 2693 .dec = __VECS(des_ctr_dec_tv_template) >> 2694 } 4762 } 2695 } 4763 }, { 2696 }, { 4764 .alg = "ctr(des3_ede)", 2697 .alg = "ctr(des3_ede)", 4765 .test = alg_test_skcipher, 2698 .test = alg_test_skcipher, 4766 .suite = { << 4767 .cipher = __VECS(des3 << 4768 } << 4769 }, { << 4770 /* Same as ctr(aes) except th << 4771 * hardware secure memory whi << 4772 */ << 4773 .alg = "ctr(paes)", << 4774 .test = alg_test_null, << 4775 .fips_allowed = 1, 2699 .fips_allowed = 1, 4776 }, { << 4777 << 4778 /* Same as ctr(sm4) except th << 4779 * hardware secure memory whi << 4780 */ << 4781 .alg = "ctr(psm4)", << 4782 .test = alg_test_null, << 4783 }, { << 4784 .alg = "ctr(serpent)", << 4785 .test = alg_test_skcipher, << 4786 .suite = { 2700 .suite = { 4787 .cipher = __VECS(serp !! 2701 .cipher = { >> 2702 .enc = __VECS(des3_ede_ctr_enc_tv_template), >> 2703 .dec = __VECS(des3_ede_ctr_dec_tv_template) >> 2704 } 4788 } 2705 } 4789 }, { 2706 }, { 4790 .alg = "ctr(sm4)", !! 2707 .alg = "ctr(serpent)", 4791 .test = alg_test_skcipher, 2708 .test = alg_test_skcipher, 4792 .suite = { 2709 .suite = { 4793 .cipher = __VECS(sm4_ !! 
2710 .cipher = { >> 2711 .enc = __VECS(serpent_ctr_enc_tv_template), >> 2712 .dec = __VECS(serpent_ctr_dec_tv_template) >> 2713 } 4794 } 2714 } 4795 }, { 2715 }, { 4796 .alg = "ctr(twofish)", 2716 .alg = "ctr(twofish)", 4797 .test = alg_test_skcipher, 2717 .test = alg_test_skcipher, 4798 .suite = { 2718 .suite = { 4799 .cipher = __VECS(tf_c !! 2719 .cipher = { 4800 } !! 2720 .enc = __VECS(tf_ctr_enc_tv_template), 4801 }, { !! 2721 .dec = __VECS(tf_ctr_dec_tv_template) 4802 #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390) !! 2722 } 4803 .alg = "ctr-paes-s390", << 4804 .fips_allowed = 1, << 4805 .test = alg_test_skcipher, << 4806 .suite = { << 4807 .cipher = __VECS(aes_ << 4808 } 2723 } 4809 }, { 2724 }, { 4810 #endif << 4811 .alg = "cts(cbc(aes))", 2725 .alg = "cts(cbc(aes))", 4812 .test = alg_test_skcipher, 2726 .test = alg_test_skcipher, 4813 .fips_allowed = 1, << 4814 .suite = { << 4815 .cipher = __VECS(cts_ << 4816 } << 4817 }, { << 4818 /* Same as cts(cbc((aes)) exc << 4819 * hardware secure memory whi << 4820 */ << 4821 .alg = "cts(cbc(paes))", << 4822 .test = alg_test_null, << 4823 .fips_allowed = 1, << 4824 }, { << 4825 .alg = "cts(cbc(sm4))", << 4826 .test = alg_test_skcipher, << 4827 .suite = { 2727 .suite = { 4828 .cipher = __VECS(sm4_ !! 2728 .cipher = { 4829 } !! 2729 .enc = __VECS(cts_mode_enc_tv_template), 4830 }, { !! 2730 .dec = __VECS(cts_mode_dec_tv_template) 4831 .alg = "curve25519", << 4832 .test = alg_test_kpp, << 4833 .suite = { << 4834 .kpp = __VECS(curve25 << 4835 } << 4836 }, { << 4837 .alg = "deflate", << 4838 .test = alg_test_comp, << 4839 .fips_allowed = 1, << 4840 .suite = { << 4841 .comp = { << 4842 .comp = __VEC << 4843 .decomp = __V << 4844 } 2731 } 4845 } 2732 } 4846 }, { 2733 }, { 4847 .alg = "deflate-iaa", !! 2734 .alg = "deflate", 4848 .test = alg_test_comp, 2735 .test = alg_test_comp, 4849 .fips_allowed = 1, 2736 .fips_allowed = 1, 4850 .suite = { 2737 .suite = { 4851 .comp = { 2738 .comp = { 4852 .comp = __VEC 2739 .comp = __VECS(deflate_comp_tv_template), 4853 .decomp = __V 2740 .decomp = __VECS(deflate_decomp_tv_template) 4854 } 2741 } 4855 } 2742 } 4856 }, { 2743 }, { 4857 .alg = "dh", 2744 .alg = "dh", 4858 .test = alg_test_kpp, 2745 .test = alg_test_kpp, >> 2746 .fips_allowed = 1, 4859 .suite = { 2747 .suite = { 4860 .kpp = __VECS(dh_tv_t 2748 .kpp = __VECS(dh_tv_template) 4861 } 2749 } 4862 }, { 2750 }, { 4863 .alg = "digest_null", 2751 .alg = "digest_null", 4864 .test = alg_test_null, 2752 .test = alg_test_null, 4865 }, { 2753 }, { 4866 .alg = "drbg_nopr_ctr_aes128" 2754 .alg = "drbg_nopr_ctr_aes128", 4867 .test = alg_test_drbg, 2755 .test = alg_test_drbg, 4868 .fips_allowed = 1, 2756 .fips_allowed = 1, 4869 .suite = { 2757 .suite = { 4870 .drbg = __VECS(drbg_n 2758 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template) 4871 } 2759 } 4872 }, { 2760 }, { 4873 .alg = "drbg_nopr_ctr_aes192" 2761 .alg = "drbg_nopr_ctr_aes192", 4874 .test = alg_test_drbg, 2762 .test = alg_test_drbg, 4875 .fips_allowed = 1, 2763 .fips_allowed = 1, 4876 .suite = { 2764 .suite = { 4877 .drbg = __VECS(drbg_n 2765 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template) 4878 } 2766 } 4879 }, { 2767 }, { 4880 .alg = "drbg_nopr_ctr_aes256" 2768 .alg = "drbg_nopr_ctr_aes256", 4881 .test = alg_test_drbg, 2769 .test = alg_test_drbg, 4882 .fips_allowed = 1, 2770 .fips_allowed = 1, 4883 .suite = { 2771 .suite = { 4884 .drbg = __VECS(drbg_n 2772 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template) 4885 } 2773 } 4886 }, { 2774 }, { >> 2775 /* >> 2776 * There is no need to specifically test the DRBG with 
every >> 2777 * backend cipher -- covered by drbg_nopr_hmac_sha256 test >> 2778 */ >> 2779 .alg = "drbg_nopr_hmac_sha1", >> 2780 .fips_allowed = 1, >> 2781 .test = alg_test_null, >> 2782 }, { 4887 .alg = "drbg_nopr_hmac_sha256 2783 .alg = "drbg_nopr_hmac_sha256", 4888 .test = alg_test_drbg, 2784 .test = alg_test_drbg, 4889 .fips_allowed = 1, 2785 .fips_allowed = 1, 4890 .suite = { 2786 .suite = { 4891 .drbg = __VECS(drbg_n 2787 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template) 4892 } 2788 } 4893 }, { 2789 }, { 4894 /* !! 2790 /* covered by drbg_nopr_hmac_sha256 test */ 4895 * There is no need to specif << 4896 * backend cipher -- covered << 4897 */ << 4898 .alg = "drbg_nopr_hmac_sha384 2791 .alg = "drbg_nopr_hmac_sha384", >> 2792 .fips_allowed = 1, 4899 .test = alg_test_null, 2793 .test = alg_test_null, 4900 }, { 2794 }, { 4901 .alg = "drbg_nopr_hmac_sha512 2795 .alg = "drbg_nopr_hmac_sha512", 4902 .test = alg_test_drbg, !! 2796 .test = alg_test_null, 4903 .fips_allowed = 1, 2797 .fips_allowed = 1, 4904 .suite = { !! 2798 }, { 4905 .drbg = __VECS(drbg_n !! 2799 .alg = "drbg_nopr_sha1", 4906 } !! 2800 .fips_allowed = 1, >> 2801 .test = alg_test_null, 4907 }, { 2802 }, { 4908 .alg = "drbg_nopr_sha256", 2803 .alg = "drbg_nopr_sha256", 4909 .test = alg_test_drbg, 2804 .test = alg_test_drbg, 4910 .fips_allowed = 1, 2805 .fips_allowed = 1, 4911 .suite = { 2806 .suite = { 4912 .drbg = __VECS(drbg_n 2807 .drbg = __VECS(drbg_nopr_sha256_tv_template) 4913 } 2808 } 4914 }, { 2809 }, { 4915 /* covered by drbg_nopr_sha25 2810 /* covered by drbg_nopr_sha256 test */ 4916 .alg = "drbg_nopr_sha384", 2811 .alg = "drbg_nopr_sha384", >> 2812 .fips_allowed = 1, 4917 .test = alg_test_null, 2813 .test = alg_test_null, 4918 }, { 2814 }, { 4919 .alg = "drbg_nopr_sha512", 2815 .alg = "drbg_nopr_sha512", 4920 .fips_allowed = 1, 2816 .fips_allowed = 1, 4921 .test = alg_test_null, 2817 .test = alg_test_null, 4922 }, { 2818 }, { 4923 .alg = "drbg_pr_ctr_aes128", 2819 .alg = "drbg_pr_ctr_aes128", 4924 .test = alg_test_drbg, 2820 .test = alg_test_drbg, 4925 .fips_allowed = 1, 2821 .fips_allowed = 1, 4926 .suite = { 2822 .suite = { 4927 .drbg = __VECS(drbg_p 2823 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template) 4928 } 2824 } 4929 }, { 2825 }, { 4930 /* covered by drbg_pr_ctr_aes 2826 /* covered by drbg_pr_ctr_aes128 test */ 4931 .alg = "drbg_pr_ctr_aes192", 2827 .alg = "drbg_pr_ctr_aes192", 4932 .fips_allowed = 1, 2828 .fips_allowed = 1, 4933 .test = alg_test_null, 2829 .test = alg_test_null, 4934 }, { 2830 }, { 4935 .alg = "drbg_pr_ctr_aes256", 2831 .alg = "drbg_pr_ctr_aes256", 4936 .fips_allowed = 1, 2832 .fips_allowed = 1, 4937 .test = alg_test_null, 2833 .test = alg_test_null, 4938 }, { 2834 }, { >> 2835 .alg = "drbg_pr_hmac_sha1", >> 2836 .fips_allowed = 1, >> 2837 .test = alg_test_null, >> 2838 }, { 4939 .alg = "drbg_pr_hmac_sha256", 2839 .alg = "drbg_pr_hmac_sha256", 4940 .test = alg_test_drbg, 2840 .test = alg_test_drbg, 4941 .fips_allowed = 1, 2841 .fips_allowed = 1, 4942 .suite = { 2842 .suite = { 4943 .drbg = __VECS(drbg_p 2843 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template) 4944 } 2844 } 4945 }, { 2845 }, { 4946 /* covered by drbg_pr_hmac_sh 2846 /* covered by drbg_pr_hmac_sha256 test */ 4947 .alg = "drbg_pr_hmac_sha384", 2847 .alg = "drbg_pr_hmac_sha384", >> 2848 .fips_allowed = 1, 4948 .test = alg_test_null, 2849 .test = alg_test_null, 4949 }, { 2850 }, { 4950 .alg = "drbg_pr_hmac_sha512", 2851 .alg = "drbg_pr_hmac_sha512", 4951 .test = alg_test_null, 2852 .test = alg_test_null, 4952 .fips_allowed = 1, 2853 
.fips_allowed = 1, 4953 }, { 2854 }, { >> 2855 .alg = "drbg_pr_sha1", >> 2856 .fips_allowed = 1, >> 2857 .test = alg_test_null, >> 2858 }, { 4954 .alg = "drbg_pr_sha256", 2859 .alg = "drbg_pr_sha256", 4955 .test = alg_test_drbg, 2860 .test = alg_test_drbg, 4956 .fips_allowed = 1, 2861 .fips_allowed = 1, 4957 .suite = { 2862 .suite = { 4958 .drbg = __VECS(drbg_p 2863 .drbg = __VECS(drbg_pr_sha256_tv_template) 4959 } 2864 } 4960 }, { 2865 }, { 4961 /* covered by drbg_pr_sha256 2866 /* covered by drbg_pr_sha256 test */ 4962 .alg = "drbg_pr_sha384", 2867 .alg = "drbg_pr_sha384", >> 2868 .fips_allowed = 1, 4963 .test = alg_test_null, 2869 .test = alg_test_null, 4964 }, { 2870 }, { 4965 .alg = "drbg_pr_sha512", 2871 .alg = "drbg_pr_sha512", 4966 .fips_allowed = 1, 2872 .fips_allowed = 1, 4967 .test = alg_test_null, 2873 .test = alg_test_null, 4968 }, { 2874 }, { 4969 .alg = "ecb(aes)", 2875 .alg = "ecb(aes)", 4970 .test = alg_test_skcipher, 2876 .test = alg_test_skcipher, 4971 .fips_allowed = 1, 2877 .fips_allowed = 1, 4972 .suite = { 2878 .suite = { 4973 .cipher = __VECS(aes_ !! 2879 .cipher = { >> 2880 .enc = __VECS(aes_enc_tv_template), >> 2881 .dec = __VECS(aes_dec_tv_template) >> 2882 } 4974 } 2883 } 4975 }, { 2884 }, { 4976 .alg = "ecb(anubis)", 2885 .alg = "ecb(anubis)", 4977 .test = alg_test_skcipher, 2886 .test = alg_test_skcipher, 4978 .suite = { 2887 .suite = { 4979 .cipher = __VECS(anub !! 2888 .cipher = { >> 2889 .enc = __VECS(anubis_enc_tv_template), >> 2890 .dec = __VECS(anubis_dec_tv_template) >> 2891 } 4980 } 2892 } 4981 }, { 2893 }, { 4982 .alg = "ecb(arc4)", 2894 .alg = "ecb(arc4)", 4983 .generic_driver = "arc4-gener << 4984 .test = alg_test_skcipher, << 4985 .suite = { << 4986 .cipher = __VECS(arc4 << 4987 } << 4988 }, { << 4989 .alg = "ecb(aria)", << 4990 .test = alg_test_skcipher, 2895 .test = alg_test_skcipher, 4991 .suite = { 2896 .suite = { 4992 .cipher = __VECS(aria !! 2897 .cipher = { >> 2898 .enc = __VECS(arc4_enc_tv_template), >> 2899 .dec = __VECS(arc4_dec_tv_template) >> 2900 } 4993 } 2901 } 4994 }, { 2902 }, { 4995 .alg = "ecb(blowfish)", 2903 .alg = "ecb(blowfish)", 4996 .test = alg_test_skcipher, 2904 .test = alg_test_skcipher, 4997 .suite = { 2905 .suite = { 4998 .cipher = __VECS(bf_t !! 2906 .cipher = { >> 2907 .enc = __VECS(bf_enc_tv_template), >> 2908 .dec = __VECS(bf_dec_tv_template) >> 2909 } 4999 } 2910 } 5000 }, { 2911 }, { 5001 .alg = "ecb(camellia)", 2912 .alg = "ecb(camellia)", 5002 .test = alg_test_skcipher, 2913 .test = alg_test_skcipher, 5003 .suite = { 2914 .suite = { 5004 .cipher = __VECS(came !! 2915 .cipher = { >> 2916 .enc = __VECS(camellia_enc_tv_template), >> 2917 .dec = __VECS(camellia_dec_tv_template) >> 2918 } 5005 } 2919 } 5006 }, { 2920 }, { 5007 .alg = "ecb(cast5)", 2921 .alg = "ecb(cast5)", 5008 .test = alg_test_skcipher, 2922 .test = alg_test_skcipher, 5009 .suite = { 2923 .suite = { 5010 .cipher = __VECS(cast !! 2924 .cipher = { >> 2925 .enc = __VECS(cast5_enc_tv_template), >> 2926 .dec = __VECS(cast5_dec_tv_template) >> 2927 } 5011 } 2928 } 5012 }, { 2929 }, { 5013 .alg = "ecb(cast6)", 2930 .alg = "ecb(cast6)", 5014 .test = alg_test_skcipher, 2931 .test = alg_test_skcipher, 5015 .suite = { 2932 .suite = { 5016 .cipher = __VECS(cast !! 
2933 .cipher = { >> 2934 .enc = __VECS(cast6_enc_tv_template), >> 2935 .dec = __VECS(cast6_dec_tv_template) >> 2936 } 5017 } 2937 } 5018 }, { 2938 }, { 5019 .alg = "ecb(cipher_null)", 2939 .alg = "ecb(cipher_null)", 5020 .test = alg_test_null, 2940 .test = alg_test_null, 5021 .fips_allowed = 1, 2941 .fips_allowed = 1, 5022 }, { 2942 }, { 5023 .alg = "ecb(des)", 2943 .alg = "ecb(des)", 5024 .test = alg_test_skcipher, 2944 .test = alg_test_skcipher, 5025 .suite = { 2945 .suite = { 5026 .cipher = __VECS(des_ !! 2946 .cipher = { >> 2947 .enc = __VECS(des_enc_tv_template), >> 2948 .dec = __VECS(des_dec_tv_template) >> 2949 } 5027 } 2950 } 5028 }, { 2951 }, { 5029 .alg = "ecb(des3_ede)", 2952 .alg = "ecb(des3_ede)", 5030 .test = alg_test_skcipher, 2953 .test = alg_test_skcipher, >> 2954 .fips_allowed = 1, 5031 .suite = { 2955 .suite = { 5032 .cipher = __VECS(des3 !! 2956 .cipher = { >> 2957 .enc = __VECS(des3_ede_enc_tv_template), >> 2958 .dec = __VECS(des3_ede_dec_tv_template) >> 2959 } 5033 } 2960 } 5034 }, { 2961 }, { 5035 .alg = "ecb(fcrypt)", 2962 .alg = "ecb(fcrypt)", 5036 .test = alg_test_skcipher, 2963 .test = alg_test_skcipher, 5037 .suite = { 2964 .suite = { 5038 .cipher = { 2965 .cipher = { 5039 .vecs = fcryp !! 2966 .enc = { 5040 .count = 1 !! 2967 .vecs = fcrypt_pcbc_enc_tv_template, >> 2968 .count = 1 >> 2969 }, >> 2970 .dec = { >> 2971 .vecs = fcrypt_pcbc_dec_tv_template, >> 2972 .count = 1 >> 2973 } 5041 } 2974 } 5042 } 2975 } 5043 }, { 2976 }, { 5044 .alg = "ecb(khazad)", 2977 .alg = "ecb(khazad)", 5045 .test = alg_test_skcipher, 2978 .test = alg_test_skcipher, 5046 .suite = { 2979 .suite = { 5047 .cipher = __VECS(khaz !! 2980 .cipher = { >> 2981 .enc = __VECS(khazad_enc_tv_template), >> 2982 .dec = __VECS(khazad_dec_tv_template) >> 2983 } 5048 } 2984 } 5049 }, { 2985 }, { 5050 /* Same as ecb(aes) except th << 5051 * hardware secure memory whi << 5052 */ << 5053 .alg = "ecb(paes)", << 5054 .test = alg_test_null, << 5055 .fips_allowed = 1, << 5056 }, { << 5057 .alg = "ecb(seed)", 2986 .alg = "ecb(seed)", 5058 .test = alg_test_skcipher, 2987 .test = alg_test_skcipher, 5059 .suite = { 2988 .suite = { 5060 .cipher = __VECS(seed !! 2989 .cipher = { >> 2990 .enc = __VECS(seed_enc_tv_template), >> 2991 .dec = __VECS(seed_dec_tv_template) >> 2992 } 5061 } 2993 } 5062 }, { 2994 }, { 5063 .alg = "ecb(serpent)", 2995 .alg = "ecb(serpent)", 5064 .test = alg_test_skcipher, 2996 .test = alg_test_skcipher, 5065 .suite = { 2997 .suite = { 5066 .cipher = __VECS(serp !! 2998 .cipher = { >> 2999 .enc = __VECS(serpent_enc_tv_template), >> 3000 .dec = __VECS(serpent_dec_tv_template) >> 3001 } 5067 } 3002 } 5068 }, { 3003 }, { 5069 .alg = "ecb(sm4)", 3004 .alg = "ecb(sm4)", 5070 .test = alg_test_skcipher, 3005 .test = alg_test_skcipher, 5071 .suite = { 3006 .suite = { 5072 .cipher = __VECS(sm4_ !! 3007 .cipher = { >> 3008 .enc = __VECS(sm4_enc_tv_template), >> 3009 .dec = __VECS(sm4_dec_tv_template) >> 3010 } 5073 } 3011 } 5074 }, { 3012 }, { 5075 .alg = "ecb(tea)", !! 3013 .alg = "ecb(speck128)", 5076 .test = alg_test_skcipher, 3014 .test = alg_test_skcipher, 5077 .suite = { 3015 .suite = { 5078 .cipher = __VECS(tea_ !! 3016 .cipher = { >> 3017 .enc = __VECS(speck128_enc_tv_template), >> 3018 .dec = __VECS(speck128_dec_tv_template) >> 3019 } 5079 } 3020 } 5080 }, { 3021 }, { 5081 .alg = "ecb(twofish)", !! 3022 .alg = "ecb(speck64)", 5082 .test = alg_test_skcipher, 3023 .test = alg_test_skcipher, 5083 .suite = { 3024 .suite = { 5084 .cipher = __VECS(tf_t !! 
3025 .cipher = { >> 3026 .enc = __VECS(speck64_enc_tv_template), >> 3027 .dec = __VECS(speck64_dec_tv_template) >> 3028 } 5085 } 3029 } 5086 }, { 3030 }, { 5087 .alg = "ecb(xeta)", !! 3031 .alg = "ecb(tea)", 5088 .test = alg_test_skcipher, 3032 .test = alg_test_skcipher, 5089 .suite = { 3033 .suite = { 5090 .cipher = __VECS(xeta !! 3034 .cipher = { >> 3035 .enc = __VECS(tea_enc_tv_template), >> 3036 .dec = __VECS(tea_dec_tv_template) >> 3037 } 5091 } 3038 } 5092 }, { 3039 }, { 5093 .alg = "ecb(xtea)", !! 3040 .alg = "ecb(tnepres)", 5094 .test = alg_test_skcipher, 3041 .test = alg_test_skcipher, 5095 .suite = { 3042 .suite = { 5096 .cipher = __VECS(xtea !! 3043 .cipher = { >> 3044 .enc = __VECS(tnepres_enc_tv_template), >> 3045 .dec = __VECS(tnepres_dec_tv_template) >> 3046 } 5097 } 3047 } 5098 }, { 3048 }, { 5099 #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390) !! 3049 .alg = "ecb(twofish)", 5100 .alg = "ecb-paes-s390", << 5101 .fips_allowed = 1, << 5102 .test = alg_test_skcipher, 3050 .test = alg_test_skcipher, 5103 .suite = { 3051 .suite = { 5104 .cipher = __VECS(aes_ !! 3052 .cipher = { 5105 } !! 3053 .enc = __VECS(tf_enc_tv_template), 5106 }, { !! 3054 .dec = __VECS(tf_dec_tv_template) 5107 #endif !! 3055 } 5108 .alg = "ecdh-nist-p192", << 5109 .test = alg_test_kpp, << 5110 .suite = { << 5111 .kpp = __VECS(ecdh_p1 << 5112 } << 5113 }, { << 5114 .alg = "ecdh-nist-p256", << 5115 .test = alg_test_kpp, << 5116 .fips_allowed = 1, << 5117 .suite = { << 5118 .kpp = __VECS(ecdh_p2 << 5119 } << 5120 }, { << 5121 .alg = "ecdh-nist-p384", << 5122 .test = alg_test_kpp, << 5123 .fips_allowed = 1, << 5124 .suite = { << 5125 .kpp = __VECS(ecdh_p3 << 5126 } << 5127 }, { << 5128 .alg = "ecdsa-nist-p192", << 5129 .test = alg_test_akcipher, << 5130 .suite = { << 5131 .akcipher = __VECS(ec << 5132 } << 5133 }, { << 5134 .alg = "ecdsa-nist-p256", << 5135 .test = alg_test_akcipher, << 5136 .fips_allowed = 1, << 5137 .suite = { << 5138 .akcipher = __VECS(ec << 5139 } << 5140 }, { << 5141 .alg = "ecdsa-nist-p384", << 5142 .test = alg_test_akcipher, << 5143 .fips_allowed = 1, << 5144 .suite = { << 5145 .akcipher = __VECS(ec << 5146 } << 5147 }, { << 5148 .alg = "ecdsa-nist-p521", << 5149 .test = alg_test_akcipher, << 5150 .fips_allowed = 1, << 5151 .suite = { << 5152 .akcipher = __VECS(ec << 5153 } << 5154 }, { << 5155 .alg = "ecrdsa", << 5156 .test = alg_test_akcipher, << 5157 .suite = { << 5158 .akcipher = __VECS(ec << 5159 } << 5160 }, { << 5161 .alg = "essiv(authenc(hmac(sh << 5162 .test = alg_test_aead, << 5163 .fips_allowed = 1, << 5164 .suite = { << 5165 .aead = __VECS(essiv_ << 5166 } 3056 } 5167 }, { 3057 }, { 5168 .alg = "essiv(cbc(aes),sha256 !! 3058 .alg = "ecb(xeta)", 5169 .test = alg_test_skcipher, 3059 .test = alg_test_skcipher, 5170 .fips_allowed = 1, << 5171 .suite = { << 5172 .cipher = __VECS(essi << 5173 } << 5174 }, { << 5175 #if IS_ENABLED(CONFIG_CRYPTO_DH_RFC7919_GROUP << 5176 .alg = "ffdhe2048(dh)", << 5177 .test = alg_test_kpp, << 5178 .fips_allowed = 1, << 5179 .suite = { 3060 .suite = { 5180 .kpp = __VECS(ffdhe20 !! 3061 .cipher = { 5181 } !! 3062 .enc = __VECS(xeta_enc_tv_template), 5182 }, { !! 3063 .dec = __VECS(xeta_dec_tv_template) 5183 .alg = "ffdhe3072(dh)", !! 
3064 } 5184 .test = alg_test_kpp, << 5185 .fips_allowed = 1, << 5186 .suite = { << 5187 .kpp = __VECS(ffdhe30 << 5188 } << 5189 }, { << 5190 .alg = "ffdhe4096(dh)", << 5191 .test = alg_test_kpp, << 5192 .fips_allowed = 1, << 5193 .suite = { << 5194 .kpp = __VECS(ffdhe40 << 5195 } 3065 } 5196 }, { 3066 }, { 5197 .alg = "ffdhe6144(dh)", !! 3067 .alg = "ecb(xtea)", 5198 .test = alg_test_kpp, !! 3068 .test = alg_test_skcipher, 5199 .fips_allowed = 1, << 5200 .suite = { 3069 .suite = { 5201 .kpp = __VECS(ffdhe61 !! 3070 .cipher = { >> 3071 .enc = __VECS(xtea_enc_tv_template), >> 3072 .dec = __VECS(xtea_dec_tv_template) >> 3073 } 5202 } 3074 } 5203 }, { 3075 }, { 5204 .alg = "ffdhe8192(dh)", !! 3076 .alg = "ecdh", 5205 .test = alg_test_kpp, 3077 .test = alg_test_kpp, 5206 .fips_allowed = 1, 3078 .fips_allowed = 1, 5207 .suite = { 3079 .suite = { 5208 .kpp = __VECS(ffdhe81 !! 3080 .kpp = __VECS(ecdh_tv_template) 5209 } 3081 } 5210 }, { 3082 }, { 5211 #endif /* CONFIG_CRYPTO_DH_RFC7919_GROUPS */ << 5212 .alg = "gcm(aes)", 3083 .alg = "gcm(aes)", 5213 .generic_driver = "gcm_base(c << 5214 .test = alg_test_aead, 3084 .test = alg_test_aead, 5215 .fips_allowed = 1, 3085 .fips_allowed = 1, 5216 .suite = { 3086 .suite = { 5217 .aead = __VECS(aes_gc !! 3087 .aead = { 5218 } !! 3088 .enc = __VECS(aes_gcm_enc_tv_template), 5219 }, { !! 3089 .dec = __VECS(aes_gcm_dec_tv_template) 5220 .alg = "gcm(aria)", !! 3090 } 5221 .generic_driver = "gcm_base(c << 5222 .test = alg_test_aead, << 5223 .suite = { << 5224 .aead = __VECS(aria_g << 5225 } << 5226 }, { << 5227 .alg = "gcm(sm4)", << 5228 .generic_driver = "gcm_base(c << 5229 .test = alg_test_aead, << 5230 .suite = { << 5231 .aead = __VECS(sm4_gc << 5232 } 3091 } 5233 }, { 3092 }, { 5234 .alg = "ghash", 3093 .alg = "ghash", 5235 .test = alg_test_hash, 3094 .test = alg_test_hash, >> 3095 .fips_allowed = 1, 5236 .suite = { 3096 .suite = { 5237 .hash = __VECS(ghash_ 3097 .hash = __VECS(ghash_tv_template) 5238 } 3098 } 5239 }, { 3099 }, { 5240 .alg = "hctr2(aes)", !! 3100 .alg = "hmac(crc32)", 5241 .generic_driver = !! 3101 .test = alg_test_hash, 5242 "hctr2_base(xctr(aes-gene << 5243 .test = alg_test_skcipher, << 5244 .suite = { 3102 .suite = { 5245 .cipher = __VECS(aes_ !! 
3103 .hash = __VECS(bfin_crc_tv_template) 5246 } 3104 } 5247 }, { 3105 }, { 5248 .alg = "hmac(md5)", 3106 .alg = "hmac(md5)", 5249 .test = alg_test_hash, 3107 .test = alg_test_hash, 5250 .suite = { 3108 .suite = { 5251 .hash = __VECS(hmac_m 3109 .hash = __VECS(hmac_md5_tv_template) 5252 } 3110 } 5253 }, { 3111 }, { >> 3112 .alg = "hmac(rmd128)", >> 3113 .test = alg_test_hash, >> 3114 .suite = { >> 3115 .hash = __VECS(hmac_rmd128_tv_template) >> 3116 } >> 3117 }, { 5254 .alg = "hmac(rmd160)", 3118 .alg = "hmac(rmd160)", 5255 .test = alg_test_hash, 3119 .test = alg_test_hash, 5256 .suite = { 3120 .suite = { 5257 .hash = __VECS(hmac_r 3121 .hash = __VECS(hmac_rmd160_tv_template) 5258 } 3122 } 5259 }, { 3123 }, { 5260 .alg = "hmac(sha1)", 3124 .alg = "hmac(sha1)", 5261 .test = alg_test_hash, 3125 .test = alg_test_hash, 5262 .fips_allowed = 1, 3126 .fips_allowed = 1, 5263 .suite = { 3127 .suite = { 5264 .hash = __VECS(hmac_s 3128 .hash = __VECS(hmac_sha1_tv_template) 5265 } 3129 } 5266 }, { 3130 }, { 5267 .alg = "hmac(sha224)", 3131 .alg = "hmac(sha224)", 5268 .test = alg_test_hash, 3132 .test = alg_test_hash, 5269 .fips_allowed = 1, 3133 .fips_allowed = 1, 5270 .suite = { 3134 .suite = { 5271 .hash = __VECS(hmac_s 3135 .hash = __VECS(hmac_sha224_tv_template) 5272 } 3136 } 5273 }, { 3137 }, { 5274 .alg = "hmac(sha256)", 3138 .alg = "hmac(sha256)", 5275 .test = alg_test_hash, 3139 .test = alg_test_hash, 5276 .fips_allowed = 1, 3140 .fips_allowed = 1, 5277 .suite = { 3141 .suite = { 5278 .hash = __VECS(hmac_s 3142 .hash = __VECS(hmac_sha256_tv_template) 5279 } 3143 } 5280 }, { 3144 }, { 5281 .alg = "hmac(sha3-224)", 3145 .alg = "hmac(sha3-224)", 5282 .test = alg_test_hash, 3146 .test = alg_test_hash, 5283 .fips_allowed = 1, 3147 .fips_allowed = 1, 5284 .suite = { 3148 .suite = { 5285 .hash = __VECS(hmac_s 3149 .hash = __VECS(hmac_sha3_224_tv_template) 5286 } 3150 } 5287 }, { 3151 }, { 5288 .alg = "hmac(sha3-256)", 3152 .alg = "hmac(sha3-256)", 5289 .test = alg_test_hash, 3153 .test = alg_test_hash, 5290 .fips_allowed = 1, 3154 .fips_allowed = 1, 5291 .suite = { 3155 .suite = { 5292 .hash = __VECS(hmac_s 3156 .hash = __VECS(hmac_sha3_256_tv_template) 5293 } 3157 } 5294 }, { 3158 }, { 5295 .alg = "hmac(sha3-384)", 3159 .alg = "hmac(sha3-384)", 5296 .test = alg_test_hash, 3160 .test = alg_test_hash, 5297 .fips_allowed = 1, 3161 .fips_allowed = 1, 5298 .suite = { 3162 .suite = { 5299 .hash = __VECS(hmac_s 3163 .hash = __VECS(hmac_sha3_384_tv_template) 5300 } 3164 } 5301 }, { 3165 }, { 5302 .alg = "hmac(sha3-512)", 3166 .alg = "hmac(sha3-512)", 5303 .test = alg_test_hash, 3167 .test = alg_test_hash, 5304 .fips_allowed = 1, 3168 .fips_allowed = 1, 5305 .suite = { 3169 .suite = { 5306 .hash = __VECS(hmac_s 3170 .hash = __VECS(hmac_sha3_512_tv_template) 5307 } 3171 } 5308 }, { 3172 }, { 5309 .alg = "hmac(sha384)", 3173 .alg = "hmac(sha384)", 5310 .test = alg_test_hash, 3174 .test = alg_test_hash, 5311 .fips_allowed = 1, 3175 .fips_allowed = 1, 5312 .suite = { 3176 .suite = { 5313 .hash = __VECS(hmac_s 3177 .hash = __VECS(hmac_sha384_tv_template) 5314 } 3178 } 5315 }, { 3179 }, { 5316 .alg = "hmac(sha512)", 3180 .alg = "hmac(sha512)", 5317 .test = alg_test_hash, 3181 .test = alg_test_hash, 5318 .fips_allowed = 1, 3182 .fips_allowed = 1, 5319 .suite = { 3183 .suite = { 5320 .hash = __VECS(hmac_s 3184 .hash = __VECS(hmac_sha512_tv_template) 5321 } 3185 } 5322 }, { 3186 }, { 5323 .alg = "hmac(sm3)", << 5324 .test = alg_test_hash, << 5325 .suite = { << 5326 .hash = __VECS(hmac_s << 5327 } << 5328 }, { << 5329 
.alg = "hmac(streebog256)", << 5330 .test = alg_test_hash, << 5331 .suite = { << 5332 .hash = __VECS(hmac_s << 5333 } << 5334 }, { << 5335 .alg = "hmac(streebog512)", << 5336 .test = alg_test_hash, << 5337 .suite = { << 5338 .hash = __VECS(hmac_s << 5339 } << 5340 }, { << 5341 .alg = "jitterentropy_rng", 3187 .alg = "jitterentropy_rng", 5342 .fips_allowed = 1, 3188 .fips_allowed = 1, 5343 .test = alg_test_null, 3189 .test = alg_test_null, 5344 }, { 3190 }, { 5345 .alg = "kw(aes)", 3191 .alg = "kw(aes)", 5346 .test = alg_test_skcipher, 3192 .test = alg_test_skcipher, 5347 .fips_allowed = 1, 3193 .fips_allowed = 1, 5348 .suite = { 3194 .suite = { 5349 .cipher = __VECS(aes_ !! 3195 .cipher = { >> 3196 .enc = __VECS(aes_kw_enc_tv_template), >> 3197 .dec = __VECS(aes_kw_dec_tv_template) >> 3198 } 5350 } 3199 } 5351 }, { 3200 }, { 5352 .alg = "lrw(aes)", 3201 .alg = "lrw(aes)", 5353 .generic_driver = "lrw(ecb(ae << 5354 .test = alg_test_skcipher, 3202 .test = alg_test_skcipher, 5355 .suite = { 3203 .suite = { 5356 .cipher = __VECS(aes_ !! 3204 .cipher = { >> 3205 .enc = __VECS(aes_lrw_enc_tv_template), >> 3206 .dec = __VECS(aes_lrw_dec_tv_template) >> 3207 } 5357 } 3208 } 5358 }, { 3209 }, { 5359 .alg = "lrw(camellia)", 3210 .alg = "lrw(camellia)", 5360 .generic_driver = "lrw(ecb(ca << 5361 .test = alg_test_skcipher, 3211 .test = alg_test_skcipher, 5362 .suite = { 3212 .suite = { 5363 .cipher = __VECS(came !! 3213 .cipher = { >> 3214 .enc = __VECS(camellia_lrw_enc_tv_template), >> 3215 .dec = __VECS(camellia_lrw_dec_tv_template) >> 3216 } 5364 } 3217 } 5365 }, { 3218 }, { 5366 .alg = "lrw(cast6)", 3219 .alg = "lrw(cast6)", 5367 .generic_driver = "lrw(ecb(ca << 5368 .test = alg_test_skcipher, 3220 .test = alg_test_skcipher, 5369 .suite = { 3221 .suite = { 5370 .cipher = __VECS(cast !! 3222 .cipher = { >> 3223 .enc = __VECS(cast6_lrw_enc_tv_template), >> 3224 .dec = __VECS(cast6_lrw_dec_tv_template) >> 3225 } 5371 } 3226 } 5372 }, { 3227 }, { 5373 .alg = "lrw(serpent)", 3228 .alg = "lrw(serpent)", 5374 .generic_driver = "lrw(ecb(se << 5375 .test = alg_test_skcipher, 3229 .test = alg_test_skcipher, 5376 .suite = { 3230 .suite = { 5377 .cipher = __VECS(serp !! 3231 .cipher = { >> 3232 .enc = __VECS(serpent_lrw_enc_tv_template), >> 3233 .dec = __VECS(serpent_lrw_dec_tv_template) >> 3234 } 5378 } 3235 } 5379 }, { 3236 }, { 5380 .alg = "lrw(twofish)", 3237 .alg = "lrw(twofish)", 5381 .generic_driver = "lrw(ecb(tw << 5382 .test = alg_test_skcipher, 3238 .test = alg_test_skcipher, 5383 .suite = { 3239 .suite = { 5384 .cipher = __VECS(tf_l !! 
3240 .cipher = { >> 3241 .enc = __VECS(tf_lrw_enc_tv_template), >> 3242 .dec = __VECS(tf_lrw_dec_tv_template) >> 3243 } 5385 } 3244 } 5386 }, { 3245 }, { 5387 .alg = "lz4", 3246 .alg = "lz4", 5388 .test = alg_test_comp, 3247 .test = alg_test_comp, 5389 .fips_allowed = 1, 3248 .fips_allowed = 1, 5390 .suite = { 3249 .suite = { 5391 .comp = { 3250 .comp = { 5392 .comp = __VEC 3251 .comp = __VECS(lz4_comp_tv_template), 5393 .decomp = __V 3252 .decomp = __VECS(lz4_decomp_tv_template) 5394 } 3253 } 5395 } 3254 } 5396 }, { 3255 }, { 5397 .alg = "lz4hc", 3256 .alg = "lz4hc", 5398 .test = alg_test_comp, 3257 .test = alg_test_comp, 5399 .fips_allowed = 1, 3258 .fips_allowed = 1, 5400 .suite = { 3259 .suite = { 5401 .comp = { 3260 .comp = { 5402 .comp = __VEC 3261 .comp = __VECS(lz4hc_comp_tv_template), 5403 .decomp = __V 3262 .decomp = __VECS(lz4hc_decomp_tv_template) 5404 } 3263 } 5405 } 3264 } 5406 }, { 3265 }, { 5407 .alg = "lzo", 3266 .alg = "lzo", 5408 .test = alg_test_comp, 3267 .test = alg_test_comp, 5409 .fips_allowed = 1, 3268 .fips_allowed = 1, 5410 .suite = { 3269 .suite = { 5411 .comp = { 3270 .comp = { 5412 .comp = __VEC 3271 .comp = __VECS(lzo_comp_tv_template), 5413 .decomp = __V 3272 .decomp = __VECS(lzo_decomp_tv_template) 5414 } 3273 } 5415 } 3274 } 5416 }, { 3275 }, { 5417 .alg = "lzo-rle", << 5418 .test = alg_test_comp, << 5419 .fips_allowed = 1, << 5420 .suite = { << 5421 .comp = { << 5422 .comp = __VEC << 5423 .decomp = __V << 5424 } << 5425 } << 5426 }, { << 5427 .alg = "md4", 3276 .alg = "md4", 5428 .test = alg_test_hash, 3277 .test = alg_test_hash, 5429 .suite = { 3278 .suite = { 5430 .hash = __VECS(md4_tv 3279 .hash = __VECS(md4_tv_template) 5431 } 3280 } 5432 }, { 3281 }, { 5433 .alg = "md5", 3282 .alg = "md5", 5434 .test = alg_test_hash, 3283 .test = alg_test_hash, 5435 .suite = { 3284 .suite = { 5436 .hash = __VECS(md5_tv 3285 .hash = __VECS(md5_tv_template) 5437 } 3286 } 5438 }, { 3287 }, { 5439 .alg = "michael_mic", 3288 .alg = "michael_mic", 5440 .test = alg_test_hash, 3289 .test = alg_test_hash, 5441 .suite = { 3290 .suite = { 5442 .hash = __VECS(michae 3291 .hash = __VECS(michael_mic_tv_template) 5443 } 3292 } 5444 }, { 3293 }, { 5445 .alg = "nhpoly1305", !! 3294 .alg = "ofb(aes)", 5446 .test = alg_test_hash, !! 3295 .test = alg_test_skcipher, >> 3296 .fips_allowed = 1, 5447 .suite = { 3297 .suite = { 5448 .hash = __VECS(nhpoly !! 3298 .cipher = { >> 3299 .enc = __VECS(aes_ofb_enc_tv_template), >> 3300 .dec = __VECS(aes_ofb_dec_tv_template) >> 3301 } 5449 } 3302 } 5450 }, { 3303 }, { 5451 .alg = "pcbc(fcrypt)", 3304 .alg = "pcbc(fcrypt)", 5452 .test = alg_test_skcipher, 3305 .test = alg_test_skcipher, 5453 .suite = { 3306 .suite = { 5454 .cipher = __VECS(fcry !! 
3307 .cipher = { >> 3308 .enc = __VECS(fcrypt_pcbc_enc_tv_template), >> 3309 .dec = __VECS(fcrypt_pcbc_dec_tv_template) >> 3310 } 5455 } 3311 } 5456 }, { 3312 }, { 5457 .alg = "pkcs1pad(rsa,sha224)" 3313 .alg = "pkcs1pad(rsa,sha224)", 5458 .test = alg_test_null, 3314 .test = alg_test_null, 5459 .fips_allowed = 1, 3315 .fips_allowed = 1, 5460 }, { 3316 }, { 5461 .alg = "pkcs1pad(rsa,sha256)" 3317 .alg = "pkcs1pad(rsa,sha256)", 5462 .test = alg_test_akcipher, 3318 .test = alg_test_akcipher, 5463 .fips_allowed = 1, 3319 .fips_allowed = 1, 5464 .suite = { 3320 .suite = { 5465 .akcipher = __VECS(pk 3321 .akcipher = __VECS(pkcs1pad_rsa_tv_template) 5466 } 3322 } 5467 }, { 3323 }, { 5468 .alg = "pkcs1pad(rsa,sha3-256 << 5469 .test = alg_test_null, << 5470 .fips_allowed = 1, << 5471 }, { << 5472 .alg = "pkcs1pad(rsa,sha3-384 << 5473 .test = alg_test_null, << 5474 .fips_allowed = 1, << 5475 }, { << 5476 .alg = "pkcs1pad(rsa,sha3-512 << 5477 .test = alg_test_null, << 5478 .fips_allowed = 1, << 5479 }, { << 5480 .alg = "pkcs1pad(rsa,sha384)" 3324 .alg = "pkcs1pad(rsa,sha384)", 5481 .test = alg_test_null, 3325 .test = alg_test_null, 5482 .fips_allowed = 1, 3326 .fips_allowed = 1, 5483 }, { 3327 }, { 5484 .alg = "pkcs1pad(rsa,sha512)" 3328 .alg = "pkcs1pad(rsa,sha512)", 5485 .test = alg_test_null, 3329 .test = alg_test_null, 5486 .fips_allowed = 1, 3330 .fips_allowed = 1, 5487 }, { 3331 }, { 5488 .alg = "poly1305", 3332 .alg = "poly1305", 5489 .test = alg_test_hash, 3333 .test = alg_test_hash, 5490 .suite = { 3334 .suite = { 5491 .hash = __VECS(poly13 3335 .hash = __VECS(poly1305_tv_template) 5492 } 3336 } 5493 }, { 3337 }, { 5494 .alg = "polyval", << 5495 .test = alg_test_hash, << 5496 .suite = { << 5497 .hash = __VECS(polyva << 5498 } << 5499 }, { << 5500 .alg = "rfc3686(ctr(aes))", 3338 .alg = "rfc3686(ctr(aes))", 5501 .test = alg_test_skcipher, 3339 .test = alg_test_skcipher, 5502 .fips_allowed = 1, 3340 .fips_allowed = 1, 5503 .suite = { 3341 .suite = { 5504 .cipher = __VECS(aes_ !! 3342 .cipher = { 5505 } !! 3343 .enc = __VECS(aes_ctr_rfc3686_enc_tv_template), 5506 }, { !! 3344 .dec = __VECS(aes_ctr_rfc3686_dec_tv_template) 5507 .alg = "rfc3686(ctr(sm4))", !! 3345 } 5508 .test = alg_test_skcipher, << 5509 .suite = { << 5510 .cipher = __VECS(sm4_ << 5511 } 3346 } 5512 }, { 3347 }, { 5513 .alg = "rfc4106(gcm(aes))", 3348 .alg = "rfc4106(gcm(aes))", 5514 .generic_driver = "rfc4106(gc << 5515 .test = alg_test_aead, 3349 .test = alg_test_aead, 5516 .fips_allowed = 1, 3350 .fips_allowed = 1, 5517 .suite = { 3351 .suite = { 5518 .aead = { 3352 .aead = { 5519 ____VECS(aes_ !! 3353 .enc = __VECS(aes_gcm_rfc4106_enc_tv_template), 5520 .einval_allow !! 3354 .dec = __VECS(aes_gcm_rfc4106_dec_tv_template) 5521 .aad_iv = 1, << 5522 } 3355 } 5523 } 3356 } 5524 }, { 3357 }, { 5525 .alg = "rfc4309(ccm(aes))", 3358 .alg = "rfc4309(ccm(aes))", 5526 .generic_driver = "rfc4309(cc << 5527 .test = alg_test_aead, 3359 .test = alg_test_aead, 5528 .fips_allowed = 1, 3360 .fips_allowed = 1, 5529 .suite = { 3361 .suite = { 5530 .aead = { 3362 .aead = { 5531 ____VECS(aes_ !! 3363 .enc = __VECS(aes_ccm_rfc4309_enc_tv_template), 5532 .einval_allow !! 3364 .dec = __VECS(aes_ccm_rfc4309_dec_tv_template) 5533 .aad_iv = 1, << 5534 } 3365 } 5535 } 3366 } 5536 }, { 3367 }, { 5537 .alg = "rfc4543(gcm(aes))", 3368 .alg = "rfc4543(gcm(aes))", 5538 .generic_driver = "rfc4543(gc << 5539 .test = alg_test_aead, 3369 .test = alg_test_aead, 5540 .suite = { 3370 .suite = { 5541 .aead = { 3371 .aead = { 5542 ____VECS(aes_ !! 
3372 .enc = __VECS(aes_gcm_rfc4543_enc_tv_template), 5543 .einval_allow !! 3373 .dec = __VECS(aes_gcm_rfc4543_dec_tv_template), 5544 .aad_iv = 1, << 5545 } 3374 } 5546 } 3375 } 5547 }, { 3376 }, { 5548 .alg = "rfc7539(chacha20,poly 3377 .alg = "rfc7539(chacha20,poly1305)", 5549 .test = alg_test_aead, 3378 .test = alg_test_aead, 5550 .suite = { 3379 .suite = { 5551 .aead = __VECS(rfc753 !! 3380 .aead = { >> 3381 .enc = __VECS(rfc7539_enc_tv_template), >> 3382 .dec = __VECS(rfc7539_dec_tv_template), >> 3383 } 5552 } 3384 } 5553 }, { 3385 }, { 5554 .alg = "rfc7539esp(chacha20,p 3386 .alg = "rfc7539esp(chacha20,poly1305)", 5555 .test = alg_test_aead, 3387 .test = alg_test_aead, 5556 .suite = { 3388 .suite = { 5557 .aead = { 3389 .aead = { 5558 ____VECS(rfc7 !! 3390 .enc = __VECS(rfc7539esp_enc_tv_template), 5559 .einval_allow !! 3391 .dec = __VECS(rfc7539esp_dec_tv_template), 5560 .aad_iv = 1, << 5561 } 3392 } 5562 } 3393 } 5563 }, { 3394 }, { >> 3395 .alg = "rmd128", >> 3396 .test = alg_test_hash, >> 3397 .suite = { >> 3398 .hash = __VECS(rmd128_tv_template) >> 3399 } >> 3400 }, { 5564 .alg = "rmd160", 3401 .alg = "rmd160", 5565 .test = alg_test_hash, 3402 .test = alg_test_hash, 5566 .suite = { 3403 .suite = { 5567 .hash = __VECS(rmd160 3404 .hash = __VECS(rmd160_tv_template) 5568 } 3405 } 5569 }, { 3406 }, { >> 3407 .alg = "rmd256", >> 3408 .test = alg_test_hash, >> 3409 .suite = { >> 3410 .hash = __VECS(rmd256_tv_template) >> 3411 } >> 3412 }, { >> 3413 .alg = "rmd320", >> 3414 .test = alg_test_hash, >> 3415 .suite = { >> 3416 .hash = __VECS(rmd320_tv_template) >> 3417 } >> 3418 }, { 5570 .alg = "rsa", 3419 .alg = "rsa", 5571 .test = alg_test_akcipher, 3420 .test = alg_test_akcipher, 5572 .fips_allowed = 1, 3421 .fips_allowed = 1, 5573 .suite = { 3422 .suite = { 5574 .akcipher = __VECS(rs 3423 .akcipher = __VECS(rsa_tv_template) 5575 } 3424 } 5576 }, { 3425 }, { >> 3426 .alg = "salsa20", >> 3427 .test = alg_test_skcipher, >> 3428 .suite = { >> 3429 .cipher = { >> 3430 .enc = __VECS(salsa20_stream_enc_tv_template) >> 3431 } >> 3432 } >> 3433 }, { 5577 .alg = "sha1", 3434 .alg = "sha1", 5578 .test = alg_test_hash, 3435 .test = alg_test_hash, 5579 .fips_allowed = 1, 3436 .fips_allowed = 1, 5580 .suite = { 3437 .suite = { 5581 .hash = __VECS(sha1_t 3438 .hash = __VECS(sha1_tv_template) 5582 } 3439 } 5583 }, { 3440 }, { 5584 .alg = "sha224", 3441 .alg = "sha224", 5585 .test = alg_test_hash, 3442 .test = alg_test_hash, 5586 .fips_allowed = 1, 3443 .fips_allowed = 1, 5587 .suite = { 3444 .suite = { 5588 .hash = __VECS(sha224 3445 .hash = __VECS(sha224_tv_template) 5589 } 3446 } 5590 }, { 3447 }, { 5591 .alg = "sha256", 3448 .alg = "sha256", 5592 .test = alg_test_hash, 3449 .test = alg_test_hash, 5593 .fips_allowed = 1, 3450 .fips_allowed = 1, 5594 .suite = { 3451 .suite = { 5595 .hash = __VECS(sha256 3452 .hash = __VECS(sha256_tv_template) 5596 } 3453 } 5597 }, { 3454 }, { 5598 .alg = "sha3-224", 3455 .alg = "sha3-224", 5599 .test = alg_test_hash, 3456 .test = alg_test_hash, 5600 .fips_allowed = 1, 3457 .fips_allowed = 1, 5601 .suite = { 3458 .suite = { 5602 .hash = __VECS(sha3_2 3459 .hash = __VECS(sha3_224_tv_template) 5603 } 3460 } 5604 }, { 3461 }, { 5605 .alg = "sha3-256", 3462 .alg = "sha3-256", 5606 .test = alg_test_hash, 3463 .test = alg_test_hash, 5607 .fips_allowed = 1, 3464 .fips_allowed = 1, 5608 .suite = { 3465 .suite = { 5609 .hash = __VECS(sha3_2 3466 .hash = __VECS(sha3_256_tv_template) 5610 } 3467 } 5611 }, { 3468 }, { 5612 .alg = "sha3-384", 3469 .alg = "sha3-384", 5613 .test = 
alg_test_hash, 3470 .test = alg_test_hash, 5614 .fips_allowed = 1, 3471 .fips_allowed = 1, 5615 .suite = { 3472 .suite = { 5616 .hash = __VECS(sha3_3 3473 .hash = __VECS(sha3_384_tv_template) 5617 } 3474 } 5618 }, { 3475 }, { 5619 .alg = "sha3-512", 3476 .alg = "sha3-512", 5620 .test = alg_test_hash, 3477 .test = alg_test_hash, 5621 .fips_allowed = 1, 3478 .fips_allowed = 1, 5622 .suite = { 3479 .suite = { 5623 .hash = __VECS(sha3_5 3480 .hash = __VECS(sha3_512_tv_template) 5624 } 3481 } 5625 }, { 3482 }, { 5626 .alg = "sha384", 3483 .alg = "sha384", 5627 .test = alg_test_hash, 3484 .test = alg_test_hash, 5628 .fips_allowed = 1, 3485 .fips_allowed = 1, 5629 .suite = { 3486 .suite = { 5630 .hash = __VECS(sha384 3487 .hash = __VECS(sha384_tv_template) 5631 } 3488 } 5632 }, { 3489 }, { 5633 .alg = "sha512", 3490 .alg = "sha512", 5634 .test = alg_test_hash, 3491 .test = alg_test_hash, 5635 .fips_allowed = 1, 3492 .fips_allowed = 1, 5636 .suite = { 3493 .suite = { 5637 .hash = __VECS(sha512 3494 .hash = __VECS(sha512_tv_template) 5638 } 3495 } 5639 }, { 3496 }, { 5640 .alg = "sm3", 3497 .alg = "sm3", 5641 .test = alg_test_hash, 3498 .test = alg_test_hash, 5642 .suite = { 3499 .suite = { 5643 .hash = __VECS(sm3_tv 3500 .hash = __VECS(sm3_tv_template) 5644 } 3501 } 5645 }, { 3502 }, { 5646 .alg = "streebog256", !! 3503 .alg = "tgr128", 5647 .test = alg_test_hash, 3504 .test = alg_test_hash, 5648 .suite = { 3505 .suite = { 5649 .hash = __VECS(streeb !! 3506 .hash = __VECS(tgr128_tv_template) 5650 } 3507 } 5651 }, { 3508 }, { 5652 .alg = "streebog512", !! 3509 .alg = "tgr160", 5653 .test = alg_test_hash, 3510 .test = alg_test_hash, 5654 .suite = { 3511 .suite = { 5655 .hash = __VECS(streeb !! 3512 .hash = __VECS(tgr160_tv_template) 5656 } 3513 } 5657 }, { 3514 }, { 5658 .alg = "vmac64(aes)", !! 3515 .alg = "tgr192", 5659 .test = alg_test_hash, 3516 .test = alg_test_hash, 5660 .suite = { 3517 .suite = { 5661 .hash = __VECS(vmac64 !! 
3518 .hash = __VECS(tgr192_tv_template) >> 3519 } >> 3520 }, { >> 3521 .alg = "vmac(aes)", >> 3522 .test = alg_test_hash, >> 3523 .suite = { >> 3524 .hash = __VECS(aes_vmac128_tv_template) 5662 } 3525 } 5663 }, { 3526 }, { 5664 .alg = "wp256", 3527 .alg = "wp256", 5665 .test = alg_test_hash, 3528 .test = alg_test_hash, 5666 .suite = { 3529 .suite = { 5667 .hash = __VECS(wp256_ 3530 .hash = __VECS(wp256_tv_template) 5668 } 3531 } 5669 }, { 3532 }, { 5670 .alg = "wp384", 3533 .alg = "wp384", 5671 .test = alg_test_hash, 3534 .test = alg_test_hash, 5672 .suite = { 3535 .suite = { 5673 .hash = __VECS(wp384_ 3536 .hash = __VECS(wp384_tv_template) 5674 } 3537 } 5675 }, { 3538 }, { 5676 .alg = "wp512", 3539 .alg = "wp512", 5677 .test = alg_test_hash, 3540 .test = alg_test_hash, 5678 .suite = { 3541 .suite = { 5679 .hash = __VECS(wp512_ 3542 .hash = __VECS(wp512_tv_template) 5680 } 3543 } 5681 }, { 3544 }, { 5682 .alg = "xcbc(aes)", 3545 .alg = "xcbc(aes)", 5683 .test = alg_test_hash, 3546 .test = alg_test_hash, 5684 .suite = { 3547 .suite = { 5685 .hash = __VECS(aes_xc 3548 .hash = __VECS(aes_xcbc128_tv_template) 5686 } 3549 } 5687 }, { 3550 }, { 5688 .alg = "xcbc(sm4)", << 5689 .test = alg_test_hash, << 5690 .suite = { << 5691 .hash = __VECS(sm4_xc << 5692 } << 5693 }, { << 5694 .alg = "xchacha12", << 5695 .test = alg_test_skcipher, << 5696 .suite = { << 5697 .cipher = __VECS(xcha << 5698 }, << 5699 }, { << 5700 .alg = "xchacha20", << 5701 .test = alg_test_skcipher, << 5702 .suite = { << 5703 .cipher = __VECS(xcha << 5704 }, << 5705 }, { << 5706 .alg = "xctr(aes)", << 5707 .test = alg_test_skcipher, << 5708 .suite = { << 5709 .cipher = __VECS(aes_ << 5710 } << 5711 }, { << 5712 .alg = "xts(aes)", 3551 .alg = "xts(aes)", 5713 .generic_driver = "xts(ecb(ae << 5714 .test = alg_test_skcipher, 3552 .test = alg_test_skcipher, 5715 .fips_allowed = 1, 3553 .fips_allowed = 1, 5716 .suite = { 3554 .suite = { 5717 .cipher = __VECS(aes_ !! 3555 .cipher = { >> 3556 .enc = __VECS(aes_xts_enc_tv_template), >> 3557 .dec = __VECS(aes_xts_dec_tv_template) >> 3558 } 5718 } 3559 } 5719 }, { 3560 }, { 5720 .alg = "xts(camellia)", 3561 .alg = "xts(camellia)", 5721 .generic_driver = "xts(ecb(ca << 5722 .test = alg_test_skcipher, 3562 .test = alg_test_skcipher, 5723 .suite = { 3563 .suite = { 5724 .cipher = __VECS(came !! 3564 .cipher = { >> 3565 .enc = __VECS(camellia_xts_enc_tv_template), >> 3566 .dec = __VECS(camellia_xts_dec_tv_template) >> 3567 } 5725 } 3568 } 5726 }, { 3569 }, { 5727 .alg = "xts(cast6)", 3570 .alg = "xts(cast6)", 5728 .generic_driver = "xts(ecb(ca << 5729 .test = alg_test_skcipher, 3571 .test = alg_test_skcipher, 5730 .suite = { 3572 .suite = { 5731 .cipher = __VECS(cast !! 3573 .cipher = { >> 3574 .enc = __VECS(cast6_xts_enc_tv_template), >> 3575 .dec = __VECS(cast6_xts_dec_tv_template) >> 3576 } 5732 } 3577 } 5733 }, { 3578 }, { 5734 /* Same as xts(aes) except th << 5735 * hardware secure memory whi << 5736 */ << 5737 .alg = "xts(paes)", << 5738 .test = alg_test_null, << 5739 .fips_allowed = 1, << 5740 }, { << 5741 .alg = "xts(serpent)", 3579 .alg = "xts(serpent)", 5742 .generic_driver = "xts(ecb(se << 5743 .test = alg_test_skcipher, 3580 .test = alg_test_skcipher, 5744 .suite = { 3581 .suite = { 5745 .cipher = __VECS(serp !! 3582 .cipher = { >> 3583 .enc = __VECS(serpent_xts_enc_tv_template), >> 3584 .dec = __VECS(serpent_xts_dec_tv_template) >> 3585 } 5746 } 3586 } 5747 }, { 3587 }, { 5748 .alg = "xts(sm4)", !! 
3588 .alg = "xts(speck128)", 5749 .generic_driver = "xts(ecb(sm << 5750 .test = alg_test_skcipher, 3589 .test = alg_test_skcipher, 5751 .suite = { 3590 .suite = { 5752 .cipher = __VECS(sm4_ !! 3591 .cipher = { >> 3592 .enc = __VECS(speck128_xts_enc_tv_template), >> 3593 .dec = __VECS(speck128_xts_dec_tv_template) >> 3594 } 5753 } 3595 } 5754 }, { 3596 }, { 5755 .alg = "xts(twofish)", !! 3597 .alg = "xts(speck64)", 5756 .generic_driver = "xts(ecb(tw << 5757 .test = alg_test_skcipher, 3598 .test = alg_test_skcipher, 5758 .suite = { 3599 .suite = { 5759 .cipher = __VECS(tf_x !! 3600 .cipher = { >> 3601 .enc = __VECS(speck64_xts_enc_tv_template), >> 3602 .dec = __VECS(speck64_xts_dec_tv_template) >> 3603 } 5760 } 3604 } 5761 }, { 3605 }, { 5762 #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390) !! 3606 .alg = "xts(twofish)", 5763 .alg = "xts-paes-s390", << 5764 .fips_allowed = 1, << 5765 .test = alg_test_skcipher, 3607 .test = alg_test_skcipher, 5766 .suite = { 3608 .suite = { 5767 .cipher = __VECS(aes_ !! 3609 .cipher = { 5768 } !! 3610 .enc = __VECS(tf_xts_enc_tv_template), 5769 }, { !! 3611 .dec = __VECS(tf_xts_dec_tv_template) 5770 #endif !! 3612 } 5771 .alg = "xxhash64", << 5772 .test = alg_test_hash, << 5773 .fips_allowed = 1, << 5774 .suite = { << 5775 .hash = __VECS(xxhash << 5776 } 3613 } 5777 }, { 3614 }, { 5778 .alg = "zstd", !! 3615 .alg = "zlib-deflate", 5779 .test = alg_test_comp, 3616 .test = alg_test_comp, 5780 .fips_allowed = 1, 3617 .fips_allowed = 1, 5781 .suite = { 3618 .suite = { 5782 .comp = { 3619 .comp = { 5783 .comp = __VEC !! 3620 .comp = __VECS(zlib_deflate_comp_tv_template), 5784 .decomp = __V !! 3621 .decomp = __VECS(zlib_deflate_decomp_tv_template) 5785 } 3622 } 5786 } 3623 } 5787 } 3624 } 5788 }; 3625 }; 5789 3626 5790 static void alg_check_test_descs_order(void) !! 
3627 static bool alg_test_descs_checked; >> 3628 >> 3629 static void alg_test_descs_check_order(void) 5791 { 3630 { 5792 int i; 3631 int i; 5793 3632 >> 3633 /* only check once */ >> 3634 if (alg_test_descs_checked) >> 3635 return; >> 3636 >> 3637 alg_test_descs_checked = true; >> 3638 5794 for (i = 1; i < ARRAY_SIZE(alg_test_d 3639 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { 5795 int diff = strcmp(alg_test_de 3640 int diff = strcmp(alg_test_descs[i - 1].alg, 5796 alg_test_de 3641 alg_test_descs[i].alg); 5797 3642 5798 if (WARN_ON(diff > 0)) { 3643 if (WARN_ON(diff > 0)) { 5799 pr_warn("testmgr: alg 3644 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", 5800 alg_test_desc 3645 alg_test_descs[i - 1].alg, 5801 alg_test_desc 3646 alg_test_descs[i].alg); 5802 } 3647 } 5803 3648 5804 if (WARN_ON(diff == 0)) { 3649 if (WARN_ON(diff == 0)) { 5805 pr_warn("testmgr: dup 3650 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n", 5806 alg_test_desc 3651 alg_test_descs[i].alg); 5807 } 3652 } 5808 } 3653 } 5809 } 3654 } 5810 3655 5811 static void alg_check_testvec_configs(void) << 5812 { << 5813 int i; << 5814 << 5815 for (i = 0; i < ARRAY_SIZE(default_ci << 5816 WARN_ON(!valid_testvec_config << 5817 &default_ciph << 5818 << 5819 for (i = 0; i < ARRAY_SIZE(default_ha << 5820 WARN_ON(!valid_testvec_config << 5821 &default_hash << 5822 } << 5823 << 5824 static void testmgr_onetime_init(void) << 5825 { << 5826 alg_check_test_descs_order(); << 5827 alg_check_testvec_configs(); << 5828 << 5829 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS << 5830 pr_warn("alg: extra crypto tests enab << 5831 #endif << 5832 } << 5833 << 5834 static int alg_find_test(const char *alg) 3656 static int alg_find_test(const char *alg) 5835 { 3657 { 5836 int start = 0; 3658 int start = 0; 5837 int end = ARRAY_SIZE(alg_test_descs); 3659 int end = ARRAY_SIZE(alg_test_descs); 5838 3660 5839 while (start < end) { 3661 while (start < end) { 5840 int i = (start + end) / 2; 3662 int i = (start + end) / 2; 5841 int diff = strcmp(alg_test_de 3663 int diff = strcmp(alg_test_descs[i].alg, alg); 5842 3664 5843 if (diff > 0) { 3665 if (diff > 0) { 5844 end = i; 3666 end = i; 5845 continue; 3667 continue; 5846 } 3668 } 5847 3669 5848 if (diff < 0) { 3670 if (diff < 0) { 5849 start = i + 1; 3671 start = i + 1; 5850 continue; 3672 continue; 5851 } 3673 } 5852 3674 5853 return i; 3675 return i; 5854 } 3676 } 5855 3677 5856 return -1; 3678 return -1; 5857 } 3679 } 5858 3680 5859 static int alg_fips_disabled(const char *driv << 5860 { << 5861 pr_info("alg: %s (%s) is disabled due << 5862 << 5863 return -ECANCELED; << 5864 } << 5865 << 5866 int alg_test(const char *driver, const char * 3681 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 5867 { 3682 { 5868 int i; 3683 int i; 5869 int j; 3684 int j; 5870 int rc; 3685 int rc; 5871 3686 5872 if (!fips_enabled && notests) { 3687 if (!fips_enabled && notests) { 5873 printk_once(KERN_INFO "alg: s 3688 printk_once(KERN_INFO "alg: self-tests disabled\n"); 5874 return 0; 3689 return 0; 5875 } 3690 } 5876 3691 5877 DO_ONCE(testmgr_onetime_init); !! 
	DO_ONCE(testmgr_onetime_init);

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled) {
		if (j >= 0 && !alg_test_descs[j].fips_allowed)
			return -EINVAL;

		if (i >= 0 && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;
	}

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (rc) {
		if (fips_enabled || panic_on_fail) {
			fips_fail_notify();
			panic("alg: self-tests for %s (%s) failed in %s mode!\n",
			      driver, alg,
			      fips_enabled ? "fips" : "panic_on_fail");
		}
		pr_warn("alg: self-tests for %s using %s failed (rc=%d)",
			alg, driver, rc);
		WARN(rc != -ENOENT,
		     "alg: self-tests for %s using %s failed (rc=%d)",
		     alg, driver, rc);
	} else {
		if (fips_enabled)
			pr_info("alg: self-tests for %s (%s) passed\n",
				driver, alg);
	}

	return rc;

notest:
	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_LSKCIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			goto notest2;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest2;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_skcipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

notest2:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);

	if (type & CRYPTO_ALG_FIPS_INTERNAL)
		return alg_fips_disabled(driver, alg);

	return 0;
non_fips_alg:
	return alg_fips_disabled(driver, alg);
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);
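/*
 * Illustrative usage only (not in the original file): alg_test() is normally
 * invoked by the crypto manager (crypto/algboss.c) when an algorithm instance
 * is registered.  A direct call, assuming the generic SHA-256 driver is
 * available, would look roughly like:
 *
 *	int err = alg_test("sha256-generic", "sha256", 0, 0);
 *
 * A zero return means the self-tests passed (or none exist for the
 * algorithm); a negative value reports the failure.
 */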