// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 *
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/bug.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <net/netlink.h>
#include "skcipher.h"

#define CRYPTO_ALG_TYPE_SKCIPHER_MASK	0x0000000e

enum {
	SKCIPHER_WALK_PHYS = 1 << 0,
	SKCIPHER_WALK_SLOW = 1 << 1,
	SKCIPHER_WALK_COPY = 1 << 2,
	SKCIPHER_WALK_DIFF = 1 << 3,
	SKCIPHER_WALK_SLEEP = 1 << 4,
};

struct skcipher_walk_buffer {
	struct list_head entry;
	struct scatter_walk dst;
	unsigned int len;
	u8 *data;
	u8 buffer[];
};

static const struct crypto_type crypto_skcipher_type;

static int skcipher_walk_next(struct skcipher_walk *walk);

static inline void skcipher_map_src(struct skcipher_walk *walk)
{
	walk->src.virt.addr = scatterwalk_map(&walk->in);
}

static inline void skcipher_map_dst(struct skcipher_walk *walk)
{
	walk->dst.virt.addr = scatterwalk_map(&walk->out);
}

static inline void skcipher_unmap_src(struct skcipher_walk *walk)
{
	scatterwalk_unmap(walk->src.virt.addr);
}

static inline void skcipher_unmap_dst(struct skcipher_walk *walk)
{
	scatterwalk_unmap(walk->dst.virt.addr);
}

static inline gfp_t skcipher_walk_gfp(struct skcipher_walk *walk)
{
	return walk->flags & SKCIPHER_WALK_SLEEP ? GFP_KERNEL : GFP_ATOMIC;
}

/* Get a spot of the specified length that does not straddle a page.
 * The caller needs to ensure that there is enough space for this operation.
 */
static inline u8 *skcipher_get_spot(u8 *start, unsigned int len)
{
	u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);

	return max(start, end_page);
}

static inline struct skcipher_alg *__crypto_skcipher_alg(
	struct crypto_alg *alg)
{
	return container_of(alg, struct skcipher_alg, base);
}

static int skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
{
	u8 *addr;

	addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
31 return sizeof(struct crypto_ablkcipher *); 97 addr = skcipher_get_spot(addr, bsize); << 98 scatterwalk_copychunks(addr, &walk->ou << 99 (walk->flags & << 100 return 0; << 101 } 32 } 102 33 103 int skcipher_walk_done(struct skcipher_walk *w !! 34 static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm, >> 35 const u8 *key, unsigned int keylen) 104 { 36 { 105 unsigned int n = walk->nbytes; !! 37 struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm); 106 unsigned int nbytes = 0; !! 38 struct crypto_blkcipher *blkcipher = *ctx; 107 !! 39 int err; 108 if (!n) << 109 goto finish; << 110 << 111 if (likely(err >= 0)) { << 112 n -= err; << 113 nbytes = walk->total - n; << 114 } << 115 << 116 if (likely(!(walk->flags & (SKCIPHER_W << 117 SKCIPHER_W << 118 SKCIPHER_W << 119 SKCIPHER_W << 120 unmap_src: << 121 skcipher_unmap_src(walk); << 122 } else if (walk->flags & SKCIPHER_WALK << 123 skcipher_unmap_dst(walk); << 124 goto unmap_src; << 125 } else if (walk->flags & SKCIPHER_WALK << 126 skcipher_map_dst(walk); << 127 memcpy(walk->dst.virt.addr, wa << 128 skcipher_unmap_dst(walk); << 129 } else if (unlikely(walk->flags & SKCI << 130 if (err > 0) { << 131 /* << 132 * Didn't process all << 133 * broken, or this was << 134 * the message wasn't << 135 * the algorithm requi << 136 */ << 137 err = -EINVAL; << 138 nbytes = 0; << 139 } else << 140 n = skcipher_done_slow << 141 } << 142 << 143 if (err > 0) << 144 err = 0; << 145 << 146 walk->total = nbytes; << 147 walk->nbytes = 0; << 148 << 149 scatterwalk_advance(&walk->in, n); << 150 scatterwalk_advance(&walk->out, n); << 151 scatterwalk_done(&walk->in, 0, nbytes) << 152 scatterwalk_done(&walk->out, 1, nbytes << 153 << 154 if (nbytes) { << 155 crypto_yield(walk->flags & SKC << 156 CRYPTO_TFM_REQ_MA << 157 return skcipher_walk_next(walk << 158 } << 159 40 160 finish: !! 41 crypto_blkcipher_clear_flags(blkcipher, ~0); 161 /* Short-circuit for the common/fast p !! 42 crypto_blkcipher_set_flags(blkcipher, crypto_skcipher_get_flags(tfm) & 162 if (!((unsigned long)walk->buffer | (u !! 43 CRYPTO_TFM_REQ_MASK); 163 goto out; !! 44 err = crypto_blkcipher_setkey(blkcipher, key, keylen); 164 !! 45 crypto_skcipher_set_flags(tfm, crypto_blkcipher_get_flags(blkcipher) & 165 if (walk->flags & SKCIPHER_WALK_PHYS) !! 46 CRYPTO_TFM_RES_MASK); 166 goto out; << 167 << 168 if (walk->iv != walk->oiv) << 169 memcpy(walk->oiv, walk->iv, wa << 170 if (walk->buffer != walk->page) << 171 kfree(walk->buffer); << 172 if (walk->page) << 173 free_page((unsigned long)walk- << 174 47 175 out: << 176 return err; 48 return err; 177 } 49 } 178 EXPORT_SYMBOL_GPL(skcipher_walk_done); << 179 50 180 void skcipher_walk_complete(struct skcipher_wa !! 51 static int skcipher_crypt_blkcipher(struct skcipher_request *req, >> 52 int (*crypt)(struct blkcipher_desc *, >> 53 struct scatterlist *, >> 54 struct scatterlist *, >> 55 unsigned int)) 181 { 56 { 182 struct skcipher_walk_buffer *p, *tmp; !! 57 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 183 !! 58 struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm); 184 list_for_each_entry_safe(p, tmp, &walk !! 59 struct blkcipher_desc desc = { 185 u8 *data; !! 60 .tfm = *ctx, 186 !! 61 .info = req->iv, 187 if (err) !! 62 .flags = req->base.flags, 188 goto done; !! 
63 }; 189 << 190 data = p->data; << 191 if (!data) { << 192 data = PTR_ALIGN(&p->b << 193 data = skcipher_get_sp << 194 } << 195 << 196 scatterwalk_copychunks(data, & << 197 << 198 if (offset_in_page(p->data) + << 199 PAGE_SIZE) << 200 free_page((unsigned lo << 201 << 202 done: << 203 list_del(&p->entry); << 204 kfree(p); << 205 } << 206 64 207 if (!err && walk->iv != walk->oiv) << 208 memcpy(walk->oiv, walk->iv, wa << 209 if (walk->buffer != walk->page) << 210 kfree(walk->buffer); << 211 if (walk->page) << 212 free_page((unsigned long)walk- << 213 } << 214 EXPORT_SYMBOL_GPL(skcipher_walk_complete); << 215 65 216 static void skcipher_queue_write(struct skciph !! 66 return crypt(&desc, req->dst, req->src, req->cryptlen); 217 struct skciph << 218 { << 219 p->dst = walk->out; << 220 list_add_tail(&p->entry, &walk->buffer << 221 } 67 } 222 68 223 static int skcipher_next_slow(struct skcipher_ !! 69 static int skcipher_encrypt_blkcipher(struct skcipher_request *req) 224 { 70 { 225 bool phys = walk->flags & SKCIPHER_WAL !! 71 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 226 unsigned alignmask = walk->alignmask; !! 72 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 227 struct skcipher_walk_buffer *p; !! 73 struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; 228 unsigned a; << 229 unsigned n; << 230 u8 *buffer; << 231 void *v; << 232 << 233 if (!phys) { << 234 if (!walk->buffer) << 235 walk->buffer = walk->p << 236 buffer = walk->buffer; << 237 if (buffer) << 238 goto ok; << 239 } << 240 << 241 /* Start with the minimum alignment of << 242 a = crypto_tfm_ctx_alignment() - 1; << 243 n = bsize; << 244 << 245 if (phys) { << 246 /* Calculate the minimum align << 247 a &= (sizeof(*p) ^ (sizeof(*p) << 248 n += sizeof(*p); << 249 } << 250 << 251 /* Minimum size to align p->buffer by << 252 n += alignmask & ~a; << 253 << 254 /* Minimum size to ensure p->buffer do << 255 n += (bsize - 1) & ~(alignmask | a); << 256 74 257 v = kzalloc(n, skcipher_walk_gfp(walk) !! 75 return skcipher_crypt_blkcipher(req, alg->encrypt); 258 if (!v) << 259 return skcipher_walk_done(walk << 260 << 261 if (phys) { << 262 p = v; << 263 p->len = bsize; << 264 skcipher_queue_write(walk, p); << 265 buffer = p->buffer; << 266 } else { << 267 walk->buffer = v; << 268 buffer = v; << 269 } << 270 << 271 ok: << 272 walk->dst.virt.addr = PTR_ALIGN(buffer << 273 walk->dst.virt.addr = skcipher_get_spo << 274 walk->src.virt.addr = walk->dst.virt.a << 275 << 276 scatterwalk_copychunks(walk->src.virt. << 277 << 278 walk->nbytes = bsize; << 279 walk->flags |= SKCIPHER_WALK_SLOW; << 280 << 281 return 0; << 282 } 76 } 283 77 284 static int skcipher_next_copy(struct skcipher_ !! 78 static int skcipher_decrypt_blkcipher(struct skcipher_request *req) 285 { 79 { 286 struct skcipher_walk_buffer *p; !! 80 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 287 u8 *tmp = walk->page; !! 81 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 288 !! 
82 struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; 289 skcipher_map_src(walk); << 290 memcpy(tmp, walk->src.virt.addr, walk- << 291 skcipher_unmap_src(walk); << 292 << 293 walk->src.virt.addr = tmp; << 294 walk->dst.virt.addr = tmp; << 295 << 296 if (!(walk->flags & SKCIPHER_WALK_PHYS << 297 return 0; << 298 << 299 p = kmalloc(sizeof(*p), skcipher_walk_ << 300 if (!p) << 301 return -ENOMEM; << 302 << 303 p->data = walk->page; << 304 p->len = walk->nbytes; << 305 skcipher_queue_write(walk, p); << 306 << 307 if (offset_in_page(walk->page) + walk- << 308 PAGE_SIZE) << 309 walk->page = NULL; << 310 else << 311 walk->page += walk->nbytes; << 312 83 313 return 0; !! 84 return skcipher_crypt_blkcipher(req, alg->decrypt); 314 } 85 } 315 86 316 static int skcipher_next_fast(struct skcipher_ !! 87 static void crypto_exit_skcipher_ops_blkcipher(struct crypto_tfm *tfm) 317 { 88 { 318 unsigned long diff; !! 89 struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm); 319 << 320 walk->src.phys.page = scatterwalk_page << 321 walk->src.phys.offset = offset_in_page << 322 walk->dst.phys.page = scatterwalk_page << 323 walk->dst.phys.offset = offset_in_page << 324 90 325 if (walk->flags & SKCIPHER_WALK_PHYS) !! 91 crypto_free_blkcipher(*ctx); 326 return 0; << 327 << 328 diff = walk->src.phys.offset - walk->d << 329 diff |= walk->src.virt.page - walk->ds << 330 << 331 skcipher_map_src(walk); << 332 walk->dst.virt.addr = walk->src.virt.a << 333 << 334 if (diff) { << 335 walk->flags |= SKCIPHER_WALK_D << 336 skcipher_map_dst(walk); << 337 } << 338 << 339 return 0; << 340 } 92 } 341 93 342 static int skcipher_walk_next(struct skcipher_ !! 94 static int crypto_init_skcipher_ops_blkcipher(struct crypto_tfm *tfm) 343 { 95 { 344 unsigned int bsize; !! 96 struct crypto_alg *calg = tfm->__crt_alg; 345 unsigned int n; !! 97 struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm); 346 int err; !! 98 struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm); 347 !! 99 struct crypto_blkcipher *blkcipher; 348 walk->flags &= ~(SKCIPHER_WALK_SLOW | !! 100 struct crypto_tfm *btfm; 349 SKCIPHER_WALK_DIFF); << 350 << 351 n = walk->total; << 352 bsize = min(walk->stride, max(n, walk- << 353 n = scatterwalk_clamp(&walk->in, n); << 354 n = scatterwalk_clamp(&walk->out, n); << 355 << 356 if (unlikely(n < bsize)) { << 357 if (unlikely(walk->total < wal << 358 return skcipher_walk_d << 359 << 360 slow_path: << 361 err = skcipher_next_slow(walk, << 362 goto set_phys_lowmem; << 363 } << 364 << 365 if (unlikely((walk->in.offset | walk-> << 366 if (!walk->page) { << 367 gfp_t gfp = skcipher_w << 368 << 369 walk->page = (void *)_ << 370 if (!walk->page) << 371 goto slow_path << 372 } << 373 << 374 walk->nbytes = min_t(unsigned, << 375 PAGE_SIZE << 376 walk->flags |= SKCIPHER_WALK_C << 377 err = skcipher_next_copy(walk) << 378 goto set_phys_lowmem; << 379 } << 380 << 381 walk->nbytes = n; << 382 101 383 return skcipher_next_fast(walk); !! 102 if (!crypto_mod_get(calg)) >> 103 return -EAGAIN; 384 104 385 set_phys_lowmem: !! 105 btfm = __crypto_alloc_tfm(calg, CRYPTO_ALG_TYPE_BLKCIPHER, 386 if (!err && (walk->flags & SKCIPHER_WA !! 106 CRYPTO_ALG_TYPE_MASK); 387 walk->src.phys.page = virt_to_ !! 107 if (IS_ERR(btfm)) { 388 walk->dst.phys.page = virt_to_ !! 108 crypto_mod_put(calg); 389 walk->src.phys.offset &= PAGE_ !! 109 return PTR_ERR(btfm); 390 walk->dst.phys.offset &= PAGE_ << 391 } 110 } 392 return err; << 393 } << 394 111 395 static int skcipher_copy_iv(struct skcipher_wa !! 
112 blkcipher = __crypto_blkcipher_cast(btfm); 396 { !! 113 *ctx = blkcipher; 397 unsigned a = crypto_tfm_ctx_alignment( !! 114 tfm->exit = crypto_exit_skcipher_ops_blkcipher; 398 unsigned alignmask = walk->alignmask; << 399 unsigned ivsize = walk->ivsize; << 400 unsigned bs = walk->stride; << 401 unsigned aligned_bs; << 402 unsigned size; << 403 u8 *iv; << 404 << 405 aligned_bs = ALIGN(bs, alignmask + 1); << 406 << 407 /* Minimum size to align buffer by ali << 408 size = alignmask & ~a; << 409 << 410 if (walk->flags & SKCIPHER_WALK_PHYS) << 411 size += ivsize; << 412 else { << 413 size += aligned_bs + ivsize; << 414 << 415 /* Minimum size to ensure buff << 416 size += (bs - 1) & ~(alignmask << 417 } << 418 115 419 walk->buffer = kmalloc(size, skcipher_ !! 116 skcipher->setkey = skcipher_setkey_blkcipher; 420 if (!walk->buffer) !! 117 skcipher->encrypt = skcipher_encrypt_blkcipher; 421 return -ENOMEM; !! 118 skcipher->decrypt = skcipher_decrypt_blkcipher; 422 119 423 iv = PTR_ALIGN(walk->buffer, alignmask !! 120 skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher); 424 iv = skcipher_get_spot(iv, bs) + align !! 121 skcipher->has_setkey = calg->cra_blkcipher.max_keysize; 425 122 426 walk->iv = memcpy(iv, walk->iv, walk-> << 427 return 0; 123 return 0; 428 } 124 } 429 125 430 static int skcipher_walk_first(struct skcipher !! 126 static int skcipher_setkey_ablkcipher(struct crypto_skcipher *tfm, 431 { !! 127 const u8 *key, unsigned int keylen) 432 if (WARN_ON_ONCE(in_hardirq())) << 433 return -EDEADLK; << 434 << 435 walk->buffer = NULL; << 436 if (unlikely(((unsigned long)walk->iv << 437 int err = skcipher_copy_iv(wal << 438 if (err) << 439 return err; << 440 } << 441 << 442 walk->page = NULL; << 443 << 444 return skcipher_walk_next(walk); << 445 } << 446 << 447 static int skcipher_walk_skcipher(struct skcip << 448 struct skcip << 449 { << 450 struct crypto_skcipher *tfm = crypto_s << 451 struct skcipher_alg *alg = crypto_skci << 452 << 453 walk->total = req->cryptlen; << 454 walk->nbytes = 0; << 455 walk->iv = req->iv; << 456 walk->oiv = req->iv; << 457 << 458 if (unlikely(!walk->total)) << 459 return 0; << 460 << 461 scatterwalk_start(&walk->in, req->src) << 462 scatterwalk_start(&walk->out, req->dst << 463 << 464 walk->flags &= ~SKCIPHER_WALK_SLEEP; << 465 walk->flags |= req->base.flags & CRYPT << 466 SKCIPHER_WALK_SLEEP : 0 << 467 << 468 walk->blocksize = crypto_skcipher_bloc << 469 walk->ivsize = crypto_skcipher_ivsize( << 470 walk->alignmask = crypto_skcipher_alig << 471 << 472 if (alg->co.base.cra_type != &crypto_s << 473 walk->stride = alg->co.chunksi << 474 else << 475 walk->stride = alg->walksize; << 476 << 477 return skcipher_walk_first(walk); << 478 } << 479 << 480 int skcipher_walk_virt(struct skcipher_walk *w << 481 struct skcipher_request << 482 { 128 { >> 129 struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm); >> 130 struct crypto_ablkcipher *ablkcipher = *ctx; 483 int err; 131 int err; 484 132 485 might_sleep_if(req->base.flags & CRYPT !! 133 crypto_ablkcipher_clear_flags(ablkcipher, ~0); 486 !! 134 crypto_ablkcipher_set_flags(ablkcipher, 487 walk->flags &= ~SKCIPHER_WALK_PHYS; !! 135 crypto_skcipher_get_flags(tfm) & 488 !! 136 CRYPTO_TFM_REQ_MASK); 489 err = skcipher_walk_skcipher(walk, req !! 137 err = crypto_ablkcipher_setkey(ablkcipher, key, keylen); 490 !! 138 crypto_skcipher_set_flags(tfm, 491 walk->flags &= atomic ? ~SKCIPHER_WALK !! 
139 crypto_ablkcipher_get_flags(ablkcipher) & >> 140 CRYPTO_TFM_RES_MASK); 492 141 493 return err; 142 return err; 494 } 143 } 495 EXPORT_SYMBOL_GPL(skcipher_walk_virt); << 496 144 497 int skcipher_walk_async(struct skcipher_walk * !! 145 static int skcipher_crypt_ablkcipher(struct skcipher_request *req, 498 struct skcipher_reques !! 146 int (*crypt)(struct ablkcipher_request *)) 499 { << 500 walk->flags |= SKCIPHER_WALK_PHYS; << 501 << 502 INIT_LIST_HEAD(&walk->buffers); << 503 << 504 return skcipher_walk_skcipher(walk, re << 505 } << 506 EXPORT_SYMBOL_GPL(skcipher_walk_async); << 507 << 508 static int skcipher_walk_aead_common(struct sk << 509 struct ae << 510 { << 511 struct crypto_aead *tfm = crypto_aead_ << 512 int err; << 513 << 514 walk->nbytes = 0; << 515 walk->iv = req->iv; << 516 walk->oiv = req->iv; << 517 << 518 if (unlikely(!walk->total)) << 519 return 0; << 520 << 521 walk->flags &= ~SKCIPHER_WALK_PHYS; << 522 << 523 scatterwalk_start(&walk->in, req->src) << 524 scatterwalk_start(&walk->out, req->dst << 525 << 526 scatterwalk_copychunks(NULL, &walk->in << 527 scatterwalk_copychunks(NULL, &walk->ou << 528 << 529 scatterwalk_done(&walk->in, 0, walk->t << 530 scatterwalk_done(&walk->out, 0, walk-> << 531 << 532 if (req->base.flags & CRYPTO_TFM_REQ_M << 533 walk->flags |= SKCIPHER_WALK_S << 534 else << 535 walk->flags &= ~SKCIPHER_WALK_ << 536 << 537 walk->blocksize = crypto_aead_blocksiz << 538 walk->stride = crypto_aead_chunksize(t << 539 walk->ivsize = crypto_aead_ivsize(tfm) << 540 walk->alignmask = crypto_aead_alignmas << 541 << 542 err = skcipher_walk_first(walk); << 543 << 544 if (atomic) << 545 walk->flags &= ~SKCIPHER_WALK_ << 546 << 547 return err; << 548 } << 549 << 550 int skcipher_walk_aead_encrypt(struct skcipher << 551 struct aead_req << 552 { << 553 walk->total = req->cryptlen; << 554 << 555 return skcipher_walk_aead_common(walk, << 556 } << 557 EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt); << 558 << 559 int skcipher_walk_aead_decrypt(struct skcipher << 560 struct aead_req << 561 { << 562 struct crypto_aead *tfm = crypto_aead_ << 563 << 564 walk->total = req->cryptlen - crypto_a << 565 << 566 return skcipher_walk_aead_common(walk, << 567 } << 568 EXPORT_SYMBOL_GPL(skcipher_walk_aead_decrypt); << 569 << 570 static void skcipher_set_needkey(struct crypto << 571 { << 572 if (crypto_skcipher_max_keysize(tfm) ! 
<< 573 crypto_skcipher_set_flags(tfm, << 574 } << 575 << 576 static int skcipher_setkey_unaligned(struct cr << 577 const u8 << 578 { << 579 unsigned long alignmask = crypto_skcip << 580 struct skcipher_alg *cipher = crypto_s << 581 u8 *buffer, *alignbuffer; << 582 unsigned long absize; << 583 int ret; << 584 << 585 absize = keylen + alignmask; << 586 buffer = kmalloc(absize, GFP_ATOMIC); << 587 if (!buffer) << 588 return -ENOMEM; << 589 << 590 alignbuffer = (u8 *)ALIGN((unsigned lo << 591 memcpy(alignbuffer, key, keylen); << 592 ret = cipher->setkey(tfm, alignbuffer, << 593 kfree_sensitive(buffer); << 594 return ret; << 595 } << 596 << 597 int crypto_skcipher_setkey(struct crypto_skcip << 598 unsigned int keylen << 599 { << 600 struct skcipher_alg *cipher = crypto_s << 601 unsigned long alignmask = crypto_skcip << 602 int err; << 603 << 604 if (cipher->co.base.cra_type != &crypt << 605 struct crypto_lskcipher **ctx << 606 << 607 crypto_lskcipher_clear_flags(* << 608 crypto_lskcipher_set_flags(*ct << 609 cry << 610 CRY << 611 err = crypto_lskcipher_setkey( << 612 goto out; << 613 } << 614 << 615 if (keylen < cipher->min_keysize || ke << 616 return -EINVAL; << 617 << 618 if ((unsigned long)key & alignmask) << 619 err = skcipher_setkey_unaligne << 620 else << 621 err = cipher->setkey(tfm, key, << 622 << 623 out: << 624 if (unlikely(err)) { << 625 skcipher_set_needkey(tfm); << 626 return err; << 627 } << 628 << 629 crypto_skcipher_clear_flags(tfm, CRYPT << 630 return 0; << 631 } << 632 EXPORT_SYMBOL_GPL(crypto_skcipher_setkey); << 633 << 634 int crypto_skcipher_encrypt(struct skcipher_re << 635 { 147 { 636 struct crypto_skcipher *tfm = crypto_s 148 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 637 struct skcipher_alg *alg = crypto_skci !! 149 struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm); >> 150 struct ablkcipher_request *subreq = skcipher_request_ctx(req); 638 151 639 if (crypto_skcipher_get_flags(tfm) & C !! 152 ablkcipher_request_set_tfm(subreq, *ctx); 640 return -ENOKEY; !! 153 ablkcipher_request_set_callback(subreq, skcipher_request_flags(req), 641 if (alg->co.base.cra_type != &crypto_s !! 154 req->base.complete, req->base.data); 642 return crypto_lskcipher_encryp !! 155 ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, 643 return alg->encrypt(req); !! 156 req->iv); 644 } << 645 EXPORT_SYMBOL_GPL(crypto_skcipher_encrypt); << 646 157 647 int crypto_skcipher_decrypt(struct skcipher_re !! 158 return crypt(subreq); 648 { << 649 struct crypto_skcipher *tfm = crypto_s << 650 struct skcipher_alg *alg = crypto_skci << 651 << 652 if (crypto_skcipher_get_flags(tfm) & C << 653 return -ENOKEY; << 654 if (alg->co.base.cra_type != &crypto_s << 655 return crypto_lskcipher_decryp << 656 return alg->decrypt(req); << 657 } 159 } 658 EXPORT_SYMBOL_GPL(crypto_skcipher_decrypt); << 659 160 660 static int crypto_lskcipher_export(struct skci !! 161 static int skcipher_encrypt_ablkcipher(struct skcipher_request *req) 661 { 162 { 662 struct crypto_skcipher *tfm = crypto_s !! 163 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 663 u8 *ivs = skcipher_request_ctx(req); !! 164 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 664 !! 165 struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher; 665 ivs = PTR_ALIGN(ivs, crypto_skcipher_a << 666 166 667 memcpy(out, ivs + crypto_skcipher_ivsi !! 
167 return skcipher_crypt_ablkcipher(req, alg->encrypt); 668 crypto_skcipher_statesize(tfm)) << 669 << 670 return 0; << 671 } 168 } 672 169 673 static int crypto_lskcipher_import(struct skci !! 170 static int skcipher_decrypt_ablkcipher(struct skcipher_request *req) 674 { 171 { 675 struct crypto_skcipher *tfm = crypto_s !! 172 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 676 u8 *ivs = skcipher_request_ctx(req); !! 173 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 677 !! 174 struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher; 678 ivs = PTR_ALIGN(ivs, crypto_skcipher_a << 679 175 680 memcpy(ivs + crypto_skcipher_ivsize(tf !! 176 return skcipher_crypt_ablkcipher(req, alg->decrypt); 681 crypto_skcipher_statesize(tfm)) << 682 << 683 return 0; << 684 } 177 } 685 178 686 static int skcipher_noexport(struct skcipher_r !! 179 static void crypto_exit_skcipher_ops_ablkcipher(struct crypto_tfm *tfm) 687 { 180 { 688 return 0; !! 181 struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm); 689 } << 690 << 691 static int skcipher_noimport(struct skcipher_r << 692 { << 693 return 0; << 694 } << 695 << 696 int crypto_skcipher_export(struct skcipher_req << 697 { << 698 struct crypto_skcipher *tfm = crypto_s << 699 struct skcipher_alg *alg = crypto_skci << 700 182 701 if (alg->co.base.cra_type != &crypto_s !! 183 crypto_free_ablkcipher(*ctx); 702 return crypto_lskcipher_export << 703 return alg->export(req, out); << 704 } 184 } 705 EXPORT_SYMBOL_GPL(crypto_skcipher_export); << 706 185 707 int crypto_skcipher_import(struct skcipher_req !! 186 static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm) 708 { << 709 struct crypto_skcipher *tfm = crypto_s << 710 struct skcipher_alg *alg = crypto_skci << 711 << 712 if (alg->co.base.cra_type != &crypto_s << 713 return crypto_lskcipher_import << 714 return alg->import(req, in); << 715 } << 716 EXPORT_SYMBOL_GPL(crypto_skcipher_import); << 717 << 718 static void crypto_skcipher_exit_tfm(struct cr << 719 { 187 { >> 188 struct crypto_alg *calg = tfm->__crt_alg; 720 struct crypto_skcipher *skcipher = __c 189 struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm); 721 struct skcipher_alg *alg = crypto_skci !! 190 struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm); 722 !! 191 struct crypto_ablkcipher *ablkcipher; 723 alg->exit(skcipher); !! 192 struct crypto_tfm *abtfm; 724 } !! 193 725 !! 194 if (!crypto_mod_get(calg)) 726 static int crypto_skcipher_init_tfm(struct cry !! 195 return -EAGAIN; 727 { !! 196 728 struct crypto_skcipher *skcipher = __c !! 197 abtfm = __crypto_alloc_tfm(calg, 0, 0); 729 struct skcipher_alg *alg = crypto_skci !! 198 if (IS_ERR(abtfm)) { 730 !! 199 crypto_mod_put(calg); 731 skcipher_set_needkey(skcipher); !! 200 return PTR_ERR(abtfm); 732 !! 201 } 733 if (tfm->__crt_alg->cra_type != &crypt !! 202 734 unsigned am = crypto_skcipher_ !! 203 ablkcipher = __crypto_ablkcipher_cast(abtfm); 735 unsigned reqsize; !! 204 *ctx = ablkcipher; 736 !! 205 tfm->exit = crypto_exit_skcipher_ops_ablkcipher; 737 reqsize = am & ~(crypto_tfm_ct !! 206 738 reqsize += crypto_skcipher_ivs !! 207 skcipher->setkey = skcipher_setkey_ablkcipher; 739 reqsize += crypto_skcipher_sta !! 208 skcipher->encrypt = skcipher_encrypt_ablkcipher; 740 crypto_skcipher_set_reqsize(sk !! 209 skcipher->decrypt = skcipher_decrypt_ablkcipher; 741 !! 210 742 return crypto_init_lskcipher_o !! 211 skcipher->ivsize = crypto_ablkcipher_ivsize(ablkcipher); 743 } !! 212 skcipher->reqsize = crypto_ablkcipher_reqsize(ablkcipher) + 744 !! 
213 sizeof(struct ablkcipher_request); 745 if (alg->exit) !! 214 skcipher->has_setkey = calg->cra_ablkcipher.max_keysize; 746 skcipher->base.exit = crypto_s << 747 << 748 if (alg->init) << 749 return alg->init(skcipher); << 750 215 751 return 0; 216 return 0; 752 } 217 } 753 218 754 static unsigned int crypto_skcipher_extsize(st !! 219 static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm) 755 { << 756 if (alg->cra_type != &crypto_skcipher_ << 757 return sizeof(struct crypto_ls << 758 << 759 return crypto_alg_extsize(alg); << 760 } << 761 << 762 static void crypto_skcipher_free_instance(stru << 763 { << 764 struct skcipher_instance *skcipher = << 765 container_of(inst, struct skci << 766 << 767 skcipher->free(skcipher); << 768 } << 769 << 770 static void crypto_skcipher_show(struct seq_fi << 771 __maybe_unused; << 772 static void crypto_skcipher_show(struct seq_fi << 773 { << 774 struct skcipher_alg *skcipher = __cryp << 775 << 776 seq_printf(m, "type : skcipher << 777 seq_printf(m, "async : %s\n", << 778 alg->cra_flags & CRYPTO_ALG << 779 seq_printf(m, "blocksize : %u\n", a << 780 seq_printf(m, "min keysize : %u\n", s << 781 seq_printf(m, "max keysize : %u\n", s << 782 seq_printf(m, "ivsize : %u\n", s << 783 seq_printf(m, "chunksize : %u\n", s << 784 seq_printf(m, "walksize : %u\n", s << 785 seq_printf(m, "statesize : %u\n", s << 786 } << 787 << 788 static int __maybe_unused crypto_skcipher_repo << 789 struct sk_buff *skb, struct crypto_alg << 790 { 220 { 791 struct skcipher_alg *skcipher = __cryp !! 221 if (tfm->__crt_alg->cra_type == &crypto_blkcipher_type) 792 struct crypto_report_blkcipher rblkcip !! 222 return crypto_init_skcipher_ops_blkcipher(tfm); 793 223 794 memset(&rblkcipher, 0, sizeof(rblkciph !! 224 BUG_ON(tfm->__crt_alg->cra_type != &crypto_ablkcipher_type && >> 225 tfm->__crt_alg->cra_type != &crypto_givcipher_type); 795 226 796 strscpy(rblkcipher.type, "skcipher", s !! 227 return crypto_init_skcipher_ops_ablkcipher(tfm); 797 strscpy(rblkcipher.geniv, "<none>", si << 798 << 799 rblkcipher.blocksize = alg->cra_blocks << 800 rblkcipher.min_keysize = skcipher->min << 801 rblkcipher.max_keysize = skcipher->max << 802 rblkcipher.ivsize = skcipher->ivsize; << 803 << 804 return nla_put(skb, CRYPTOCFGA_REPORT_ << 805 sizeof(rblkcipher), &rb << 806 } 228 } 807 229 808 static const struct crypto_type crypto_skciphe !! 230 static const struct crypto_type crypto_skcipher_type2 = { 809 .extsize = crypto_skcipher_extsize, 231 .extsize = crypto_skcipher_extsize, 810 .init_tfm = crypto_skcipher_init_tfm, 232 .init_tfm = crypto_skcipher_init_tfm, 811 .free = crypto_skcipher_free_instance, << 812 #ifdef CONFIG_PROC_FS << 813 .show = crypto_skcipher_show, << 814 #endif << 815 #if IS_ENABLED(CONFIG_CRYPTO_USER) << 816 .report = crypto_skcipher_report, << 817 #endif << 818 .maskclear = ~CRYPTO_ALG_TYPE_MASK, 233 .maskclear = ~CRYPTO_ALG_TYPE_MASK, 819 .maskset = CRYPTO_ALG_TYPE_SKCIPHER_MA !! 234 .maskset = CRYPTO_ALG_TYPE_BLKCIPHER_MASK, 820 .type = CRYPTO_ALG_TYPE_SKCIPHER, !! 
235 .type = CRYPTO_ALG_TYPE_BLKCIPHER, 821 .tfmsize = offsetof(struct crypto_skci 236 .tfmsize = offsetof(struct crypto_skcipher, base), 822 }; 237 }; 823 238 824 int crypto_grab_skcipher(struct crypto_skciphe << 825 struct crypto_instanc << 826 const char *name, u32 << 827 { << 828 spawn->base.frontend = &crypto_skciphe << 829 return crypto_grab_spawn(&spawn->base, << 830 } << 831 EXPORT_SYMBOL_GPL(crypto_grab_skcipher); << 832 << 833 struct crypto_skcipher *crypto_alloc_skcipher( 239 struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name, 834 240 u32 type, u32 mask) 835 { 241 { 836 return crypto_alloc_tfm(alg_name, &cry !! 242 return crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask); 837 } 243 } 838 EXPORT_SYMBOL_GPL(crypto_alloc_skcipher); 244 EXPORT_SYMBOL_GPL(crypto_alloc_skcipher); 839 245 840 struct crypto_sync_skcipher *crypto_alloc_sync << 841 const char *al << 842 { << 843 struct crypto_skcipher *tfm; << 844 << 845 /* Only sync algorithms allowed. */ << 846 mask |= CRYPTO_ALG_ASYNC | CRYPTO_ALG_ << 847 << 848 tfm = crypto_alloc_tfm(alg_name, &cryp << 849 << 850 /* << 851 * Make sure we do not allocate someth << 852 * an on-stack request: check the requ << 853 */ << 854 if (!IS_ERR(tfm) && WARN_ON(crypto_skc << 855 MAX_SYNC_S << 856 crypto_free_skcipher(tfm); << 857 return ERR_PTR(-EINVAL); << 858 } << 859 << 860 return (struct crypto_sync_skcipher *) << 861 } << 862 EXPORT_SYMBOL_GPL(crypto_alloc_sync_skcipher); << 863 << 864 int crypto_has_skcipher(const char *alg_name, << 865 { << 866 return crypto_type_has_alg(alg_name, & << 867 } << 868 EXPORT_SYMBOL_GPL(crypto_has_skcipher); << 869 << 870 int skcipher_prepare_alg_common(struct skciphe << 871 { << 872 struct crypto_alg *base = &alg->base; << 873 << 874 if (alg->ivsize > PAGE_SIZE / 8 || alg << 875 alg->statesize > PAGE_SIZE / 2 || << 876 (alg->ivsize + alg->statesize) > P << 877 return -EINVAL; << 878 << 879 if (!alg->chunksize) << 880 alg->chunksize = base->cra_blo << 881 << 882 base->cra_flags &= ~CRYPTO_ALG_TYPE_MA << 883 << 884 return 0; << 885 } << 886 << 887 static int skcipher_prepare_alg(struct skciphe << 888 { << 889 struct crypto_alg *base = &alg->base; << 890 int err; << 891 << 892 err = skcipher_prepare_alg_common(&alg << 893 if (err) << 894 return err; << 895 << 896 if (alg->walksize > PAGE_SIZE / 8) << 897 return -EINVAL; << 898 << 899 if (!alg->walksize) << 900 alg->walksize = alg->chunksize << 901 << 902 if (!alg->statesize) { << 903 alg->import = skcipher_noimpor << 904 alg->export = skcipher_noexpor << 905 } else if (!(alg->import && alg->expor << 906 return -EINVAL; << 907 << 908 base->cra_type = &crypto_skcipher_type << 909 base->cra_flags |= CRYPTO_ALG_TYPE_SKC << 910 << 911 return 0; << 912 } << 913 << 914 int crypto_register_skcipher(struct skcipher_a << 915 { << 916 struct crypto_alg *base = &alg->base; << 917 int err; << 918 << 919 err = skcipher_prepare_alg(alg); << 920 if (err) << 921 return err; << 922 << 923 return crypto_register_alg(base); << 924 } << 925 EXPORT_SYMBOL_GPL(crypto_register_skcipher); << 926 << 927 void crypto_unregister_skcipher(struct skciphe << 928 { << 929 crypto_unregister_alg(&alg->base); << 930 } << 931 EXPORT_SYMBOL_GPL(crypto_unregister_skcipher); << 932 << 933 int crypto_register_skciphers(struct skcipher_ << 934 { << 935 int i, ret; << 936 << 937 for (i = 0; i < count; i++) { << 938 ret = crypto_register_skcipher << 939 if (ret) << 940 goto err; << 941 } << 942 << 943 return 0; << 944 << 945 err: << 946 for (--i; i >= 0; --i) << 947 
crypto_unregister_skcipher(&al << 948 << 949 return ret; << 950 } << 951 EXPORT_SYMBOL_GPL(crypto_register_skciphers); << 952 << 953 void crypto_unregister_skciphers(struct skciph << 954 { << 955 int i; << 956 << 957 for (i = count - 1; i >= 0; --i) << 958 crypto_unregister_skcipher(&al << 959 } << 960 EXPORT_SYMBOL_GPL(crypto_unregister_skciphers) << 961 << 962 int skcipher_register_instance(struct crypto_t << 963 struct skcipher_ins << 964 { << 965 int err; << 966 << 967 if (WARN_ON(!inst->free)) << 968 return -EINVAL; << 969 << 970 err = skcipher_prepare_alg(&inst->alg) << 971 if (err) << 972 return err; << 973 << 974 return crypto_register_instance(tmpl, << 975 } << 976 EXPORT_SYMBOL_GPL(skcipher_register_instance); << 977 << 978 static int skcipher_setkey_simple(struct crypt << 979 unsigned int << 980 { << 981 struct crypto_cipher *cipher = skciphe << 982 << 983 crypto_cipher_clear_flags(cipher, CRYP << 984 crypto_cipher_set_flags(cipher, crypto << 985 CRYPTO_TFM_REQ << 986 return crypto_cipher_setkey(cipher, ke << 987 } << 988 << 989 static int skcipher_init_tfm_simple(struct cry << 990 { << 991 struct skcipher_instance *inst = skcip << 992 struct crypto_cipher_spawn *spawn = sk << 993 struct skcipher_ctx_simple *ctx = cryp << 994 struct crypto_cipher *cipher; << 995 << 996 cipher = crypto_spawn_cipher(spawn); << 997 if (IS_ERR(cipher)) << 998 return PTR_ERR(cipher); << 999 << 1000 ctx->cipher = cipher; << 1001 return 0; << 1002 } << 1003 << 1004 static void skcipher_exit_tfm_simple(struct c << 1005 { << 1006 struct skcipher_ctx_simple *ctx = cry << 1007 << 1008 crypto_free_cipher(ctx->cipher); << 1009 } << 1010 << 1011 static void skcipher_free_instance_simple(str << 1012 { << 1013 crypto_drop_cipher(skcipher_instance_ << 1014 kfree(inst); << 1015 } << 1016 << 1017 /** << 1018 * skcipher_alloc_instance_simple - allocate << 1019 * << 1020 * Allocate an skcipher_instance for a simple << 1021 * e.g. cbc or ecb. The instance context wil << 1022 * that for the underlying cipher. The {min, << 1023 * alignmask, and priority are set from the u << 1024 * overridden if needed. The tfm context def << 1025 * default ->setkey(), ->init(), and ->exit() << 1026 * << 1027 * @tmpl: the template being instantiated << 1028 * @tb: the template parameters << 1029 * << 1030 * Return: a pointer to the new instance, or << 1031 * needs to register the instance. 
 */
struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb)
{
	u32 mask;
	struct skcipher_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *cipher_alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return ERR_PTR(err);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);
	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, skcipher_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	cipher_alg = crypto_spawn_cipher_alg(spawn);

	err = crypto_inst_setname(skcipher_crypto_instance(inst), tmpl->name,
				  cipher_alg);
	if (err)
		goto err_free_inst;

	inst->free = skcipher_free_instance_simple;

	/* Default algorithm properties, can be overridden */
	inst->alg.base.cra_blocksize = cipher_alg->cra_blocksize;
	inst->alg.base.cra_alignmask = cipher_alg->cra_alignmask;
	inst->alg.base.cra_priority = cipher_alg->cra_priority;
	inst->alg.min_keysize = cipher_alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = cipher_alg->cra_cipher.cia_max_keysize;
	inst->alg.ivsize = cipher_alg->cra_blocksize;

	/* Use skcipher_ctx_simple by default, can be overridden */
	inst->alg.base.cra_ctxsize = sizeof(struct skcipher_ctx_simple);
	inst->alg.setkey = skcipher_setkey_simple;
	inst->alg.init = skcipher_init_tfm_simple;
	inst->alg.exit = skcipher_exit_tfm_simple;

	return inst;

err_free_inst:
	skcipher_free_instance_simple(inst);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(skcipher_alloc_instance_simple);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Symmetric key cipher type");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
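
/*
 * Illustrative sketch (not part of this file): how a mode template such as
 * "ecb" typically consumes skcipher_alloc_instance_simple() above from its
 * ->create() callback.  The demo_* names and the encrypt/decrypt handlers
 * are placeholders assumed for the example; see crypto/ecb.c for a real user.
 */
static int demo_ecb_encrypt(struct skcipher_request *req);	/* assumed */
static int demo_ecb_decrypt(struct skcipher_request *req);	/* assumed */

static int demo_ecb_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	int err;

	/* Grabs the underlying cipher and fills in the common fields. */
	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.ivsize = 0;	/* ECB takes no IV; override the default */
	inst->alg.encrypt = demo_ecb_encrypt;
	inst->alg.decrypt = demo_ecb_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	return err;
}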
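
/*
 * Illustrative sketch (not part of this file): a typical kernel-module user
 * of the skcipher API that this file implements.  The algorithm name
 * "cbc(aes)", the helper name demo_skcipher_encrypt() and the in-place,
 * single-buffer layout are assumptions made for the example only.  @buf must
 * be linearly mapped (no stack or vmalloc memory), @len a multiple of the
 * AES block size, and @keylen 16, 24 or 32 bytes.
 */
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

static int demo_skcipher_encrypt(u8 *buf, unsigned int len,
				 const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* Encrypt in place; the walk code above copes with page crossings. */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					   CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Wait for completion even if the implementation is asynchronous. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}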
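
/*
 * Illustrative sketch (not part of this file): the loop with which an
 * skcipher implementation typically consumes the skcipher_walk machinery
 * above (declared in crypto/internal/skcipher.h).  The "cipher" here is a
 * stand-in that just XORs a 16-byte key into the data via crypto_xor_cpy()
 * from crypto/algapi.h; demo_xor_ctx and demo_xor_encrypt() are names
 * assumed for the example only.
 */
struct demo_xor_ctx {
	u8 key[16];
};

static int demo_xor_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct demo_xor_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	/* Map req->src/req->dst and copy the IV, as implemented above. */
	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int i;

		/* Process the whole blocks of the currently mapped span. */
		for (i = 0; i + sizeof(ctx->key) <= nbytes;
		     i += sizeof(ctx->key))
			crypto_xor_cpy(dst + i, src + i, ctx->key,
				       sizeof(ctx->key));

		/* Report the unprocessed tail; this advances the walk. */
		err = skcipher_walk_done(&walk, nbytes % sizeof(ctx->key));
	}

	return err;
}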