// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks. In user context,
 * the kernel is given a chance to schedule us once per page.
 *
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/bug.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <net/netlink.h>
#include "skcipher.h"

#define CRYPTO_ALG_TYPE_SKCIPHER_MASK	0x0000000e

enum {
	SKCIPHER_WALK_PHYS = 1 << 0,
	SKCIPHER_WALK_SLOW = 1 << 1,
	SKCIPHER_WALK_COPY = 1 << 2,
	SKCIPHER_WALK_DIFF = 1 << 3,
	SKCIPHER_WALK_SLEEP = 1 << 4,
};

struct skcipher_walk_buffer {
	struct list_head entry;
	struct scatter_walk dst;
	unsigned int len;
	u8 *data;
	u8 buffer[];
};

static const struct crypto_type crypto_skcipher_type;

static int skcipher_walk_next(struct skcipher_walk *walk);

static inline void skcipher_map_src(struct skcipher_walk *walk)
{
	walk->src.virt.addr = scatterwalk_map(&walk->in);
}

static inline void skcipher_map_dst(struct skcipher_walk *walk)
{
	walk->dst.virt.addr = scatterwalk_map(&walk->out);
}

static inline void skcipher_unmap_src(struct skcipher_walk *walk)
{
	scatterwalk_unmap(walk->src.virt.addr);
}

static inline void skcipher_unmap_dst(struct skcipher_walk *walk)
{
	scatterwalk_unmap(walk->dst.virt.addr);
}

static inline gfp_t skcipher_walk_gfp(struct skcipher_walk *walk)
{
	return walk->flags & SKCIPHER_WALK_SLEEP ? GFP_KERNEL : GFP_ATOMIC;
}

/* Get a spot of the specified length that does not straddle a page.
 * The caller needs to ensure that there is enough space for this operation.
 */
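/* Worked example (editorial note): with 4 KiB pages, start == ...ff8 and
 * len == 16 give end_page == ...1000, so the spot moves up to the next page
 * boundary; if [start, start + len) already fits within the page holding
 * start, then end_page <= start and start is returned unchanged.
 */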
static inline u8 *skcipher_get_spot(u8 *start, unsigned int len)
{
	u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);

	return max(start, end_page);
}

static inline struct skcipher_alg *__crypto_skcipher_alg(
	struct crypto_alg *alg)
{
	return container_of(alg, struct skcipher_alg, base);
}

static inline struct crypto_istat_cipher *skcipher_get_stat(
	struct skcipher_alg *alg)
{
	return skcipher_get_stat_common(&alg->co);
}
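/*
 * Editorial note: -EINPROGRESS and -EBUSY report an asynchronous request
 * that is still in flight rather than a failure, which is why they are
 * excluded from the error count below.
 */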
static inline int crypto_skcipher_errstat(struct skcipher_alg *alg, int err)
{
	struct crypto_istat_cipher *istat = skcipher_get_stat(alg);

	if (!IS_ENABLED(CONFIG_CRYPTO_STATS))
		return err;

	if (err && err != -EINPROGRESS && err != -EBUSY)
		atomic64_inc(&istat->err_cnt);

	return err;
}

static int skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
{
	u8 *addr;

	addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
	addr = skcipher_get_spot(addr, bsize);
	scatterwalk_copychunks(addr, &walk->out, bsize,
			       (walk->flags & SKCIPHER_WALK_PHYS) ? 2 : 1);
	return 0;
}

int skcipher_walk_done(struct skcipher_walk *walk, int err)
{
	unsigned int n = walk->nbytes;
	unsigned int nbytes = 0;

	if (!n)
		goto finish;

	if (likely(err >= 0)) {
		n -= err;
		nbytes = walk->total - n;
	}

	if (likely(!(walk->flags & (SKCIPHER_WALK_PHYS |
				    SKCIPHER_WALK_SLOW |
				    SKCIPHER_WALK_COPY |
				    SKCIPHER_WALK_DIFF)))) {
unmap_src:
		skcipher_unmap_src(walk);
	} else if (walk->flags & SKCIPHER_WALK_DIFF) {
		skcipher_unmap_dst(walk);
		goto unmap_src;
	} else if (walk->flags & SKCIPHER_WALK_COPY) {
		skcipher_map_dst(walk);
		memcpy(walk->dst.virt.addr, walk->page, n);
		skcipher_unmap_dst(walk);
	} else if (unlikely(walk->flags & SKCIPHER_WALK_SLOW)) {
		if (err > 0) {
			/*
			 * Didn't process all bytes.  Either the algorithm is
			 * broken, or this was the last step and it turned out
			 * the message wasn't evenly divisible into blocks but
			 * the algorithm requires it.
			 */
			err = -EINVAL;
			nbytes = 0;
		} else
			n = skcipher_done_slow(walk, n);
	}

	if (err > 0)
		err = 0;

	walk->total = nbytes;
	walk->nbytes = 0;

	scatterwalk_advance(&walk->in, n);
	scatterwalk_advance(&walk->out, n);
	scatterwalk_done(&walk->in, 0, nbytes);
	scatterwalk_done(&walk->out, 1, nbytes);

	if (nbytes) {
		crypto_yield(walk->flags & SKCIPHER_WALK_SLEEP ?
			     CRYPTO_TFM_REQ_MAY_SLEEP : 0);
		return skcipher_walk_next(walk);
	}

finish:
	/* Short-circuit for the common/fast path. */
	if (!((unsigned long)walk->buffer | (unsigned long)walk->page))
		goto out;

	if (walk->flags & SKCIPHER_WALK_PHYS)
		goto out;

	if (walk->iv != walk->oiv)
		memcpy(walk->oiv, walk->iv, walk->ivsize);
	if (walk->buffer != walk->page)
		kfree(walk->buffer);
	if (walk->page)
		free_page((unsigned long)walk->page);

out:
	return err;
}
EXPORT_SYMBOL_GPL(skcipher_walk_done);

void skcipher_walk_complete(struct skcipher_walk *walk, int err)
{
	struct skcipher_walk_buffer *p, *tmp;

	list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {
		u8 *data;

		if (err)
			goto done;

		data = p->data;
		if (!data) {
			data = PTR_ALIGN(&p->buffer[0], walk->alignmask + 1);
			data = skcipher_get_spot(data, walk->stride);
		}

		scatterwalk_copychunks(data, &p->dst, p->len, 1);

		if (offset_in_page(p->data) + p->len + walk->stride >
		    PAGE_SIZE)
			free_page((unsigned long)p->data);

done:
		list_del(&p->entry);
		kfree(p);
	}

	if (!err && walk->iv != walk->oiv)
		memcpy(walk->oiv, walk->iv, walk->ivsize);
	if (walk->buffer != walk->page)
		kfree(walk->buffer);
	if (walk->page)
		free_page((unsigned long)walk->page);
}
EXPORT_SYMBOL_GPL(skcipher_walk_complete);

static void skcipher_queue_write(struct skcipher_walk *walk,
				 struct skcipher_walk_buffer *p)
{
	p->dst = walk->out;
	list_add_tail(&p->entry, &walk->buffers);
}

static int skcipher_next_slow(struct skcipher_walk *walk, unsigned int bsize)
{
	bool phys = walk->flags & SKCIPHER_WALK_PHYS;
	unsigned alignmask = walk->alignmask;
	struct skcipher_walk_buffer *p;
	unsigned a;
	unsigned n;
	u8 *buffer;
	void *v;

	if (!phys) {
		if (!walk->buffer)
			walk->buffer = walk->page;
		buffer = walk->buffer;
		if (buffer)
			goto ok;
	}

	/* Start with the minimum alignment of kmalloc. */
	a = crypto_tfm_ctx_alignment() - 1;
	n = bsize;

	if (phys) {
		/* Calculate the minimum alignment of p->buffer. */
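		/*
		 * Editorial note: (sizeof(*p) ^ (sizeof(*p) - 1)) >> 1 masks
		 * off everything below the lowest set bit of sizeof(*p).
		 * p->buffer starts sizeof(*p) bytes into the allocation, so
		 * that bit is all the alignment it is guaranteed to have
		 * beyond kmalloc's own.
		 */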
		a &= (sizeof(*p) ^ (sizeof(*p) - 1)) >> 1;
		n += sizeof(*p);
	}

	/* Minimum size to align p->buffer by alignmask. */
	n += alignmask & ~a;

	/* Minimum size to ensure p->buffer does not straddle a page. */
	n += (bsize - 1) & ~(alignmask | a);

	v = kzalloc(n, skcipher_walk_gfp(walk));
	if (!v)
		return skcipher_walk_done(walk, -ENOMEM);

	if (phys) {
		p = v;
		p->len = bsize;
		skcipher_queue_write(walk, p);
		buffer = p->buffer;
	} else {
		walk->buffer = v;
		buffer = v;
	}

ok:
	walk->dst.virt.addr = PTR_ALIGN(buffer, alignmask + 1);
	walk->dst.virt.addr = skcipher_get_spot(walk->dst.virt.addr, bsize);
	walk->src.virt.addr = walk->dst.virt.addr;

	scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);

	walk->nbytes = bsize;
	walk->flags |= SKCIPHER_WALK_SLOW;

	return 0;
}

static int skcipher_next_copy(struct skcipher_walk *walk)
{
	struct skcipher_walk_buffer *p;
	u8 *tmp = walk->page;

	skcipher_map_src(walk);
	memcpy(tmp, walk->src.virt.addr, walk->nbytes);
	skcipher_unmap_src(walk);

	walk->src.virt.addr = tmp;
	walk->dst.virt.addr = tmp;

	if (!(walk->flags & SKCIPHER_WALK_PHYS))
		return 0;

	p = kmalloc(sizeof(*p), skcipher_walk_gfp(walk));
	if (!p)
		return -ENOMEM;

	p->data = walk->page;
	p->len = walk->nbytes;
	skcipher_queue_write(walk, p);

	if (offset_in_page(walk->page) + walk->nbytes + walk->stride >
	    PAGE_SIZE)
		walk->page = NULL;
	else
		walk->page += walk->nbytes;

	return 0;
}
static int skcipher_next_fast(struct skcipher_walk *walk)
{
	unsigned long diff;

	walk->src.phys.page = scatterwalk_page(&walk->in);
	walk->src.phys.offset = offset_in_page(walk->in.offset);
	walk->dst.phys.page = scatterwalk_page(&walk->out);
	walk->dst.phys.offset = offset_in_page(walk->out.offset);

	if (walk->flags & SKCIPHER_WALK_PHYS)
		return 0;

	diff = walk->src.phys.offset - walk->dst.phys.offset;
	diff |= walk->src.virt.page - walk->dst.virt.page;

	skcipher_map_src(walk);
	walk->dst.virt.addr = walk->src.virt.addr;

	if (diff) {
		walk->flags |= SKCIPHER_WALK_DIFF;
		skcipher_map_dst(walk);
	}

	return 0;
}

static int skcipher_walk_next(struct skcipher_walk *walk)
{
	unsigned int bsize;
	unsigned int n;
	int err;

	walk->flags &= ~(SKCIPHER_WALK_SLOW | SKCIPHER_WALK_COPY |
			 SKCIPHER_WALK_DIFF);

	n = walk->total;
	bsize = min(walk->stride, max(n, walk->blocksize));
	n = scatterwalk_clamp(&walk->in, n);
	n = scatterwalk_clamp(&walk->out, n);

	if (unlikely(n < bsize)) {
		if (unlikely(walk->total < walk->blocksize))
			return skcipher_walk_done(walk, -EINVAL);

slow_path:
		err = skcipher_next_slow(walk, bsize);
		goto set_phys_lowmem;
	}

	if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) {
		if (!walk->page) {
			gfp_t gfp = skcipher_walk_gfp(walk);

			walk->page = (void *)__get_free_page(gfp);
			if (!walk->page)
				goto slow_path;
		}

		walk->nbytes = min_t(unsigned, n,
				     PAGE_SIZE - offset_in_page(walk->page));
		walk->flags |= SKCIPHER_WALK_COPY;
		err = skcipher_next_copy(walk);
		goto set_phys_lowmem;
	}

	walk->nbytes = n;

	return skcipher_next_fast(walk);

set_phys_lowmem:
	if (!err && (walk->flags & SKCIPHER_WALK_PHYS)) {
		walk->src.phys.page = virt_to_page(walk->src.virt.addr);
		walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
		walk->src.phys.offset &= PAGE_SIZE - 1;
		walk->dst.phys.offset &= PAGE_SIZE - 1;
	}
	return err;
}

static int skcipher_copy_iv(struct skcipher_walk *walk)
{
	unsigned a = crypto_tfm_ctx_alignment() - 1;
	unsigned alignmask = walk->alignmask;
	unsigned ivsize = walk->ivsize;
	unsigned bs = walk->stride;
	unsigned aligned_bs;
	unsigned size;
	u8 *iv;

	aligned_bs = ALIGN(bs, alignmask + 1);

	/* Minimum size to align buffer by alignmask. */
	size = alignmask & ~a;

	if (walk->flags & SKCIPHER_WALK_PHYS)
		size += ivsize;
	else {
		size += aligned_bs + ivsize;

		/* Minimum size to ensure buffer does not straddle a page. */
		size += (bs - 1) & ~(alignmask | a);
	}

	walk->buffer = kmalloc(size, skcipher_walk_gfp(walk));
	if (!walk->buffer)
		return -ENOMEM;

	iv = PTR_ALIGN(walk->buffer, alignmask + 1);
	iv = skcipher_get_spot(iv, bs) + aligned_bs;

	walk->iv = memcpy(iv, walk->iv, walk->ivsize);
	return 0;
}

static int skcipher_walk_first(struct skcipher_walk *walk)
{
	if (WARN_ON_ONCE(in_hardirq()))
		return -EDEADLK;

	walk->buffer = NULL;
	if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {
		int err = skcipher_copy_iv(walk);
		if (err)
			return err;
	}

	walk->page = NULL;

	return skcipher_walk_next(walk);
}

static int skcipher_walk_skcipher(struct skcipher_walk *walk,
				  struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	walk->total = req->cryptlen;
	walk->nbytes = 0;
	walk->iv = req->iv;
	walk->oiv = req->iv;

	if (unlikely(!walk->total))
		return 0;

	scatterwalk_start(&walk->in, req->src);
	scatterwalk_start(&walk->out, req->dst);

	walk->flags &= ~SKCIPHER_WALK_SLEEP;
	walk->flags |= req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
		       SKCIPHER_WALK_SLEEP : 0;

	walk->blocksize = crypto_skcipher_blocksize(tfm);
	walk->ivsize = crypto_skcipher_ivsize(tfm);
	walk->alignmask = crypto_skcipher_alignmask(tfm);

	if (alg->co.base.cra_type != &crypto_skcipher_type)
		walk->stride = alg->co.chunksize;
	else
		walk->stride = alg->walksize;

	return skcipher_walk_first(walk);
}

int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req, bool atomic)
{
	int err;

	might_sleep_if(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);

	walk->flags &= ~SKCIPHER_WALK_PHYS;

	err = skcipher_walk_skcipher(walk, req);

	walk->flags &= atomic ? ~SKCIPHER_WALK_SLEEP : ~0;

	return err;
}
EXPORT_SYMBOL_GPL(skcipher_walk_virt);
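/*
 * Editorial sketch, not part of the original file: the typical calling
 * pattern for the walk API above, as used by synchronous skcipher
 * implementations.  The function name is hypothetical and the byte-wise
 * XOR stands in for a real cipher.
 */
static int __maybe_unused skcipher_walk_example(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	unsigned int i;
	int err;

	/* Map req->src and req->dst for virtual-address access. */
	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		/* Transform every byte the walk currently exposes. */
		for (i = 0; i < walk.nbytes; i++)
			dst[i] = src[i] ^ 0xff;

		/* The second argument is the number of bytes left undone. */
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}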
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req)
{
	walk->flags |= SKCIPHER_WALK_PHYS;

	INIT_LIST_HEAD(&walk->buffers);

	return skcipher_walk_skcipher(walk, req);
}
EXPORT_SYMBOL_GPL(skcipher_walk_async);

static int skcipher_walk_aead_common(struct skcipher_walk *walk,
				     struct aead_request *req, bool atomic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int err;

	walk->nbytes = 0;
	walk->iv = req->iv;
	walk->oiv = req->iv;

	if (unlikely(!walk->total))
		return 0;

	walk->flags &= ~SKCIPHER_WALK_PHYS;

	scatterwalk_start(&walk->in, req->src);
	scatterwalk_start(&walk->out, req->dst);

	scatterwalk_copychunks(NULL, &walk->in, req->assoclen, 2);
	scatterwalk_copychunks(NULL, &walk->out, req->assoclen, 2);

	scatterwalk_done(&walk->in, 0, walk->total);
	scatterwalk_done(&walk->out, 0, walk->total);
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		walk->flags |= SKCIPHER_WALK_SLEEP;
	else
		walk->flags &= ~SKCIPHER_WALK_SLEEP;

	walk->blocksize = crypto_aead_blocksize(tfm);
	walk->stride = crypto_aead_chunksize(tfm);
	walk->ivsize = crypto_aead_ivsize(tfm);
	walk->alignmask = crypto_aead_alignmask(tfm);

	err = skcipher_walk_first(walk);

	if (atomic)
		walk->flags &= ~SKCIPHER_WALK_SLEEP;

	return err;
}

int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic)
{
	walk->total = req->cryptlen;

	return skcipher_walk_aead_common(walk, req, atomic);
}
EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt);

int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	walk->total = req->cryptlen - crypto_aead_authsize(tfm);

	return skcipher_walk_aead_common(walk, req, atomic);
}
EXPORT_SYMBOL_GPL(skcipher_walk_aead_decrypt);

static void skcipher_set_needkey(struct crypto_skcipher *tfm)
{
	if (crypto_skcipher_max_keysize(tfm) != 0)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

static int skcipher_setkey_unaligned(struct crypto_skcipher *tfm,
				     const u8 *key, unsigned int keylen)
{
	unsigned long alignmask = crypto_skcipher_alignmask(tfm);
	struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
	u8 *buffer, *alignbuffer;
	unsigned long absize;
	int ret;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cipher->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;
}

int crypto_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
	unsigned long alignmask = crypto_skcipher_alignmask(tfm);
	int err;

	if (cipher->co.base.cra_type != &crypto_skcipher_type) {
		struct crypto_lskcipher **ctx = crypto_skcipher_ctx(tfm);

		crypto_lskcipher_clear_flags(*ctx, CRYPTO_TFM_REQ_MASK);
		crypto_lskcipher_set_flags(*ctx,
					   crypto_skcipher_get_flags(tfm) &
					   CRYPTO_TFM_REQ_MASK);
		err = crypto_lskcipher_setkey(*ctx, key, keylen);
		goto out;
	}

	if (keylen < cipher->min_keysize || keylen > cipher->max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		err = skcipher_setkey_unaligned(tfm, key, keylen);
	else
		err = cipher->setkey(tfm, key, keylen);

out:
	if (unlikely(err)) {
		skcipher_set_needkey(tfm);
		return err;
	}

	crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_skcipher_setkey);
int crypto_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	int ret;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_cipher *istat = skcipher_get_stat(alg);

		atomic64_inc(&istat->encrypt_cnt);
		atomic64_add(req->cryptlen, &istat->encrypt_tlen);
	}

	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else if (alg->co.base.cra_type != &crypto_skcipher_type)
		ret = crypto_lskcipher_encrypt_sg(req);
	else
		ret = alg->encrypt(req);

	return crypto_skcipher_errstat(alg, ret);
}
EXPORT_SYMBOL_GPL(crypto_skcipher_encrypt);

int crypto_skcipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	int ret;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_cipher *istat = skcipher_get_stat(alg);

		atomic64_inc(&istat->decrypt_cnt);
		atomic64_add(req->cryptlen, &istat->decrypt_tlen);
	}

	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else if (alg->co.base.cra_type != &crypto_skcipher_type)
		ret = crypto_lskcipher_decrypt_sg(req);
	else
		ret = alg->decrypt(req);

	return crypto_skcipher_errstat(alg, ret);
}
EXPORT_SYMBOL_GPL(crypto_skcipher_decrypt);

static int crypto_lskcipher_export(struct skcipher_request *req, void *out)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	u8 *ivs = skcipher_request_ctx(req);

	ivs = PTR_ALIGN(ivs, crypto_skcipher_alignmask(tfm) + 1);

	memcpy(out, ivs + crypto_skcipher_ivsize(tfm),
	       crypto_skcipher_statesize(tfm));

	return 0;
}

static int crypto_lskcipher_import(struct skcipher_request *req, const void *in)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	u8 *ivs = skcipher_request_ctx(req);

	ivs = PTR_ALIGN(ivs, crypto_skcipher_alignmask(tfm) + 1);

	memcpy(ivs + crypto_skcipher_ivsize(tfm), in,
	       crypto_skcipher_statesize(tfm));

	return 0;
}

static int skcipher_noexport(struct skcipher_request *req, void *out)
{
	return 0;
}

static int skcipher_noimport(struct skcipher_request *req, const void *in)
{
	return 0;
}

int crypto_skcipher_export(struct skcipher_request *req, void *out)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	if (alg->co.base.cra_type != &crypto_skcipher_type)
		return crypto_lskcipher_export(req, out);
	return alg->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_skcipher_export);

int crypto_skcipher_import(struct skcipher_request *req, const void *in)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	if (alg->co.base.cra_type != &crypto_skcipher_type)
		return crypto_lskcipher_import(req, in);
	return alg->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_skcipher_import);

static void crypto_skcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);

	alg->exit(skcipher);
}

static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);

	skcipher_set_needkey(skcipher);

	if (tfm->__crt_alg->cra_type != &crypto_skcipher_type) {
		unsigned am = crypto_skcipher_alignmask(skcipher);
		unsigned reqsize;

		reqsize = am & ~(crypto_tfm_ctx_alignment() - 1);
		reqsize += crypto_skcipher_ivsize(skcipher);
		reqsize += crypto_skcipher_statesize(skcipher);
		crypto_skcipher_set_reqsize(skcipher, reqsize);

		return crypto_init_lskcipher_ops_sg(tfm);
	}

	if (alg->exit)
		skcipher->base.exit = crypto_skcipher_exit_tfm;

	if (alg->init)
		return alg->init(skcipher);

	return 0;
}

static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type != &crypto_skcipher_type)
		return sizeof(struct crypto_lskcipher *);

	return crypto_alg_extsize(alg);
}

static void crypto_skcipher_free_instance(struct crypto_instance *inst)
{
	struct skcipher_instance *skcipher =
		container_of(inst, struct skcipher_instance, s.base);

	skcipher->free(skcipher);
}

static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct skcipher_alg *skcipher = __crypto_skcipher_alg(alg);

	seq_printf(m, "type         : skcipher\n");
	seq_printf(m, "async        : %s\n",
		   alg->cra_flags & CRYPTO_ALG_ASYNC ? "yes" : "no");
"yes" : "no"); 779 seq_printf(m, "blocksize : %u\n", a 820 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); 780 seq_printf(m, "min keysize : %u\n", s 821 seq_printf(m, "min keysize : %u\n", skcipher->min_keysize); 781 seq_printf(m, "max keysize : %u\n", s 822 seq_printf(m, "max keysize : %u\n", skcipher->max_keysize); 782 seq_printf(m, "ivsize : %u\n", s 823 seq_printf(m, "ivsize : %u\n", skcipher->ivsize); 783 seq_printf(m, "chunksize : %u\n", s 824 seq_printf(m, "chunksize : %u\n", skcipher->chunksize); 784 seq_printf(m, "walksize : %u\n", s 825 seq_printf(m, "walksize : %u\n", skcipher->walksize); 785 seq_printf(m, "statesize : %u\n", s 826 seq_printf(m, "statesize : %u\n", skcipher->statesize); 786 } 827 } 787 828 788 static int __maybe_unused crypto_skcipher_repo 829 static int __maybe_unused crypto_skcipher_report( 789 struct sk_buff *skb, struct crypto_alg 830 struct sk_buff *skb, struct crypto_alg *alg) 790 { 831 { 791 struct skcipher_alg *skcipher = __cryp 832 struct skcipher_alg *skcipher = __crypto_skcipher_alg(alg); 792 struct crypto_report_blkcipher rblkcip 833 struct crypto_report_blkcipher rblkcipher; 793 834 794 memset(&rblkcipher, 0, sizeof(rblkciph 835 memset(&rblkcipher, 0, sizeof(rblkcipher)); 795 836 796 strscpy(rblkcipher.type, "skcipher", s 837 strscpy(rblkcipher.type, "skcipher", sizeof(rblkcipher.type)); 797 strscpy(rblkcipher.geniv, "<none>", si 838 strscpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv)); 798 839 799 rblkcipher.blocksize = alg->cra_blocks 840 rblkcipher.blocksize = alg->cra_blocksize; 800 rblkcipher.min_keysize = skcipher->min 841 rblkcipher.min_keysize = skcipher->min_keysize; 801 rblkcipher.max_keysize = skcipher->max 842 rblkcipher.max_keysize = skcipher->max_keysize; 802 rblkcipher.ivsize = skcipher->ivsize; 843 rblkcipher.ivsize = skcipher->ivsize; 803 844 804 return nla_put(skb, CRYPTOCFGA_REPORT_ 845 return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER, 805 sizeof(rblkcipher), &rb 846 sizeof(rblkcipher), &rblkcipher); 806 } 847 } 807 848 >> 849 static int __maybe_unused crypto_skcipher_report_stat( >> 850 struct sk_buff *skb, struct crypto_alg *alg) >> 851 { >> 852 struct skcipher_alg *skcipher = __crypto_skcipher_alg(alg); >> 853 struct crypto_istat_cipher *istat; >> 854 struct crypto_stat_cipher rcipher; >> 855 >> 856 istat = skcipher_get_stat(skcipher); >> 857 >> 858 memset(&rcipher, 0, sizeof(rcipher)); >> 859 >> 860 strscpy(rcipher.type, "cipher", sizeof(rcipher.type)); >> 861 >> 862 rcipher.stat_encrypt_cnt = atomic64_read(&istat->encrypt_cnt); >> 863 rcipher.stat_encrypt_tlen = atomic64_read(&istat->encrypt_tlen); >> 864 rcipher.stat_decrypt_cnt = atomic64_read(&istat->decrypt_cnt); >> 865 rcipher.stat_decrypt_tlen = atomic64_read(&istat->decrypt_tlen); >> 866 rcipher.stat_err_cnt = atomic64_read(&istat->err_cnt); >> 867 >> 868 return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher); >> 869 } >> 870 808 static const struct crypto_type crypto_skciphe 871 static const struct crypto_type crypto_skcipher_type = { 809 .extsize = crypto_skcipher_extsize, 872 .extsize = crypto_skcipher_extsize, 810 .init_tfm = crypto_skcipher_init_tfm, 873 .init_tfm = crypto_skcipher_init_tfm, 811 .free = crypto_skcipher_free_instance, 874 .free = crypto_skcipher_free_instance, 812 #ifdef CONFIG_PROC_FS 875 #ifdef CONFIG_PROC_FS 813 .show = crypto_skcipher_show, 876 .show = crypto_skcipher_show, 814 #endif 877 #endif 815 #if IS_ENABLED(CONFIG_CRYPTO_USER) 878 #if IS_ENABLED(CONFIG_CRYPTO_USER) 816 .report = crypto_skcipher_report, 879 
	.report = crypto_skcipher_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_skcipher_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_SKCIPHER_MASK,
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.tfmsize = offsetof(struct crypto_skcipher, base),
};

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_skcipher_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_skcipher);

struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);
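/*
 * Editorial sketch, not part of the original file: one-shot in-place
 * encryption through the allocation and setkey interfaces above.  The
 * function name is hypothetical, "cbc(aes)" is an arbitrary choice, and
 * scatterlist setup for the data is assumed to be done by the caller.
 */
static int __maybe_unused skcipher_encrypt_example(struct scatterlist *sg,
						   unsigned int len,
						   const u8 *key,
						   unsigned int keylen, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* Encrypt in place and wait even if the algorithm is asynchronous. */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, sg, sg, len, iv);
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}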
struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
				const char *alg_name, u32 type, u32 mask)
{
	struct crypto_skcipher *tfm;

	/* Only sync algorithms allowed. */
	mask |= CRYPTO_ALG_ASYNC | CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE;

	tfm = crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);

	/*
	 * Make sure we do not allocate something that might get used with
	 * an on-stack request: check the request size.
	 */
	if (!IS_ERR(tfm) && WARN_ON(crypto_skcipher_reqsize(tfm) >
				    MAX_SYNC_SKCIPHER_REQSIZE)) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	return (struct crypto_sync_skcipher *)tfm;
}
EXPORT_SYMBOL_GPL(crypto_alloc_sync_skcipher);

int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_skcipher_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_skcipher);

int skcipher_prepare_alg_common(struct skcipher_alg_common *alg)
{
	struct crypto_istat_cipher *istat = skcipher_get_stat_common(alg);
	struct crypto_alg *base = &alg->base;

	if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 2 ||
	    (alg->ivsize + alg->statesize) > PAGE_SIZE / 2)
		return -EINVAL;

	if (!alg->chunksize)
		alg->chunksize = base->cra_blocksize;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		memset(istat, 0, sizeof(*istat));

	return 0;
}

static int skcipher_prepare_alg(struct skcipher_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = skcipher_prepare_alg_common(&alg->co);
	if (err)
		return err;

	if (alg->walksize > PAGE_SIZE / 8)
		return -EINVAL;

	if (!alg->walksize)
		alg->walksize = alg->chunksize;

	if (!alg->statesize) {
		alg->import = skcipher_noimport;
		alg->export = skcipher_noexport;
	} else if (!(alg->import && alg->export))
		return -EINVAL;

	base->cra_type = &crypto_skcipher_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;

	return 0;
}

int crypto_register_skcipher(struct skcipher_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = skcipher_prepare_alg(alg);
	if (err)
		return err;
	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_skcipher);

void crypto_unregister_skcipher(struct skcipher_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_skcipher);

int crypto_register_skciphers(struct skcipher_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_skcipher(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_skcipher(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_skciphers);

void crypto_unregister_skciphers(struct skcipher_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_skcipher(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_skciphers);

int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = skcipher_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, skcipher_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(skcipher_register_instance);

static int skcipher_setkey_simple(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);

	crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(cipher, key, keylen);
}

static int skcipher_init_tfm_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->cipher = cipher;
	return 0;
}

static void skcipher_exit_tfm_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->cipher);
}

static void skcipher_free_instance_simple(struct skcipher_instance *inst)
{
	crypto_drop_cipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

/**
 * skcipher_alloc_instance_simple - allocate instance of simple block cipher mode
 *
 * Allocate an skcipher_instance for a simple block cipher mode of operation,
 * e.g. cbc or ecb.  The instance context will have just a single crypto_spawn,
 * that for the underlying cipher.  The {min,max}_keysize, ivsize, blocksize,
 * alignmask, and priority are set from the underlying cipher but can be
 * overridden if needed.  The tfm context defaults to skcipher_ctx_simple, and
 * default ->setkey(), ->init(), and ->exit() methods are installed.
 *
 * @tmpl: the template being instantiated
 * @tb: the template parameters
 *
 * Return: a pointer to the new instance, or an ERR_PTR().  The caller still
 *	   needs to register the instance.
 */
struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb)
{
	u32 mask;
	struct skcipher_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *cipher_alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return ERR_PTR(err);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);
	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, skcipher_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	cipher_alg = crypto_spawn_cipher_alg(spawn);

	err = crypto_inst_setname(skcipher_crypto_instance(inst), tmpl->name,
				  cipher_alg);
	if (err)
		goto err_free_inst;

	inst->free = skcipher_free_instance_simple;

	/* Default algorithm properties, can be overridden */
	inst->alg.base.cra_blocksize = cipher_alg->cra_blocksize;
	inst->alg.base.cra_alignmask = cipher_alg->cra_alignmask;
	inst->alg.base.cra_priority = cipher_alg->cra_priority;
	inst->alg.min_keysize = cipher_alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = cipher_alg->cra_cipher.cia_max_keysize;
	inst->alg.ivsize = cipher_alg->cra_blocksize;

	/* Use skcipher_ctx_simple by default, can be overridden */
	inst->alg.base.cra_ctxsize = sizeof(struct skcipher_ctx_simple);
	inst->alg.setkey = skcipher_setkey_simple;
	inst->alg.init = skcipher_init_tfm_simple;
	inst->alg.exit = skcipher_exit_tfm_simple;

	return inst;

err_free_inst:
	skcipher_free_instance_simple(inst);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(skcipher_alloc_instance_simple);
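/*
 * Editorial sketch, not part of the original file: how a template such as
 * "ecb" would combine skcipher_alloc_instance_simple() with
 * skcipher_register_instance() in its ->create() hook.  The function name
 * is hypothetical and the mode's actual encrypt/decrypt handlers are
 * omitted.
 */
static int __maybe_unused example_simple_mode_create(struct crypto_template *tmpl,
						     struct rtattr **tb)
{
	struct skcipher_instance *inst;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	/* A real mode would set inst->alg.encrypt and inst->alg.decrypt. */

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	return err;
}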
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Symmetric key cipher type");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);