
TOMOYO Linux Cross Reference
Linux/crypto/skcipher.c

Diff markup

Differences between /crypto/skcipher.c (Version linux-6.11.5) and /crypto/skcipher.c (Version linux-5.3.18)
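
Before the diff itself, a brief reminder of how this file's walk API is consumed by cipher implementations. The code below is a minimal driver-side sketch, not part of skcipher.c: example_skcipher_crypt() and xor_with_key() are hypothetical names, and the real per-chunk cipher work would replace the latter.

#include <crypto/internal/skcipher.h>

static int example_skcipher_crypt(struct skcipher_request *req)
{
        struct skcipher_walk walk;
        int err;

        /* Map the first chunk of req->src / req->dst; sleeping is allowed
         * only if the request flags permit it. */
        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes) {
                /* walk.src.virt.addr and walk.dst.virt.addr are mapped for
                 * walk.nbytes contiguous bytes at this point. */
                xor_with_key(walk.dst.virt.addr, walk.src.virt.addr,
                             walk.nbytes);

                /* Second argument is the number of bytes NOT processed;
                 * zero means this chunk is done and the walk advances. */
                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}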


  1 // SPDX-License-Identifier: GPL-2.0-or-later        1 // SPDX-License-Identifier: GPL-2.0-or-later
  2 /*                                                  2 /*
  3  * Symmetric key cipher operations.                 3  * Symmetric key cipher operations.
  4  *                                                  4  *
  5  * Generic encrypt/decrypt wrapper for ciphers      5  * Generic encrypt/decrypt wrapper for ciphers, handles operations across
  6  * multiple page boundaries by using temporary      6  * multiple page boundaries by using temporary blocks.  In user context,
  7  * the kernel is given a chance to schedule us      7  * the kernel is given a chance to schedule us once per page.
  8  *                                                  8  *
  9  * Copyright (c) 2015 Herbert Xu <herbert@gond      9  * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 10  */                                                10  */
 11                                                    11 
 12 #include <crypto/internal/aead.h>                  12 #include <crypto/internal/aead.h>
 13 #include <crypto/internal/cipher.h>            << 
 14 #include <crypto/internal/skcipher.h>              13 #include <crypto/internal/skcipher.h>
 15 #include <crypto/scatterwalk.h>                    14 #include <crypto/scatterwalk.h>
 16 #include <linux/bug.h>                             15 #include <linux/bug.h>
 17 #include <linux/cryptouser.h>                      16 #include <linux/cryptouser.h>
 18 #include <linux/err.h>                         !!  17 #include <linux/compiler.h>
 19 #include <linux/kernel.h>                      << 
 20 #include <linux/list.h>                            18 #include <linux/list.h>
 21 #include <linux/mm.h>                          << 
 22 #include <linux/module.h>                          19 #include <linux/module.h>
                                                   >>  20 #include <linux/rtnetlink.h>
 23 #include <linux/seq_file.h>                        21 #include <linux/seq_file.h>
 24 #include <linux/slab.h>                        << 
 25 #include <linux/string.h>                      << 
 26 #include <net/netlink.h>                           22 #include <net/netlink.h>
 27 #include "skcipher.h"                          << 
 28                                                    23 
 29 #define CRYPTO_ALG_TYPE_SKCIPHER_MASK   0x0000 !!  24 #include "internal.h"
 30                                                    25 
 31 enum {                                             26 enum {
 32         SKCIPHER_WALK_PHYS = 1 << 0,               27         SKCIPHER_WALK_PHYS = 1 << 0,
 33         SKCIPHER_WALK_SLOW = 1 << 1,               28         SKCIPHER_WALK_SLOW = 1 << 1,
 34         SKCIPHER_WALK_COPY = 1 << 2,               29         SKCIPHER_WALK_COPY = 1 << 2,
 35         SKCIPHER_WALK_DIFF = 1 << 3,               30         SKCIPHER_WALK_DIFF = 1 << 3,
 36         SKCIPHER_WALK_SLEEP = 1 << 4,              31         SKCIPHER_WALK_SLEEP = 1 << 4,
 37 };                                                 32 };
 38                                                    33 
 39 struct skcipher_walk_buffer {                      34 struct skcipher_walk_buffer {
 40         struct list_head entry;                    35         struct list_head entry;
 41         struct scatter_walk dst;                   36         struct scatter_walk dst;
 42         unsigned int len;                          37         unsigned int len;
 43         u8 *data;                                  38         u8 *data;
 44         u8 buffer[];                               39         u8 buffer[];
 45 };                                                 40 };
 46                                                    41 
 47 static const struct crypto_type crypto_skciphe << 
 48                                                << 
 49 static int skcipher_walk_next(struct skcipher_     42 static int skcipher_walk_next(struct skcipher_walk *walk);
 50                                                    43 
                                                   >>  44 static inline void skcipher_unmap(struct scatter_walk *walk, void *vaddr)
                                                   >>  45 {
                                                   >>  46         if (PageHighMem(scatterwalk_page(walk)))
                                                   >>  47                 kunmap_atomic(vaddr);
                                                   >>  48 }
                                                   >>  49 
                                                   >>  50 static inline void *skcipher_map(struct scatter_walk *walk)
                                                   >>  51 {
                                                   >>  52         struct page *page = scatterwalk_page(walk);
                                                   >>  53 
                                                   >>  54         return (PageHighMem(page) ? kmap_atomic(page) : page_address(page)) +
                                                   >>  55                offset_in_page(walk->offset);
                                                   >>  56 }
                                                   >>  57 
 51 static inline void skcipher_map_src(struct skc     58 static inline void skcipher_map_src(struct skcipher_walk *walk)
 52 {                                                  59 {
 53         walk->src.virt.addr = scatterwalk_map( !!  60         walk->src.virt.addr = skcipher_map(&walk->in);
 54 }                                                  61 }
 55                                                    62 
 56 static inline void skcipher_map_dst(struct skc     63 static inline void skcipher_map_dst(struct skcipher_walk *walk)
 57 {                                                  64 {
 58         walk->dst.virt.addr = scatterwalk_map( !!  65         walk->dst.virt.addr = skcipher_map(&walk->out);
 59 }                                                  66 }
 60                                                    67 
 61 static inline void skcipher_unmap_src(struct s     68 static inline void skcipher_unmap_src(struct skcipher_walk *walk)
 62 {                                                  69 {
 63         scatterwalk_unmap(walk->src.virt.addr) !!  70         skcipher_unmap(&walk->in, walk->src.virt.addr);
 64 }                                                  71 }
 65                                                    72 
 66 static inline void skcipher_unmap_dst(struct s     73 static inline void skcipher_unmap_dst(struct skcipher_walk *walk)
 67 {                                                  74 {
 68         scatterwalk_unmap(walk->dst.virt.addr) !!  75         skcipher_unmap(&walk->out, walk->dst.virt.addr);
 69 }                                                  76 }
 70                                                    77 
 71 static inline gfp_t skcipher_walk_gfp(struct s     78 static inline gfp_t skcipher_walk_gfp(struct skcipher_walk *walk)
 72 {                                                  79 {
 73         return walk->flags & SKCIPHER_WALK_SLE     80         return walk->flags & SKCIPHER_WALK_SLEEP ? GFP_KERNEL : GFP_ATOMIC;
 74 }                                                  81 }
 75                                                    82 
 76 /* Get a spot of the specified length that doe     83 /* Get a spot of the specified length that does not straddle a page.
 77  * The caller needs to ensure that there is en     84  * The caller needs to ensure that there is enough space for this operation.
 78  */                                                85  */
 79 static inline u8 *skcipher_get_spot(u8 *start,     86 static inline u8 *skcipher_get_spot(u8 *start, unsigned int len)
 80 {                                                  87 {
 81         u8 *end_page = (u8 *)(((unsigned long)     88         u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);
 82                                                    89 
 83         return max(start, end_page);               90         return max(start, end_page);
 84 }                                                  91 }
 85                                                    92 
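
A quick worked example of skcipher_get_spot() above. This is a standalone userspace sketch (not kernel code), assuming a 4096-byte page and doing the same arithmetic on plain integer addresses; the EX_ macros and get_spot() are illustrative names only.

#include <assert.h>
#include <stdint.h>

#define EX_PAGE_SIZE 4096UL
#define EX_PAGE_MASK (~(EX_PAGE_SIZE - 1))

/* Same computation as skcipher_get_spot(), on integer addresses. */
static uintptr_t get_spot(uintptr_t start, unsigned int len)
{
        uintptr_t end_page = (start + len - 1) & EX_PAGE_MASK;

        return end_page > start ? end_page : start;  /* max(start, end_page) */
}

int main(void)
{
        /* 32 bytes starting at 0x1ff0 would cross into the next page,
         * so the spot is bumped up to that page boundary. */
        assert(get_spot(0x1ff0, 32) == 0x2000);

        /* 32 bytes starting at 0x1f00 already fit in one page: unchanged. */
        assert(get_spot(0x1f00, 32) == 0x1f00);

        return 0;
}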
 86 static inline struct skcipher_alg *__crypto_sk << 
 87         struct crypto_alg *alg)                << 
 88 {                                              << 
 89         return container_of(alg, struct skciph << 
 90 }                                              << 
 91                                                << 
 92 static int skcipher_done_slow(struct skcipher_     93 static int skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
 93 {                                                  94 {
 94         u8 *addr;                                  95         u8 *addr;
 95                                                    96 
 96         addr = (u8 *)ALIGN((unsigned long)walk     97         addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
 97         addr = skcipher_get_spot(addr, bsize);     98         addr = skcipher_get_spot(addr, bsize);
 98         scatterwalk_copychunks(addr, &walk->ou     99         scatterwalk_copychunks(addr, &walk->out, bsize,
 99                                (walk->flags &     100                                (walk->flags & SKCIPHER_WALK_PHYS) ? 2 : 1);
100         return 0;                                 101         return 0;
101 }                                                 102 }
102                                                   103 
103 int skcipher_walk_done(struct skcipher_walk *w    104 int skcipher_walk_done(struct skcipher_walk *walk, int err)
104 {                                                 105 {
105         unsigned int n = walk->nbytes;            106         unsigned int n = walk->nbytes;
106         unsigned int nbytes = 0;                  107         unsigned int nbytes = 0;
107                                                   108 
108         if (!n)                                   109         if (!n)
109                 goto finish;                      110                 goto finish;
110                                                   111 
111         if (likely(err >= 0)) {                   112         if (likely(err >= 0)) {
112                 n -= err;                         113                 n -= err;
113                 nbytes = walk->total - n;         114                 nbytes = walk->total - n;
114         }                                         115         }
115                                                   116 
116         if (likely(!(walk->flags & (SKCIPHER_W    117         if (likely(!(walk->flags & (SKCIPHER_WALK_PHYS |
117                                     SKCIPHER_W    118                                     SKCIPHER_WALK_SLOW |
118                                     SKCIPHER_W    119                                     SKCIPHER_WALK_COPY |
119                                     SKCIPHER_W    120                                     SKCIPHER_WALK_DIFF)))) {
120 unmap_src:                                        121 unmap_src:
121                 skcipher_unmap_src(walk);         122                 skcipher_unmap_src(walk);
122         } else if (walk->flags & SKCIPHER_WALK    123         } else if (walk->flags & SKCIPHER_WALK_DIFF) {
123                 skcipher_unmap_dst(walk);         124                 skcipher_unmap_dst(walk);
124                 goto unmap_src;                   125                 goto unmap_src;
125         } else if (walk->flags & SKCIPHER_WALK    126         } else if (walk->flags & SKCIPHER_WALK_COPY) {
126                 skcipher_map_dst(walk);           127                 skcipher_map_dst(walk);
127                 memcpy(walk->dst.virt.addr, wa    128                 memcpy(walk->dst.virt.addr, walk->page, n);
128                 skcipher_unmap_dst(walk);         129                 skcipher_unmap_dst(walk);
129         } else if (unlikely(walk->flags & SKCI    130         } else if (unlikely(walk->flags & SKCIPHER_WALK_SLOW)) {
130                 if (err > 0) {                    131                 if (err > 0) {
131                         /*                        132                         /*
132                          * Didn't process all     133                          * Didn't process all bytes.  Either the algorithm is
133                          * broken, or this was    134                          * broken, or this was the last step and it turned out
134                          * the message wasn't     135                          * the message wasn't evenly divisible into blocks but
135                          * the algorithm requi    136                          * the algorithm requires it.
136                          */                       137                          */
137                         err = -EINVAL;            138                         err = -EINVAL;
138                         nbytes = 0;               139                         nbytes = 0;
139                 } else                            140                 } else
140                         n = skcipher_done_slow    141                         n = skcipher_done_slow(walk, n);
141         }                                         142         }
142                                                   143 
143         if (err > 0)                              144         if (err > 0)
144                 err = 0;                          145                 err = 0;
145                                                   146 
146         walk->total = nbytes;                     147         walk->total = nbytes;
147         walk->nbytes = 0;                         148         walk->nbytes = 0;
148                                                   149 
149         scatterwalk_advance(&walk->in, n);        150         scatterwalk_advance(&walk->in, n);
150         scatterwalk_advance(&walk->out, n);       151         scatterwalk_advance(&walk->out, n);
151         scatterwalk_done(&walk->in, 0, nbytes)    152         scatterwalk_done(&walk->in, 0, nbytes);
152         scatterwalk_done(&walk->out, 1, nbytes    153         scatterwalk_done(&walk->out, 1, nbytes);
153                                                   154 
154         if (nbytes) {                             155         if (nbytes) {
155                 crypto_yield(walk->flags & SKC    156                 crypto_yield(walk->flags & SKCIPHER_WALK_SLEEP ?
156                              CRYPTO_TFM_REQ_MA    157                              CRYPTO_TFM_REQ_MAY_SLEEP : 0);
157                 return skcipher_walk_next(walk    158                 return skcipher_walk_next(walk);
158         }                                         159         }
159                                                   160 
160 finish:                                           161 finish:
161         /* Short-circuit for the common/fast p    162         /* Short-circuit for the common/fast path. */
162         if (!((unsigned long)walk->buffer | (u    163         if (!((unsigned long)walk->buffer | (unsigned long)walk->page))
163                 goto out;                         164                 goto out;
164                                                   165 
165         if (walk->flags & SKCIPHER_WALK_PHYS)     166         if (walk->flags & SKCIPHER_WALK_PHYS)
166                 goto out;                         167                 goto out;
167                                                   168 
168         if (walk->iv != walk->oiv)                169         if (walk->iv != walk->oiv)
169                 memcpy(walk->oiv, walk->iv, wa    170                 memcpy(walk->oiv, walk->iv, walk->ivsize);
170         if (walk->buffer != walk->page)           171         if (walk->buffer != walk->page)
171                 kfree(walk->buffer);              172                 kfree(walk->buffer);
172         if (walk->page)                           173         if (walk->page)
173                 free_page((unsigned long)walk-    174                 free_page((unsigned long)walk->page);
174                                                   175 
175 out:                                              176 out:
176         return err;                               177         return err;
177 }                                                 178 }
178 EXPORT_SYMBOL_GPL(skcipher_walk_done);            179 EXPORT_SYMBOL_GPL(skcipher_walk_done);
179                                                   180 
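
The err > 0 convention in skcipher_walk_done() above (a positive value is the number of bytes the caller left unprocessed) is what lets block-only ciphers hand back a trailing partial block. A hedged sketch of that calling pattern, replacing the loop body of the sketch near the top of this page; encrypt_blocks() is a hypothetical stand-in for the real block processing, and AES_BLOCK_SIZE comes from <crypto/aes.h>.

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes) {
                unsigned int nbytes = round_down(walk.nbytes, AES_BLOCK_SIZE);

                encrypt_blocks(walk.dst.virt.addr, walk.src.virt.addr, nbytes);

                /* Report what was not touched.  Mid-stream it is carried into
                 * the next chunk; at the end of the request a leftover smaller
                 * than a block makes the walk fail with -EINVAL, as described
                 * in the comment above. */
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }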
180 void skcipher_walk_complete(struct skcipher_wa    181 void skcipher_walk_complete(struct skcipher_walk *walk, int err)
181 {                                                 182 {
182         struct skcipher_walk_buffer *p, *tmp;     183         struct skcipher_walk_buffer *p, *tmp;
183                                                   184 
184         list_for_each_entry_safe(p, tmp, &walk    185         list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {
185                 u8 *data;                         186                 u8 *data;
186                                                   187 
187                 if (err)                          188                 if (err)
188                         goto done;                189                         goto done;
189                                                   190 
190                 data = p->data;                   191                 data = p->data;
191                 if (!data) {                      192                 if (!data) {
192                         data = PTR_ALIGN(&p->b    193                         data = PTR_ALIGN(&p->buffer[0], walk->alignmask + 1);
193                         data = skcipher_get_sp    194                         data = skcipher_get_spot(data, walk->stride);
194                 }                                 195                 }
195                                                   196 
196                 scatterwalk_copychunks(data, &    197                 scatterwalk_copychunks(data, &p->dst, p->len, 1);
197                                                   198 
198                 if (offset_in_page(p->data) +     199                 if (offset_in_page(p->data) + p->len + walk->stride >
199                     PAGE_SIZE)                    200                     PAGE_SIZE)
200                         free_page((unsigned lo    201                         free_page((unsigned long)p->data);
201                                                   202 
202 done:                                             203 done:
203                 list_del(&p->entry);              204                 list_del(&p->entry);
204                 kfree(p);                         205                 kfree(p);
205         }                                         206         }
206                                                   207 
207         if (!err && walk->iv != walk->oiv)        208         if (!err && walk->iv != walk->oiv)
208                 memcpy(walk->oiv, walk->iv, wa    209                 memcpy(walk->oiv, walk->iv, walk->ivsize);
209         if (walk->buffer != walk->page)           210         if (walk->buffer != walk->page)
210                 kfree(walk->buffer);              211                 kfree(walk->buffer);
211         if (walk->page)                           212         if (walk->page)
212                 free_page((unsigned long)walk-    213                 free_page((unsigned long)walk->page);
213 }                                                 214 }
214 EXPORT_SYMBOL_GPL(skcipher_walk_complete);        215 EXPORT_SYMBOL_GPL(skcipher_walk_complete);
215                                                   216 
216 static void skcipher_queue_write(struct skciph    217 static void skcipher_queue_write(struct skcipher_walk *walk,
217                                  struct skciph    218                                  struct skcipher_walk_buffer *p)
218 {                                                 219 {
219         p->dst = walk->out;                       220         p->dst = walk->out;
220         list_add_tail(&p->entry, &walk->buffer    221         list_add_tail(&p->entry, &walk->buffers);
221 }                                                 222 }
222                                                   223 
223 static int skcipher_next_slow(struct skcipher_    224 static int skcipher_next_slow(struct skcipher_walk *walk, unsigned int bsize)
224 {                                                 225 {
225         bool phys = walk->flags & SKCIPHER_WAL    226         bool phys = walk->flags & SKCIPHER_WALK_PHYS;
226         unsigned alignmask = walk->alignmask;     227         unsigned alignmask = walk->alignmask;
227         struct skcipher_walk_buffer *p;           228         struct skcipher_walk_buffer *p;
228         unsigned a;                               229         unsigned a;
229         unsigned n;                               230         unsigned n;
230         u8 *buffer;                               231         u8 *buffer;
231         void *v;                                  232         void *v;
232                                                   233 
233         if (!phys) {                              234         if (!phys) {
234                 if (!walk->buffer)                235                 if (!walk->buffer)
235                         walk->buffer = walk->p    236                         walk->buffer = walk->page;
236                 buffer = walk->buffer;            237                 buffer = walk->buffer;
237                 if (buffer)                       238                 if (buffer)
238                         goto ok;                  239                         goto ok;
239         }                                         240         }
240                                                   241 
241         /* Start with the minimum alignment of    242         /* Start with the minimum alignment of kmalloc. */
242         a = crypto_tfm_ctx_alignment() - 1;       243         a = crypto_tfm_ctx_alignment() - 1;
243         n = bsize;                                244         n = bsize;
244                                                   245 
245         if (phys) {                               246         if (phys) {
246                 /* Calculate the minimum align    247                 /* Calculate the minimum alignment of p->buffer. */
247                 a &= (sizeof(*p) ^ (sizeof(*p)    248                 a &= (sizeof(*p) ^ (sizeof(*p) - 1)) >> 1;
248                 n += sizeof(*p);                  249                 n += sizeof(*p);
249         }                                         250         }
250                                                   251 
251         /* Minimum size to align p->buffer by     252         /* Minimum size to align p->buffer by alignmask. */
252         n += alignmask & ~a;                      253         n += alignmask & ~a;
253                                                   254 
254         /* Minimum size to ensure p->buffer do    255         /* Minimum size to ensure p->buffer does not straddle a page. */
255         n += (bsize - 1) & ~(alignmask | a);      256         n += (bsize - 1) & ~(alignmask | a);
256                                                   257 
257         v = kzalloc(n, skcipher_walk_gfp(walk)    258         v = kzalloc(n, skcipher_walk_gfp(walk));
258         if (!v)                                   259         if (!v)
259                 return skcipher_walk_done(walk    260                 return skcipher_walk_done(walk, -ENOMEM);
260                                                   261 
261         if (phys) {                               262         if (phys) {
262                 p = v;                            263                 p = v;
263                 p->len = bsize;                   264                 p->len = bsize;
264                 skcipher_queue_write(walk, p);    265                 skcipher_queue_write(walk, p);
265                 buffer = p->buffer;               266                 buffer = p->buffer;
266         } else {                                  267         } else {
267                 walk->buffer = v;                 268                 walk->buffer = v;
268                 buffer = v;                       269                 buffer = v;
269         }                                         270         }
270                                                   271 
271 ok:                                               272 ok:
272         walk->dst.virt.addr = PTR_ALIGN(buffer    273         walk->dst.virt.addr = PTR_ALIGN(buffer, alignmask + 1);
273         walk->dst.virt.addr = skcipher_get_spo    274         walk->dst.virt.addr = skcipher_get_spot(walk->dst.virt.addr, bsize);
274         walk->src.virt.addr = walk->dst.virt.a    275         walk->src.virt.addr = walk->dst.virt.addr;
275                                                   276 
276         scatterwalk_copychunks(walk->src.virt.    277         scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);
277                                                   278 
278         walk->nbytes = bsize;                     279         walk->nbytes = bsize;
279         walk->flags |= SKCIPHER_WALK_SLOW;        280         walk->flags |= SKCIPHER_WALK_SLOW;
280                                                   281 
281         return 0;                                 282         return 0;
282 }                                                 283 }
283                                                   284 
284 static int skcipher_next_copy(struct skcipher_    285 static int skcipher_next_copy(struct skcipher_walk *walk)
285 {                                                 286 {
286         struct skcipher_walk_buffer *p;           287         struct skcipher_walk_buffer *p;
287         u8 *tmp = walk->page;                     288         u8 *tmp = walk->page;
288                                                   289 
289         skcipher_map_src(walk);                   290         skcipher_map_src(walk);
290         memcpy(tmp, walk->src.virt.addr, walk-    291         memcpy(tmp, walk->src.virt.addr, walk->nbytes);
291         skcipher_unmap_src(walk);                 292         skcipher_unmap_src(walk);
292                                                   293 
293         walk->src.virt.addr = tmp;                294         walk->src.virt.addr = tmp;
294         walk->dst.virt.addr = tmp;                295         walk->dst.virt.addr = tmp;
295                                                   296 
296         if (!(walk->flags & SKCIPHER_WALK_PHYS    297         if (!(walk->flags & SKCIPHER_WALK_PHYS))
297                 return 0;                         298                 return 0;
298                                                   299 
299         p = kmalloc(sizeof(*p), skcipher_walk_    300         p = kmalloc(sizeof(*p), skcipher_walk_gfp(walk));
300         if (!p)                                   301         if (!p)
301                 return -ENOMEM;                   302                 return -ENOMEM;
302                                                   303 
303         p->data = walk->page;                     304         p->data = walk->page;
304         p->len = walk->nbytes;                    305         p->len = walk->nbytes;
305         skcipher_queue_write(walk, p);            306         skcipher_queue_write(walk, p);
306                                                   307 
307         if (offset_in_page(walk->page) + walk-    308         if (offset_in_page(walk->page) + walk->nbytes + walk->stride >
308             PAGE_SIZE)                            309             PAGE_SIZE)
309                 walk->page = NULL;                310                 walk->page = NULL;
310         else                                      311         else
311                 walk->page += walk->nbytes;       312                 walk->page += walk->nbytes;
312                                                   313 
313         return 0;                                 314         return 0;
314 }                                                 315 }
315                                                   316 
316 static int skcipher_next_fast(struct skcipher_    317 static int skcipher_next_fast(struct skcipher_walk *walk)
317 {                                                 318 {
318         unsigned long diff;                       319         unsigned long diff;
319                                                   320 
320         walk->src.phys.page = scatterwalk_page    321         walk->src.phys.page = scatterwalk_page(&walk->in);
321         walk->src.phys.offset = offset_in_page    322         walk->src.phys.offset = offset_in_page(walk->in.offset);
322         walk->dst.phys.page = scatterwalk_page    323         walk->dst.phys.page = scatterwalk_page(&walk->out);
323         walk->dst.phys.offset = offset_in_page    324         walk->dst.phys.offset = offset_in_page(walk->out.offset);
324                                                   325 
325         if (walk->flags & SKCIPHER_WALK_PHYS)     326         if (walk->flags & SKCIPHER_WALK_PHYS)
326                 return 0;                         327                 return 0;
327                                                   328 
328         diff = walk->src.phys.offset - walk->d    329         diff = walk->src.phys.offset - walk->dst.phys.offset;
329         diff |= walk->src.virt.page - walk->ds    330         diff |= walk->src.virt.page - walk->dst.virt.page;
330                                                   331 
331         skcipher_map_src(walk);                   332         skcipher_map_src(walk);
332         walk->dst.virt.addr = walk->src.virt.a    333         walk->dst.virt.addr = walk->src.virt.addr;
333                                                   334 
334         if (diff) {                               335         if (diff) {
335                 walk->flags |= SKCIPHER_WALK_D    336                 walk->flags |= SKCIPHER_WALK_DIFF;
336                 skcipher_map_dst(walk);           337                 skcipher_map_dst(walk);
337         }                                         338         }
338                                                   339 
339         return 0;                                 340         return 0;
340 }                                                 341 }
341                                                   342 
342 static int skcipher_walk_next(struct skcipher_    343 static int skcipher_walk_next(struct skcipher_walk *walk)
343 {                                                 344 {
344         unsigned int bsize;                       345         unsigned int bsize;
345         unsigned int n;                           346         unsigned int n;
346         int err;                                  347         int err;
347                                                   348 
348         walk->flags &= ~(SKCIPHER_WALK_SLOW |     349         walk->flags &= ~(SKCIPHER_WALK_SLOW | SKCIPHER_WALK_COPY |
349                          SKCIPHER_WALK_DIFF);     350                          SKCIPHER_WALK_DIFF);
350                                                   351 
351         n = walk->total;                          352         n = walk->total;
352         bsize = min(walk->stride, max(n, walk-    353         bsize = min(walk->stride, max(n, walk->blocksize));
353         n = scatterwalk_clamp(&walk->in, n);      354         n = scatterwalk_clamp(&walk->in, n);
354         n = scatterwalk_clamp(&walk->out, n);     355         n = scatterwalk_clamp(&walk->out, n);
355                                                   356 
356         if (unlikely(n < bsize)) {                357         if (unlikely(n < bsize)) {
357                 if (unlikely(walk->total < wal    358                 if (unlikely(walk->total < walk->blocksize))
358                         return skcipher_walk_d    359                         return skcipher_walk_done(walk, -EINVAL);
359                                                   360 
360 slow_path:                                        361 slow_path:
361                 err = skcipher_next_slow(walk,    362                 err = skcipher_next_slow(walk, bsize);
362                 goto set_phys_lowmem;             363                 goto set_phys_lowmem;
363         }                                         364         }
364                                                   365 
365         if (unlikely((walk->in.offset | walk->    366         if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) {
366                 if (!walk->page) {                367                 if (!walk->page) {
367                         gfp_t gfp = skcipher_w    368                         gfp_t gfp = skcipher_walk_gfp(walk);
368                                                   369 
369                         walk->page = (void *)_    370                         walk->page = (void *)__get_free_page(gfp);
370                         if (!walk->page)          371                         if (!walk->page)
371                                 goto slow_path    372                                 goto slow_path;
372                 }                                 373                 }
373                                                   374 
374                 walk->nbytes = min_t(unsigned,    375                 walk->nbytes = min_t(unsigned, n,
375                                      PAGE_SIZE    376                                      PAGE_SIZE - offset_in_page(walk->page));
376                 walk->flags |= SKCIPHER_WALK_C    377                 walk->flags |= SKCIPHER_WALK_COPY;
377                 err = skcipher_next_copy(walk)    378                 err = skcipher_next_copy(walk);
378                 goto set_phys_lowmem;             379                 goto set_phys_lowmem;
379         }                                         380         }
380                                                   381 
381         walk->nbytes = n;                         382         walk->nbytes = n;
382                                                   383 
383         return skcipher_next_fast(walk);          384         return skcipher_next_fast(walk);
384                                                   385 
385 set_phys_lowmem:                                  386 set_phys_lowmem:
386         if (!err && (walk->flags & SKCIPHER_WA    387         if (!err && (walk->flags & SKCIPHER_WALK_PHYS)) {
387                 walk->src.phys.page = virt_to_    388                 walk->src.phys.page = virt_to_page(walk->src.virt.addr);
388                 walk->dst.phys.page = virt_to_    389                 walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
389                 walk->src.phys.offset &= PAGE_    390                 walk->src.phys.offset &= PAGE_SIZE - 1;
390                 walk->dst.phys.offset &= PAGE_    391                 walk->dst.phys.offset &= PAGE_SIZE - 1;
391         }                                         392         }
392         return err;                               393         return err;
393 }                                                 394 }
394                                                   395 
395 static int skcipher_copy_iv(struct skcipher_wa    396 static int skcipher_copy_iv(struct skcipher_walk *walk)
396 {                                                 397 {
397         unsigned a = crypto_tfm_ctx_alignment(    398         unsigned a = crypto_tfm_ctx_alignment() - 1;
398         unsigned alignmask = walk->alignmask;     399         unsigned alignmask = walk->alignmask;
399         unsigned ivsize = walk->ivsize;           400         unsigned ivsize = walk->ivsize;
400         unsigned bs = walk->stride;               401         unsigned bs = walk->stride;
401         unsigned aligned_bs;                      402         unsigned aligned_bs;
402         unsigned size;                            403         unsigned size;
403         u8 *iv;                                   404         u8 *iv;
404                                                   405 
405         aligned_bs = ALIGN(bs, alignmask + 1);    406         aligned_bs = ALIGN(bs, alignmask + 1);
406                                                   407 
407         /* Minimum size to align buffer by ali    408         /* Minimum size to align buffer by alignmask. */
408         size = alignmask & ~a;                    409         size = alignmask & ~a;
409                                                   410 
410         if (walk->flags & SKCIPHER_WALK_PHYS)     411         if (walk->flags & SKCIPHER_WALK_PHYS)
411                 size += ivsize;                   412                 size += ivsize;
412         else {                                    413         else {
413                 size += aligned_bs + ivsize;      414                 size += aligned_bs + ivsize;
414                                                   415 
415                 /* Minimum size to ensure buff    416                 /* Minimum size to ensure buffer does not straddle a page. */
416                 size += (bs - 1) & ~(alignmask    417                 size += (bs - 1) & ~(alignmask | a);
417         }                                         418         }
418                                                   419 
419         walk->buffer = kmalloc(size, skcipher_    420         walk->buffer = kmalloc(size, skcipher_walk_gfp(walk));
420         if (!walk->buffer)                        421         if (!walk->buffer)
421                 return -ENOMEM;                   422                 return -ENOMEM;
422                                                   423 
423         iv = PTR_ALIGN(walk->buffer, alignmask    424         iv = PTR_ALIGN(walk->buffer, alignmask + 1);
424         iv = skcipher_get_spot(iv, bs) + align    425         iv = skcipher_get_spot(iv, bs) + aligned_bs;
425                                                   426 
426         walk->iv = memcpy(iv, walk->iv, walk->    427         walk->iv = memcpy(iv, walk->iv, walk->ivsize);
427         return 0;                                 428         return 0;
428 }                                                 429 }
429                                                   430 
430 static int skcipher_walk_first(struct skcipher    431 static int skcipher_walk_first(struct skcipher_walk *walk)
431 {                                                 432 {
432         if (WARN_ON_ONCE(in_hardirq()))        !! 433         if (WARN_ON_ONCE(in_irq()))
433                 return -EDEADLK;                  434                 return -EDEADLK;
434                                                   435 
435         walk->buffer = NULL;                      436         walk->buffer = NULL;
436         if (unlikely(((unsigned long)walk->iv     437         if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {
437                 int err = skcipher_copy_iv(wal    438                 int err = skcipher_copy_iv(walk);
438                 if (err)                          439                 if (err)
439                         return err;               440                         return err;
440         }                                         441         }
441                                                   442 
442         walk->page = NULL;                        443         walk->page = NULL;
443                                                   444 
444         return skcipher_walk_next(walk);          445         return skcipher_walk_next(walk);
445 }                                                 446 }
446                                                   447 
447 static int skcipher_walk_skcipher(struct skcip    448 static int skcipher_walk_skcipher(struct skcipher_walk *walk,
448                                   struct skcip    449                                   struct skcipher_request *req)
449 {                                                 450 {
450         struct crypto_skcipher *tfm = crypto_s    451         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
451         struct skcipher_alg *alg = crypto_skci << 
452                                                   452 
453         walk->total = req->cryptlen;              453         walk->total = req->cryptlen;
454         walk->nbytes = 0;                         454         walk->nbytes = 0;
455         walk->iv = req->iv;                       455         walk->iv = req->iv;
456         walk->oiv = req->iv;                      456         walk->oiv = req->iv;
457                                                   457 
458         if (unlikely(!walk->total))               458         if (unlikely(!walk->total))
459                 return 0;                         459                 return 0;
460                                                   460 
461         scatterwalk_start(&walk->in, req->src)    461         scatterwalk_start(&walk->in, req->src);
462         scatterwalk_start(&walk->out, req->dst    462         scatterwalk_start(&walk->out, req->dst);
463                                                   463 
464         walk->flags &= ~SKCIPHER_WALK_SLEEP;      464         walk->flags &= ~SKCIPHER_WALK_SLEEP;
465         walk->flags |= req->base.flags & CRYPT    465         walk->flags |= req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
466                        SKCIPHER_WALK_SLEEP : 0    466                        SKCIPHER_WALK_SLEEP : 0;
467                                                   467 
468         walk->blocksize = crypto_skcipher_bloc    468         walk->blocksize = crypto_skcipher_blocksize(tfm);
                                                   >> 469         walk->stride = crypto_skcipher_walksize(tfm);
469         walk->ivsize = crypto_skcipher_ivsize(    470         walk->ivsize = crypto_skcipher_ivsize(tfm);
470         walk->alignmask = crypto_skcipher_alig    471         walk->alignmask = crypto_skcipher_alignmask(tfm);
471                                                   472 
472         if (alg->co.base.cra_type != &crypto_s << 
473                 walk->stride = alg->co.chunksi << 
474         else                                   << 
475                 walk->stride = alg->walksize;  << 
476                                                << 
477         return skcipher_walk_first(walk);         473         return skcipher_walk_first(walk);
478 }                                                 474 }
479                                                   475 
480 int skcipher_walk_virt(struct skcipher_walk *w    476 int skcipher_walk_virt(struct skcipher_walk *walk,
481                        struct skcipher_request    477                        struct skcipher_request *req, bool atomic)
482 {                                                 478 {
483         int err;                                  479         int err;
484                                                   480 
485         might_sleep_if(req->base.flags & CRYPT    481         might_sleep_if(req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
486                                                   482 
487         walk->flags &= ~SKCIPHER_WALK_PHYS;       483         walk->flags &= ~SKCIPHER_WALK_PHYS;
488                                                   484 
489         err = skcipher_walk_skcipher(walk, req    485         err = skcipher_walk_skcipher(walk, req);
490                                                   486 
491         walk->flags &= atomic ? ~SKCIPHER_WALK    487         walk->flags &= atomic ? ~SKCIPHER_WALK_SLEEP : ~0;
492                                                   488 
493         return err;                               489         return err;
494 }                                                 490 }
495 EXPORT_SYMBOL_GPL(skcipher_walk_virt);            491 EXPORT_SYMBOL_GPL(skcipher_walk_virt);
496                                                   492 
                                                   >> 493 void skcipher_walk_atomise(struct skcipher_walk *walk)
                                                   >> 494 {
                                                   >> 495         walk->flags &= ~SKCIPHER_WALK_SLEEP;
                                                   >> 496 }
                                                   >> 497 EXPORT_SYMBOL_GPL(skcipher_walk_atomise);
                                                   >> 498 
497 int skcipher_walk_async(struct skcipher_walk *    499 int skcipher_walk_async(struct skcipher_walk *walk,
498                         struct skcipher_reques    500                         struct skcipher_request *req)
499 {                                                 501 {
500         walk->flags |= SKCIPHER_WALK_PHYS;        502         walk->flags |= SKCIPHER_WALK_PHYS;
501                                                   503 
502         INIT_LIST_HEAD(&walk->buffers);           504         INIT_LIST_HEAD(&walk->buffers);
503                                                   505 
504         return skcipher_walk_skcipher(walk, re    506         return skcipher_walk_skcipher(walk, req);
505 }                                                 507 }
506 EXPORT_SYMBOL_GPL(skcipher_walk_async);           508 EXPORT_SYMBOL_GPL(skcipher_walk_async);
507                                                   509 
508 static int skcipher_walk_aead_common(struct sk    510 static int skcipher_walk_aead_common(struct skcipher_walk *walk,
509                                      struct ae    511                                      struct aead_request *req, bool atomic)
510 {                                                 512 {
511         struct crypto_aead *tfm = crypto_aead_    513         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
512         int err;                                  514         int err;
513                                                   515 
514         walk->nbytes = 0;                         516         walk->nbytes = 0;
515         walk->iv = req->iv;                       517         walk->iv = req->iv;
516         walk->oiv = req->iv;                      518         walk->oiv = req->iv;
517                                                   519 
518         if (unlikely(!walk->total))               520         if (unlikely(!walk->total))
519                 return 0;                         521                 return 0;
520                                                   522 
521         walk->flags &= ~SKCIPHER_WALK_PHYS;       523         walk->flags &= ~SKCIPHER_WALK_PHYS;
522                                                   524 
523         scatterwalk_start(&walk->in, req->src)    525         scatterwalk_start(&walk->in, req->src);
524         scatterwalk_start(&walk->out, req->dst    526         scatterwalk_start(&walk->out, req->dst);
525                                                   527 
526         scatterwalk_copychunks(NULL, &walk->in    528         scatterwalk_copychunks(NULL, &walk->in, req->assoclen, 2);
527         scatterwalk_copychunks(NULL, &walk->ou    529         scatterwalk_copychunks(NULL, &walk->out, req->assoclen, 2);
528                                                   530 
529         scatterwalk_done(&walk->in, 0, walk->t    531         scatterwalk_done(&walk->in, 0, walk->total);
530         scatterwalk_done(&walk->out, 0, walk->    532         scatterwalk_done(&walk->out, 0, walk->total);
531                                                   533 
532         if (req->base.flags & CRYPTO_TFM_REQ_M    534         if (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP)
533                 walk->flags |= SKCIPHER_WALK_S    535                 walk->flags |= SKCIPHER_WALK_SLEEP;
534         else                                      536         else
535                 walk->flags &= ~SKCIPHER_WALK_    537                 walk->flags &= ~SKCIPHER_WALK_SLEEP;
536                                                   538 
537         walk->blocksize = crypto_aead_blocksiz    539         walk->blocksize = crypto_aead_blocksize(tfm);
538         walk->stride = crypto_aead_chunksize(t    540         walk->stride = crypto_aead_chunksize(tfm);
539         walk->ivsize = crypto_aead_ivsize(tfm)    541         walk->ivsize = crypto_aead_ivsize(tfm);
540         walk->alignmask = crypto_aead_alignmas    542         walk->alignmask = crypto_aead_alignmask(tfm);
541                                                   543 
542         err = skcipher_walk_first(walk);          544         err = skcipher_walk_first(walk);
543                                                   545 
544         if (atomic)                               546         if (atomic)
545                 walk->flags &= ~SKCIPHER_WALK_    547                 walk->flags &= ~SKCIPHER_WALK_SLEEP;
546                                                   548 
547         return err;                               549         return err;
548 }                                                 550 }
549                                                   551 
                                                   >> 552 int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
                                                   >> 553                        bool atomic)
                                                   >> 554 {
                                                   >> 555         walk->total = req->cryptlen;
                                                   >> 556 
                                                   >> 557         return skcipher_walk_aead_common(walk, req, atomic);
                                                   >> 558 }
                                                   >> 559 EXPORT_SYMBOL_GPL(skcipher_walk_aead);
                                                   >> 560 
550 int skcipher_walk_aead_encrypt(struct skcipher    561 int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
551                                struct aead_req    562                                struct aead_request *req, bool atomic)
552 {                                                 563 {
553         walk->total = req->cryptlen;              564         walk->total = req->cryptlen;
554                                                   565 
555         return skcipher_walk_aead_common(walk,    566         return skcipher_walk_aead_common(walk, req, atomic);
556 }                                                 567 }
557 EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt);    568 EXPORT_SYMBOL_GPL(skcipher_walk_aead_encrypt);
558                                                   569 
559 int skcipher_walk_aead_decrypt(struct skcipher    570 int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
560                                struct aead_req    571                                struct aead_request *req, bool atomic)
561 {                                                 572 {
562         struct crypto_aead *tfm = crypto_aead_    573         struct crypto_aead *tfm = crypto_aead_reqtfm(req);
563                                                   574 
564         walk->total = req->cryptlen - crypto_a    575         walk->total = req->cryptlen - crypto_aead_authsize(tfm);
565                                                   576 
566         return skcipher_walk_aead_common(walk,    577         return skcipher_walk_aead_common(walk, req, atomic);
567 }                                                 578 }
568 EXPORT_SYMBOL_GPL(skcipher_walk_aead_decrypt);    579 EXPORT_SYMBOL_GPL(skcipher_walk_aead_decrypt);
569                                                   580 
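
The AEAD entry points above feed the same walk machinery, but with the associated data already skipped and, on decryption, the authentication tag excluded from walk.total. A minimal hypothetical driver-side decrypt loop (tag verification is assumed to be handled elsewhere by the driver; example_aead_decrypt_data() and decrypt_chunk() are made-up names):

#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

static int example_aead_decrypt_data(struct aead_request *req)
{
        struct skcipher_walk walk;
        int err;

        /* walk.total = req->cryptlen - authsize; req->assoclen bytes of
         * associated data have already been stepped over. */
        err = skcipher_walk_aead_decrypt(&walk, req, false);

        while (walk.nbytes) {
                decrypt_chunk(walk.dst.virt.addr, walk.src.virt.addr,
                              walk.nbytes, walk.iv);
                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}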
                                                   >> 581 static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
                                                   >> 582 {
                                                   >> 583         if (alg->cra_type == &crypto_blkcipher_type)
                                                   >> 584                 return sizeof(struct crypto_blkcipher *);
                                                   >> 585 
                                                   >> 586         if (alg->cra_type == &crypto_ablkcipher_type)
                                                   >> 587                 return sizeof(struct crypto_ablkcipher *);
                                                   >> 588 
                                                   >> 589         return crypto_alg_extsize(alg);
                                                   >> 590 }
                                                   >> 591 
570 static void skcipher_set_needkey(struct crypto    592 static void skcipher_set_needkey(struct crypto_skcipher *tfm)
571 {                                                 593 {
572         if (crypto_skcipher_max_keysize(tfm) ! !! 594         if (tfm->keysize)
573                 crypto_skcipher_set_flags(tfm,    595                 crypto_skcipher_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
574 }                                                 596 }
575                                                   597 
576 static int skcipher_setkey_unaligned(struct cr !! 598 static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm,
577                                      const u8     599                                      const u8 *key, unsigned int keylen)
578 {                                                 600 {
579         unsigned long alignmask = crypto_skcip !! 601         struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
580         struct skcipher_alg *cipher = crypto_s !! 602         struct crypto_blkcipher *blkcipher = *ctx;
581         u8 *buffer, *alignbuffer;              !! 603         int err;
582         unsigned long absize;                  << 
583         int ret;                               << 
584                                                   604 
585         absize = keylen + alignmask;           !! 605         crypto_blkcipher_clear_flags(blkcipher, ~0);
586         buffer = kmalloc(absize, GFP_ATOMIC);  !! 606         crypto_blkcipher_set_flags(blkcipher, crypto_skcipher_get_flags(tfm) &
587         if (!buffer)                           !! 607                                               CRYPTO_TFM_REQ_MASK);
588                 return -ENOMEM;                !! 608         err = crypto_blkcipher_setkey(blkcipher, key, keylen);
                                                   >> 609         crypto_skcipher_set_flags(tfm, crypto_blkcipher_get_flags(blkcipher) &
                                                   >> 610                                        CRYPTO_TFM_RES_MASK);
                                                   >> 611         if (unlikely(err)) {
                                                   >> 612                 skcipher_set_needkey(tfm);
                                                   >> 613                 return err;
                                                   >> 614         }
589                                                   615 
590         alignbuffer = (u8 *)ALIGN((unsigned lo !! 616         crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
591         memcpy(alignbuffer, key, keylen);      !! 617         return 0;
592         ret = cipher->setkey(tfm, alignbuffer, << 
593         kfree_sensitive(buffer);               << 
594         return ret;                            << 
595 }                                                 618 }
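/*
 * Illustrative sketch (not part of skcipher.c): how the unaligned-key
 * path above lands on an aligned key copy.  Suppose alignmask = 15 (the
 * algorithm wants 16-byte alignment) and the kmalloc'd buffer happens to
 * start at 0x1008:
 *
 *	absize      = keylen + 15;			worst-case padding
 *	alignbuffer = (u8 *)ALIGN(0x1008, 15 + 1);	-> 0x1010
 *
 * ALIGN(x, a) rounds x up to the next multiple of a, i.e.
 * (x + a - 1) & ~(a - 1) for power-of-two a, so the copy handed to
 * ->setkey() always satisfies the algorithm's cra_alignmask, and the
 * temporary is wiped with kfree_sensitive() because it held key material.
 */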
596                                                   619 
597 int crypto_skcipher_setkey(struct crypto_skcip !! 620 static int skcipher_crypt_blkcipher(struct skcipher_request *req,
598                            unsigned int keylen !! 621                                     int (*crypt)(struct blkcipher_desc *,
                                                   >> 622                                                  struct scatterlist *,
                                                   >> 623                                                  struct scatterlist *,
                                                   >> 624                                                  unsigned int))
599 {                                                 625 {
600         struct skcipher_alg *cipher = crypto_s !! 626         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
601         unsigned long alignmask = crypto_skcip !! 627         struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
602         int err;                               !! 628         struct blkcipher_desc desc = {
                                                   >> 629                 .tfm = *ctx,
                                                   >> 630                 .info = req->iv,
                                                   >> 631                 .flags = req->base.flags,
                                                   >> 632         };
603                                                   633 
604         if (cipher->co.base.cra_type != &crypt << 
605                 struct crypto_lskcipher **ctx  << 
606                                                   634 
607                 crypto_lskcipher_clear_flags(* !! 635         return crypt(&desc, req->dst, req->src, req->cryptlen);
608                 crypto_lskcipher_set_flags(*ct !! 636 }
609                                            cry !! 637 
610                                            CRY !! 638 static int skcipher_encrypt_blkcipher(struct skcipher_request *req)
611                 err = crypto_lskcipher_setkey( !! 639 {
612                 goto out;                      !! 640         struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
                                                   >> 641         struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
                                                   >> 642         struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
                                                   >> 643 
                                                   >> 644         return skcipher_crypt_blkcipher(req, alg->encrypt);
                                                   >> 645 }
                                                   >> 646 
                                                   >> 647 static int skcipher_decrypt_blkcipher(struct skcipher_request *req)
                                                   >> 648 {
                                                   >> 649         struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
                                                   >> 650         struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
                                                   >> 651         struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
                                                   >> 652 
                                                   >> 653         return skcipher_crypt_blkcipher(req, alg->decrypt);
                                                   >> 654 }
                                                   >> 655 
                                                   >> 656 static void crypto_exit_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
                                                   >> 657 {
                                                   >> 658         struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
                                                   >> 659 
                                                   >> 660         crypto_free_blkcipher(*ctx);
                                                   >> 661 }
                                                   >> 662 
                                                   >> 663 static int crypto_init_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
                                                   >> 664 {
                                                   >> 665         struct crypto_alg *calg = tfm->__crt_alg;
                                                   >> 666         struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
                                                   >> 667         struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
                                                   >> 668         struct crypto_blkcipher *blkcipher;
                                                   >> 669         struct crypto_tfm *btfm;
                                                   >> 670 
                                                   >> 671         if (!crypto_mod_get(calg))
                                                   >> 672                 return -EAGAIN;
                                                   >> 673 
                                                   >> 674         btfm = __crypto_alloc_tfm(calg, CRYPTO_ALG_TYPE_BLKCIPHER,
                                                   >> 675                                         CRYPTO_ALG_TYPE_MASK);
                                                   >> 676         if (IS_ERR(btfm)) {
                                                   >> 677                 crypto_mod_put(calg);
                                                   >> 678                 return PTR_ERR(btfm);
613         }                                         679         }
614                                                   680 
615         if (keylen < cipher->min_keysize || ke !! 681         blkcipher = __crypto_blkcipher_cast(btfm);
616                 return -EINVAL;                !! 682         *ctx = blkcipher;
                                                   >> 683         tfm->exit = crypto_exit_skcipher_ops_blkcipher;
                                                   >> 684 
                                                   >> 685         skcipher->setkey = skcipher_setkey_blkcipher;
                                                   >> 686         skcipher->encrypt = skcipher_encrypt_blkcipher;
                                                   >> 687         skcipher->decrypt = skcipher_decrypt_blkcipher;
617                                                   688 
618         if ((unsigned long)key & alignmask)    !! 689         skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher);
619                 err = skcipher_setkey_unaligne !! 690         skcipher->keysize = calg->cra_blkcipher.max_keysize;
620         else                                   << 
621                 err = cipher->setkey(tfm, key, << 
622                                                   691 
623 out:                                           !! 692         skcipher_set_needkey(skcipher);
                                                   >> 693 
                                                   >> 694         return 0;
                                                   >> 695 }
                                                   >> 696 
                                                   >> 697 static int skcipher_setkey_ablkcipher(struct crypto_skcipher *tfm,
                                                   >> 698                                       const u8 *key, unsigned int keylen)
                                                   >> 699 {
                                                   >> 700         struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
                                                   >> 701         struct crypto_ablkcipher *ablkcipher = *ctx;
                                                   >> 702         int err;
                                                   >> 703 
                                                   >> 704         crypto_ablkcipher_clear_flags(ablkcipher, ~0);
                                                   >> 705         crypto_ablkcipher_set_flags(ablkcipher,
                                                   >> 706                                     crypto_skcipher_get_flags(tfm) &
                                                   >> 707                                     CRYPTO_TFM_REQ_MASK);
                                                   >> 708         err = crypto_ablkcipher_setkey(ablkcipher, key, keylen);
                                                   >> 709         crypto_skcipher_set_flags(tfm,
                                                   >> 710                                   crypto_ablkcipher_get_flags(ablkcipher) &
                                                   >> 711                                   CRYPTO_TFM_RES_MASK);
624         if (unlikely(err)) {                      712         if (unlikely(err)) {
625                 skcipher_set_needkey(tfm);        713                 skcipher_set_needkey(tfm);
626                 return err;                       714                 return err;
627         }                                         715         }
628                                                   716 
629         crypto_skcipher_clear_flags(tfm, CRYPT    717         crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
630         return 0;                                 718         return 0;
631 }                                                 719 }
632 EXPORT_SYMBOL_GPL(crypto_skcipher_setkey);     << 
633                                                   720 
634 int crypto_skcipher_encrypt(struct skcipher_re !! 721 static int skcipher_crypt_ablkcipher(struct skcipher_request *req,
                                                   >> 722                                      int (*crypt)(struct ablkcipher_request *))
635 {                                                 723 {
636         struct crypto_skcipher *tfm = crypto_s    724         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
637         struct skcipher_alg *alg = crypto_skci !! 725         struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
                                                   >> 726         struct ablkcipher_request *subreq = skcipher_request_ctx(req);
638                                                   727 
639         if (crypto_skcipher_get_flags(tfm) & C !! 728         ablkcipher_request_set_tfm(subreq, *ctx);
640                 return -ENOKEY;                !! 729         ablkcipher_request_set_callback(subreq, skcipher_request_flags(req),
641         if (alg->co.base.cra_type != &crypto_s !! 730                                         req->base.complete, req->base.data);
642                 return crypto_lskcipher_encryp !! 731         ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
643         return alg->encrypt(req);              !! 732                                      req->iv);
                                                   >> 733 
                                                   >> 734         return crypt(subreq);
644 }                                                 735 }
645 EXPORT_SYMBOL_GPL(crypto_skcipher_encrypt);    << 
646                                                   736 
647 int crypto_skcipher_decrypt(struct skcipher_re !! 737 static int skcipher_encrypt_ablkcipher(struct skcipher_request *req)
648 {                                                 738 {
649         struct crypto_skcipher *tfm = crypto_s !! 739         struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
650         struct skcipher_alg *alg = crypto_skci !! 740         struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
                                                   >> 741         struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
651                                                   742 
652         if (crypto_skcipher_get_flags(tfm) & C !! 743         return skcipher_crypt_ablkcipher(req, alg->encrypt);
653                 return -ENOKEY;                << 
654         if (alg->co.base.cra_type != &crypto_s << 
655                 return crypto_lskcipher_decryp << 
656         return alg->decrypt(req);              << 
657 }                                                 744 }
658 EXPORT_SYMBOL_GPL(crypto_skcipher_decrypt);    << 
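/*
 * Illustrative sketch (not part of skcipher.c): a typical caller of the
 * two entry points above.  Needs <crypto/skcipher.h>, <linux/crypto.h>
 * and <linux/scatterlist.h>; the algorithm name, key and data buffer are
 * placeholders and error paths are trimmed.  Real users normally pass
 * kmalloc'd data buffers, since async drivers may DMA to/from them.
 */
static int example_cbc_encrypt_one_block(void)
{
	u8 key[16] = { 0 };		/* AES-128 key (placeholder) */
	u8 iv[16] = { 0 };		/* per-message IV (placeholder) */
	u8 buf[16] = { 0 };		/* one block, encrypted in place */
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, sizeof(buf));
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					   CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);

	/* May return -EINPROGRESS/-EBUSY; crypto_wait_req() sleeps until
	 * the async completion fires and returns the final status. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}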
659                                                   745 
660 static int crypto_lskcipher_export(struct skci !! 746 static int skcipher_decrypt_ablkcipher(struct skcipher_request *req)
661 {                                                 747 {
662         struct crypto_skcipher *tfm = crypto_s !! 748         struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
663         u8 *ivs = skcipher_request_ctx(req);   !! 749         struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
                                                   >> 750         struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
664                                                   751 
665         ivs = PTR_ALIGN(ivs, crypto_skcipher_a !! 752         return skcipher_crypt_ablkcipher(req, alg->decrypt);
                                                   >> 753 }
666                                                   754 
667         memcpy(out, ivs + crypto_skcipher_ivsi !! 755 static void crypto_exit_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
668                crypto_skcipher_statesize(tfm)) !! 756 {
                                                   >> 757         struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
669                                                   758 
670         return 0;                              !! 759         crypto_free_ablkcipher(*ctx);
671 }                                                 760 }
672                                                   761 
673 static int crypto_lskcipher_import(struct skci !! 762 static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
674 {                                                 763 {
675         struct crypto_skcipher *tfm = crypto_s !! 764         struct crypto_alg *calg = tfm->__crt_alg;
676         u8 *ivs = skcipher_request_ctx(req);   !! 765         struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
                                                   >> 766         struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
                                                   >> 767         struct crypto_ablkcipher *ablkcipher;
                                                   >> 768         struct crypto_tfm *abtfm;
                                                   >> 769 
                                                   >> 770         if (!crypto_mod_get(calg))
                                                   >> 771                 return -EAGAIN;
                                                   >> 772 
                                                   >> 773         abtfm = __crypto_alloc_tfm(calg, 0, 0);
                                                   >> 774         if (IS_ERR(abtfm)) {
                                                   >> 775                 crypto_mod_put(calg);
                                                   >> 776                 return PTR_ERR(abtfm);
                                                   >> 777         }
677                                                   778 
678         ivs = PTR_ALIGN(ivs, crypto_skcipher_a !! 779         ablkcipher = __crypto_ablkcipher_cast(abtfm);
                                                   >> 780         *ctx = ablkcipher;
                                                   >> 781         tfm->exit = crypto_exit_skcipher_ops_ablkcipher;
                                                   >> 782 
                                                   >> 783         skcipher->setkey = skcipher_setkey_ablkcipher;
                                                   >> 784         skcipher->encrypt = skcipher_encrypt_ablkcipher;
                                                   >> 785         skcipher->decrypt = skcipher_decrypt_ablkcipher;
                                                   >> 786 
                                                   >> 787         skcipher->ivsize = crypto_ablkcipher_ivsize(ablkcipher);
                                                   >> 788         skcipher->reqsize = crypto_ablkcipher_reqsize(ablkcipher) +
                                                   >> 789                             sizeof(struct ablkcipher_request);
                                                   >> 790         skcipher->keysize = calg->cra_ablkcipher.max_keysize;
679                                                   791 
680         memcpy(ivs + crypto_skcipher_ivsize(tf !! 792         skcipher_set_needkey(skcipher);
681                crypto_skcipher_statesize(tfm)) << 
682                                                   793 
683         return 0;                                 794         return 0;
684 }                                                 795 }
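/*
 * Illustrative note (not part of skcipher.c): for lskcipher-backed tfms
 * the request context used by the two helpers above is laid out by
 * crypto_skcipher_init_tfm() further down as
 *
 *	[ alignment padding | IV (ivsize bytes) | state (statesize bytes) ]
 *
 * hence the PTR_ALIGN() on the ctx pointer and the "ivs + ivsize" offset
 * when copying the exportable state in and out.
 */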
685                                                   796 
686 static int skcipher_noexport(struct skcipher_r !! 797 static int skcipher_setkey_unaligned(struct crypto_skcipher *tfm,
                                                   >> 798                                      const u8 *key, unsigned int keylen)
687 {                                                 799 {
688         return 0;                              !! 800         unsigned long alignmask = crypto_skcipher_alignmask(tfm);
                                                   >> 801         struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
                                                   >> 802         u8 *buffer, *alignbuffer;
                                                   >> 803         unsigned long absize;
                                                   >> 804         int ret;
                                                   >> 805 
                                                   >> 806         absize = keylen + alignmask;
                                                   >> 807         buffer = kmalloc(absize, GFP_ATOMIC);
                                                   >> 808         if (!buffer)
                                                   >> 809                 return -ENOMEM;
                                                   >> 810 
                                                   >> 811         alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
                                                   >> 812         memcpy(alignbuffer, key, keylen);
                                                   >> 813         ret = cipher->setkey(tfm, alignbuffer, keylen);
                                                   >> 814         kzfree(buffer);
                                                   >> 815         return ret;
689 }                                                 816 }
690                                                   817 
691 static int skcipher_noimport(struct skcipher_r !! 818 static int skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                                   >> 819                            unsigned int keylen)
692 {                                                 820 {
                                                   >> 821         struct skcipher_alg *cipher = crypto_skcipher_alg(tfm);
                                                   >> 822         unsigned long alignmask = crypto_skcipher_alignmask(tfm);
                                                   >> 823         int err;
                                                   >> 824 
                                                   >> 825         if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
                                                   >> 826                 crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                                                   >> 827                 return -EINVAL;
                                                   >> 828         }
                                                   >> 829 
                                                   >> 830         if ((unsigned long)key & alignmask)
                                                   >> 831                 err = skcipher_setkey_unaligned(tfm, key, keylen);
                                                   >> 832         else
                                                   >> 833                 err = cipher->setkey(tfm, key, keylen);
                                                   >> 834 
                                                   >> 835         if (unlikely(err)) {
                                                   >> 836                 skcipher_set_needkey(tfm);
                                                   >> 837                 return err;
                                                   >> 838         }
                                                   >> 839 
                                                   >> 840         crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
693         return 0;                                 841         return 0;
694 }                                                 842 }
695                                                   843 
696 int crypto_skcipher_export(struct skcipher_req !! 844 int crypto_skcipher_encrypt(struct skcipher_request *req)
697 {                                                 845 {
698         struct crypto_skcipher *tfm = crypto_s    846         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
699         struct skcipher_alg *alg = crypto_skci !! 847         struct crypto_alg *alg = tfm->base.__crt_alg;
                                                   >> 848         unsigned int cryptlen = req->cryptlen;
                                                   >> 849         int ret;
700                                                   850 
701         if (alg->co.base.cra_type != &crypto_s !! 851         crypto_stats_get(alg);
702                 return crypto_lskcipher_export !! 852         if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
703         return alg->export(req, out);          !! 853                 ret = -ENOKEY;
                                                   >> 854         else
                                                   >> 855                 ret = tfm->encrypt(req);
                                                   >> 856         crypto_stats_skcipher_encrypt(cryptlen, ret, alg);
                                                   >> 857         return ret;
704 }                                                 858 }
705 EXPORT_SYMBOL_GPL(crypto_skcipher_export);     !! 859 EXPORT_SYMBOL_GPL(crypto_skcipher_encrypt);
706                                                   860 
707 int crypto_skcipher_import(struct skcipher_req !! 861 int crypto_skcipher_decrypt(struct skcipher_request *req)
708 {                                                 862 {
709         struct crypto_skcipher *tfm = crypto_s    863         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
710         struct skcipher_alg *alg = crypto_skci !! 864         struct crypto_alg *alg = tfm->base.__crt_alg;
                                                   >> 865         unsigned int cryptlen = req->cryptlen;
                                                   >> 866         int ret;
711                                                   867 
712         if (alg->co.base.cra_type != &crypto_s !! 868         crypto_stats_get(alg);
713                 return crypto_lskcipher_import !! 869         if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
714         return alg->import(req, in);           !! 870                 ret = -ENOKEY;
                                                   >> 871         else
                                                   >> 872                 ret = tfm->decrypt(req);
                                                   >> 873         crypto_stats_skcipher_decrypt(cryptlen, ret, alg);
                                                   >> 874         return ret;
715 }                                                 875 }
716 EXPORT_SYMBOL_GPL(crypto_skcipher_import);     !! 876 EXPORT_SYMBOL_GPL(crypto_skcipher_decrypt);
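/*
 * Illustrative sketch (not part of skcipher.c; usage pattern assumed):
 * the export/import hooks above let a caller checkpoint a request's
 * chaining state and continue the same message in another request:
 *
 *	u8 state[STATE_MAX];	// >= crypto_skcipher_statesize(tfm)
 *
 *	err = crypto_skcipher_encrypt(req);		// first chunk
 *	err = err ?: crypto_skcipher_export(req, state);
 *	...
 *	err = crypto_skcipher_import(req2, state);	// resume here
 *	err = err ?: crypto_skcipher_encrypt(req2);	// next chunk
 *
 * Algorithms that declare no extra state get skcipher_noexport()/
 * skcipher_noimport() above wired in by skcipher_prepare_alg(), so the
 * calls stay valid (and cheap) for every skcipher.
 */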
717                                                   877 
718 static void crypto_skcipher_exit_tfm(struct cr    878 static void crypto_skcipher_exit_tfm(struct crypto_tfm *tfm)
719 {                                                 879 {
720         struct crypto_skcipher *skcipher = __c    880         struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
721         struct skcipher_alg *alg = crypto_skci    881         struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);
722                                                   882 
723         alg->exit(skcipher);                      883         alg->exit(skcipher);
724 }                                                 884 }
725                                                   885 
726 static int crypto_skcipher_init_tfm(struct cry    886 static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
727 {                                                 887 {
728         struct crypto_skcipher *skcipher = __c    888         struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
729         struct skcipher_alg *alg = crypto_skci    889         struct skcipher_alg *alg = crypto_skcipher_alg(skcipher);
730                                                   890 
731         skcipher_set_needkey(skcipher);        !! 891         if (tfm->__crt_alg->cra_type == &crypto_blkcipher_type)
                                                   >> 892                 return crypto_init_skcipher_ops_blkcipher(tfm);
732                                                   893 
733         if (tfm->__crt_alg->cra_type != &crypt !! 894         if (tfm->__crt_alg->cra_type == &crypto_ablkcipher_type)
734                 unsigned am = crypto_skcipher_ !! 895                 return crypto_init_skcipher_ops_ablkcipher(tfm);
735                 unsigned reqsize;              << 
736                                                << 
737                 reqsize = am & ~(crypto_tfm_ct << 
738                 reqsize += crypto_skcipher_ivs << 
739                 reqsize += crypto_skcipher_sta << 
740                 crypto_skcipher_set_reqsize(sk << 
741                                                   896 
742                 return crypto_init_lskcipher_o !! 897         skcipher->setkey = skcipher_setkey;
743         }                                      !! 898         skcipher->encrypt = alg->encrypt;
                                                   >> 899         skcipher->decrypt = alg->decrypt;
                                                   >> 900         skcipher->ivsize = alg->ivsize;
                                                   >> 901         skcipher->keysize = alg->max_keysize;
                                                   >> 902 
                                                   >> 903         skcipher_set_needkey(skcipher);
744                                                   904 
745         if (alg->exit)                            905         if (alg->exit)
746                 skcipher->base.exit = crypto_s    906                 skcipher->base.exit = crypto_skcipher_exit_tfm;
747                                                   907 
748         if (alg->init)                            908         if (alg->init)
749                 return alg->init(skcipher);       909                 return alg->init(skcipher);
750                                                   910 
751         return 0;                                 911         return 0;
752 }                                                 912 }
753                                                   913 
754 static unsigned int crypto_skcipher_extsize(st << 
755 {                                              << 
756         if (alg->cra_type != &crypto_skcipher_ << 
757                 return sizeof(struct crypto_ls << 
758                                                << 
759         return crypto_alg_extsize(alg);        << 
760 }                                              << 
761                                                << 
762 static void crypto_skcipher_free_instance(stru    914 static void crypto_skcipher_free_instance(struct crypto_instance *inst)
763 {                                                 915 {
764         struct skcipher_instance *skcipher =      916         struct skcipher_instance *skcipher =
765                 container_of(inst, struct skci    917                 container_of(inst, struct skcipher_instance, s.base);
766                                                   918 
767         skcipher->free(skcipher);                 919         skcipher->free(skcipher);
768 }                                                 920 }
769                                                   921 
770 static void crypto_skcipher_show(struct seq_fi    922 static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
771         __maybe_unused;                           923         __maybe_unused;
772 static void crypto_skcipher_show(struct seq_fi    924 static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
773 {                                                 925 {
774         struct skcipher_alg *skcipher = __cryp !! 926         struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg,
                                                   >> 927                                                      base);
775                                                   928 
776         seq_printf(m, "type         : skcipher    929         seq_printf(m, "type         : skcipher\n");
777         seq_printf(m, "async        : %s\n",      930         seq_printf(m, "async        : %s\n",
778                    alg->cra_flags & CRYPTO_ALG    931                    alg->cra_flags & CRYPTO_ALG_ASYNC ?  "yes" : "no");
779         seq_printf(m, "blocksize    : %u\n", a    932         seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
780         seq_printf(m, "min keysize  : %u\n", s    933         seq_printf(m, "min keysize  : %u\n", skcipher->min_keysize);
781         seq_printf(m, "max keysize  : %u\n", s    934         seq_printf(m, "max keysize  : %u\n", skcipher->max_keysize);
782         seq_printf(m, "ivsize       : %u\n", s    935         seq_printf(m, "ivsize       : %u\n", skcipher->ivsize);
783         seq_printf(m, "chunksize    : %u\n", s    936         seq_printf(m, "chunksize    : %u\n", skcipher->chunksize);
784         seq_printf(m, "walksize     : %u\n", s    937         seq_printf(m, "walksize     : %u\n", skcipher->walksize);
785         seq_printf(m, "statesize    : %u\n", s << 
786 }                                                 938 }
787                                                   939 
788 static int __maybe_unused crypto_skcipher_repo !! 940 #ifdef CONFIG_NET
789         struct sk_buff *skb, struct crypto_alg !! 941 static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
790 {                                                 942 {
791         struct skcipher_alg *skcipher = __cryp << 
792         struct crypto_report_blkcipher rblkcip    943         struct crypto_report_blkcipher rblkcipher;
                                                   >> 944         struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg,
                                                   >> 945                                                      base);
793                                                   946 
794         memset(&rblkcipher, 0, sizeof(rblkciph    947         memset(&rblkcipher, 0, sizeof(rblkcipher));
795                                                   948 
796         strscpy(rblkcipher.type, "skcipher", s    949         strscpy(rblkcipher.type, "skcipher", sizeof(rblkcipher.type));
797         strscpy(rblkcipher.geniv, "<none>", si    950         strscpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv));
798                                                   951 
799         rblkcipher.blocksize = alg->cra_blocks    952         rblkcipher.blocksize = alg->cra_blocksize;
800         rblkcipher.min_keysize = skcipher->min    953         rblkcipher.min_keysize = skcipher->min_keysize;
801         rblkcipher.max_keysize = skcipher->max    954         rblkcipher.max_keysize = skcipher->max_keysize;
802         rblkcipher.ivsize = skcipher->ivsize;     955         rblkcipher.ivsize = skcipher->ivsize;
803                                                   956 
804         return nla_put(skb, CRYPTOCFGA_REPORT_    957         return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
805                        sizeof(rblkcipher), &rb    958                        sizeof(rblkcipher), &rblkcipher);
806 }                                                 959 }
                                                   >> 960 #else
                                                   >> 961 static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
                                                   >> 962 {
                                                   >> 963         return -ENOSYS;
                                                   >> 964 }
                                                   >> 965 #endif
807                                                   966 
808 static const struct crypto_type crypto_skciphe !! 967 static const struct crypto_type crypto_skcipher_type2 = {
809         .extsize = crypto_skcipher_extsize,       968         .extsize = crypto_skcipher_extsize,
810         .init_tfm = crypto_skcipher_init_tfm,     969         .init_tfm = crypto_skcipher_init_tfm,
811         .free = crypto_skcipher_free_instance,    970         .free = crypto_skcipher_free_instance,
812 #ifdef CONFIG_PROC_FS                             971 #ifdef CONFIG_PROC_FS
813         .show = crypto_skcipher_show,             972         .show = crypto_skcipher_show,
814 #endif                                            973 #endif
815 #if IS_ENABLED(CONFIG_CRYPTO_USER)             << 
816         .report = crypto_skcipher_report,         974         .report = crypto_skcipher_report,
817 #endif                                         << 
818         .maskclear = ~CRYPTO_ALG_TYPE_MASK,       975         .maskclear = ~CRYPTO_ALG_TYPE_MASK,
819         .maskset = CRYPTO_ALG_TYPE_SKCIPHER_MA !! 976         .maskset = CRYPTO_ALG_TYPE_BLKCIPHER_MASK,
820         .type = CRYPTO_ALG_TYPE_SKCIPHER,         977         .type = CRYPTO_ALG_TYPE_SKCIPHER,
821         .tfmsize = offsetof(struct crypto_skci    978         .tfmsize = offsetof(struct crypto_skcipher, base),
822 };                                                979 };
823                                                   980 
824 int crypto_grab_skcipher(struct crypto_skciphe    981 int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
825                          struct crypto_instanc !! 982                           const char *name, u32 type, u32 mask)
826                          const char *name, u32 << 
827 {                                                 983 {
828         spawn->base.frontend = &crypto_skciphe !! 984         spawn->base.frontend = &crypto_skcipher_type2;
829         return crypto_grab_spawn(&spawn->base, !! 985         return crypto_grab_spawn(&spawn->base, name, type, mask);
830 }                                                 986 }
831 EXPORT_SYMBOL_GPL(crypto_grab_skcipher);          987 EXPORT_SYMBOL_GPL(crypto_grab_skcipher);
832                                                   988 
833 struct crypto_skcipher *crypto_alloc_skcipher(    989 struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
834                                                   990                                               u32 type, u32 mask)
835 {                                                 991 {
836         return crypto_alloc_tfm(alg_name, &cry !! 992         return crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask);
837 }                                                 993 }
838 EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);         994 EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);
839                                                   995 
840 struct crypto_sync_skcipher *crypto_alloc_sync    996 struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
841                                 const char *al    997                                 const char *alg_name, u32 type, u32 mask)
842 {                                                 998 {
843         struct crypto_skcipher *tfm;              999         struct crypto_skcipher *tfm;
844                                                   1000 
845         /* Only sync algorithms allowed. */       1001         /* Only sync algorithms allowed. */
846         mask |= CRYPTO_ALG_ASYNC | CRYPTO_ALG_ !! 1002         mask |= CRYPTO_ALG_ASYNC;
847                                                   1003 
848         tfm = crypto_alloc_tfm(alg_name, &cryp !! 1004         tfm = crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask);
849                                                   1005 
850         /*                                        1006         /*
851          * Make sure we do not allocate someth    1007          * Make sure we do not allocate something that might get used with
852          * an on-stack request: check the requ    1008          * an on-stack request: check the request size.
853          */                                       1009          */
854         if (!IS_ERR(tfm) && WARN_ON(crypto_skc    1010         if (!IS_ERR(tfm) && WARN_ON(crypto_skcipher_reqsize(tfm) >
855                                     MAX_SYNC_S    1011                                     MAX_SYNC_SKCIPHER_REQSIZE)) {
856                 crypto_free_skcipher(tfm);        1012                 crypto_free_skcipher(tfm);
857                 return ERR_PTR(-EINVAL);          1013                 return ERR_PTR(-EINVAL);
858         }                                         1014         }
859                                                   1015 
860         return (struct crypto_sync_skcipher *)    1016         return (struct crypto_sync_skcipher *)tfm;
861 }                                                 1017 }
862 EXPORT_SYMBOL_GPL(crypto_alloc_sync_skcipher);    1018 EXPORT_SYMBOL_GPL(crypto_alloc_sync_skcipher);
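/*
 * Illustrative sketch (not part of skcipher.c): the sync wrapper exists
 * so callers can keep the whole request on the stack.  The reqsize check
 * against MAX_SYNC_SKCIPHER_REQSIZE above is what makes
 * SYNC_SKCIPHER_REQUEST_ON_STACK() safe.  tfm/sg/iv come from the caller;
 * error handling is trimmed.
 */
static int example_sync_encrypt(struct crypto_sync_skcipher *tfm,
				struct scatterlist *sg, unsigned int len,
				u8 *iv)
{
	SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);
	int err;

	skcipher_request_set_sync_tfm(req, tfm);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, sg, sg, len, iv);

	/* ASYNC algorithms were masked out at allocation time, so this
	 * completes synchronously and never returns -EINPROGRESS. */
	err = crypto_skcipher_encrypt(req);

	skcipher_request_zero(req);	/* wipe the on-stack request */
	return err;
}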
863                                                   1019 
864 int crypto_has_skcipher(const char *alg_name,  !! 1020 int crypto_has_skcipher2(const char *alg_name, u32 type, u32 mask)
865 {                                                 1021 {
866         return crypto_type_has_alg(alg_name, & !! 1022         return crypto_type_has_alg(alg_name, &crypto_skcipher_type2,
                                                   >> 1023                                    type, mask);
867 }                                                 1024 }
868 EXPORT_SYMBOL_GPL(crypto_has_skcipher);        !! 1025 EXPORT_SYMBOL_GPL(crypto_has_skcipher2);
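/*
 * Illustrative note (not part of skcipher.c): this is a cheap existence
 * probe, e.g. crypto_has_skcipher("xts(aes)", 0, 0) is nonzero iff some
 * "xts(aes)" skcipher implementation could be instantiated right now,
 * without allocating a transform for the caller.
 */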
869                                                   1026 
870 int skcipher_prepare_alg_common(struct skciphe !! 1027 static int skcipher_prepare_alg(struct skcipher_alg *alg)
871 {                                                 1028 {
872         struct crypto_alg *base = &alg->base;     1029         struct crypto_alg *base = &alg->base;
873                                                   1030 
874         if (alg->ivsize > PAGE_SIZE / 8 || alg    1031         if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 ||
875             alg->statesize > PAGE_SIZE / 2 ||  !! 1032             alg->walksize > PAGE_SIZE / 8)
876             (alg->ivsize + alg->statesize) > P << 
877                 return -EINVAL;                   1033                 return -EINVAL;
878                                                   1034 
879         if (!alg->chunksize)                      1035         if (!alg->chunksize)
880                 alg->chunksize = base->cra_blo    1036                 alg->chunksize = base->cra_blocksize;
881                                                << 
882         base->cra_flags &= ~CRYPTO_ALG_TYPE_MA << 
883                                                << 
884         return 0;                              << 
885 }                                              << 
886                                                << 
887 static int skcipher_prepare_alg(struct skciphe << 
888 {                                              << 
889         struct crypto_alg *base = &alg->base;  << 
890         int err;                               << 
891                                                << 
892         err = skcipher_prepare_alg_common(&alg << 
893         if (err)                               << 
894                 return err;                    << 
895                                                << 
896         if (alg->walksize > PAGE_SIZE / 8)     << 
897                 return -EINVAL;                << 
898                                                << 
899         if (!alg->walksize)                       1037         if (!alg->walksize)
900                 alg->walksize = alg->chunksize    1038                 alg->walksize = alg->chunksize;
901                                                   1039 
902         if (!alg->statesize) {                 !! 1040         base->cra_type = &crypto_skcipher_type2;
903                 alg->import = skcipher_noimpor !! 1041         base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
904                 alg->export = skcipher_noexpor << 
905         } else if (!(alg->import && alg->expor << 
906                 return -EINVAL;                << 
907                                                << 
908         base->cra_type = &crypto_skcipher_type << 
909         base->cra_flags |= CRYPTO_ALG_TYPE_SKC    1042         base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;
910                                                   1043 
911         return 0;                                 1044         return 0;
912 }                                                 1045 }
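/*
 * Illustrative note (not part of skcipher.c): the registration sanity
 * rules enforced above, gathered in one place -
 *
 *	ivsize and chunksize        <= PAGE_SIZE / 8   (common checks)
 *	statesize                   <= PAGE_SIZE / 2, with a combined cap
 *	                               on ivsize + statesize
 *	walksize                    <= PAGE_SIZE / 8   (skcipher only)
 *	chunksize defaults to cra_blocksize, walksize to chunksize,
 *	and a nonzero statesize requires both ->import and ->export.
 */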
913                                                   1046 
914 int crypto_register_skcipher(struct skcipher_a    1047 int crypto_register_skcipher(struct skcipher_alg *alg)
915 {                                                 1048 {
916         struct crypto_alg *base = &alg->base;     1049         struct crypto_alg *base = &alg->base;
917         int err;                                  1050         int err;
918                                                   1051 
919         err = skcipher_prepare_alg(alg);          1052         err = skcipher_prepare_alg(alg);
920         if (err)                                  1053         if (err)
921                 return err;                       1054                 return err;
922                                                   1055 
923         return crypto_register_alg(base);         1056         return crypto_register_alg(base);
924 }                                                 1057 }
925 EXPORT_SYMBOL_GPL(crypto_register_skcipher);      1058 EXPORT_SYMBOL_GPL(crypto_register_skcipher);
926                                                   1059 
927 void crypto_unregister_skcipher(struct skciphe    1060 void crypto_unregister_skcipher(struct skcipher_alg *alg)
928 {                                                 1061 {
929         crypto_unregister_alg(&alg->base);        1062         crypto_unregister_alg(&alg->base);
930 }                                                 1063 }
931 EXPORT_SYMBOL_GPL(crypto_unregister_skcipher);    1064 EXPORT_SYMBOL_GPL(crypto_unregister_skcipher);
932                                                   1065 
933 int crypto_register_skciphers(struct skcipher_    1066 int crypto_register_skciphers(struct skcipher_alg *algs, int count)
934 {                                                 1067 {
935         int i, ret;                               1068         int i, ret;
936                                                   1069 
937         for (i = 0; i < count; i++) {             1070         for (i = 0; i < count; i++) {
938                 ret = crypto_register_skcipher    1071                 ret = crypto_register_skcipher(&algs[i]);
939                 if (ret)                          1072                 if (ret)
940                         goto err;                 1073                         goto err;
941         }                                         1074         }
942                                                   1075 
943         return 0;                                 1076         return 0;
944                                                   1077 
945 err:                                              1078 err:
946         for (--i; i >= 0; --i)                    1079         for (--i; i >= 0; --i)
947                 crypto_unregister_skcipher(&al    1080                 crypto_unregister_skcipher(&algs[i]);
948                                                   1081 
949         return ret;                               1082         return ret;
950 }                                                 1083 }
951 EXPORT_SYMBOL_GPL(crypto_register_skciphers);     1084 EXPORT_SYMBOL_GPL(crypto_register_skciphers);
952                                                   1085 
953 void crypto_unregister_skciphers(struct skciph    1086 void crypto_unregister_skciphers(struct skcipher_alg *algs, int count)
954 {                                                 1087 {
955         int i;                                    1088         int i;
956                                                   1089 
957         for (i = count - 1; i >= 0; --i)          1090         for (i = count - 1; i >= 0; --i)
958                 crypto_unregister_skcipher(&al    1091                 crypto_unregister_skcipher(&algs[i]);
959 }                                                 1092 }
960 EXPORT_SYMBOL_GPL(crypto_unregister_skciphers)    1093 EXPORT_SYMBOL_GPL(crypto_unregister_skciphers);
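/*
 * Illustrative sketch (not part of skcipher.c): how a driver typically
 * feeds the batch register/unregister helpers above.  The driver name,
 * context struct and callbacks (example_*) are placeholders.
 */
static struct skcipher_alg example_algs[] = { {
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "cbc-aes-example",
	.base.cra_priority	= 100,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct example_ctx),
	.base.cra_module	= THIS_MODULE,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= example_setkey,
	.encrypt		= example_encrypt,
	.decrypt		= example_decrypt,
} };

static int __init example_mod_init(void)
{
	return crypto_register_skciphers(example_algs,
					 ARRAY_SIZE(example_algs));
}

static void __exit example_mod_exit(void)
{
	crypto_unregister_skciphers(example_algs, ARRAY_SIZE(example_algs));
}

module_init(example_mod_init);
module_exit(example_mod_exit);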
961                                                   1094 
962 int skcipher_register_instance(struct crypto_t    1095 int skcipher_register_instance(struct crypto_template *tmpl,
963                            struct skcipher_ins    1096                            struct skcipher_instance *inst)
964 {                                                 1097 {
965         int err;                                  1098         int err;
966                                                   1099 
967         if (WARN_ON(!inst->free))              << 
968                 return -EINVAL;                << 
969                                                << 
970         err = skcipher_prepare_alg(&inst->alg)    1100         err = skcipher_prepare_alg(&inst->alg);
971         if (err)                                  1101         if (err)
972                 return err;                       1102                 return err;
973                                                   1103 
974         return crypto_register_instance(tmpl,     1104         return crypto_register_instance(tmpl, skcipher_crypto_instance(inst));
975 }                                                 1105 }
976 EXPORT_SYMBOL_GPL(skcipher_register_instance);    1106 EXPORT_SYMBOL_GPL(skcipher_register_instance);
977                                                   1107 
978 static int skcipher_setkey_simple(struct crypt    1108 static int skcipher_setkey_simple(struct crypto_skcipher *tfm, const u8 *key,
979                                   unsigned int    1109                                   unsigned int keylen)
980 {                                                 1110 {
981         struct crypto_cipher *cipher = skciphe    1111         struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
                                                   >> 1112         int err;
982                                                   1113 
983         crypto_cipher_clear_flags(cipher, CRYP    1114         crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
984         crypto_cipher_set_flags(cipher, crypto    1115         crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &
985                                 CRYPTO_TFM_REQ    1116                                 CRYPTO_TFM_REQ_MASK);
986         return crypto_cipher_setkey(cipher, ke !! 1117         err = crypto_cipher_setkey(cipher, key, keylen);
                                                   >> 1118         crypto_skcipher_set_flags(tfm, crypto_cipher_get_flags(cipher) &
                                                   >> 1119                                   CRYPTO_TFM_RES_MASK);
                                                   >> 1120         return err;
987 }                                                 1121 }
988                                                   1122 
989 static int skcipher_init_tfm_simple(struct cry    1123 static int skcipher_init_tfm_simple(struct crypto_skcipher *tfm)
990 {                                                 1124 {
991         struct skcipher_instance *inst = skcip    1125         struct skcipher_instance *inst = skcipher_alg_instance(tfm);
992         struct crypto_cipher_spawn *spawn = sk !! 1126         struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
993         struct skcipher_ctx_simple *ctx = cryp    1127         struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
994         struct crypto_cipher *cipher;             1128         struct crypto_cipher *cipher;
995                                                   1129 
996         cipher = crypto_spawn_cipher(spawn);      1130         cipher = crypto_spawn_cipher(spawn);
997         if (IS_ERR(cipher))                       1131         if (IS_ERR(cipher))
998                 return PTR_ERR(cipher);           1132                 return PTR_ERR(cipher);
999                                                   1133 
1000         ctx->cipher = cipher;                    1134         ctx->cipher = cipher;
1001         return 0;                                1135         return 0;
1002 }                                                1136 }
1003                                                  1137 
1004 static void skcipher_exit_tfm_simple(struct c    1138 static void skcipher_exit_tfm_simple(struct crypto_skcipher *tfm)
1005 {                                                1139 {
1006         struct skcipher_ctx_simple *ctx = cry    1140         struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
1007                                                  1141 
1008         crypto_free_cipher(ctx->cipher);         1142         crypto_free_cipher(ctx->cipher);
1009 }                                                1143 }
1010                                                  1144 
1011 static void skcipher_free_instance_simple(str    1145 static void skcipher_free_instance_simple(struct skcipher_instance *inst)
1012 {                                                1146 {
1013         crypto_drop_cipher(skcipher_instance_ !! 1147         crypto_drop_spawn(skcipher_instance_ctx(inst));
1014         kfree(inst);                             1148         kfree(inst);
1015 }                                                1149 }
1016                                                  1150 
1017 /**                                              1151 /**
1018  * skcipher_alloc_instance_simple - allocate     1152  * skcipher_alloc_instance_simple - allocate instance of simple block cipher mode
1019  *                                               1153  *
1020  * Allocate an skcipher_instance for a simple    1154  * Allocate an skcipher_instance for a simple block cipher mode of operation,
1021  * e.g. cbc or ecb.  The instance context wil    1155  * e.g. cbc or ecb.  The instance context will have just a single crypto_spawn,
1022  * that for the underlying cipher.  The {min,    1156  * that for the underlying cipher.  The {min,max}_keysize, ivsize, blocksize,
1023  * alignmask, and priority are set from the u    1157  * alignmask, and priority are set from the underlying cipher but can be
1024  * overridden if needed.  The tfm context def    1158  * overridden if needed.  The tfm context defaults to skcipher_ctx_simple, and
1025  * default ->setkey(), ->init(), and ->exit()    1159  * default ->setkey(), ->init(), and ->exit() methods are installed.
1026  *                                               1160  *
1027  * @tmpl: the template being instantiated        1161  * @tmpl: the template being instantiated
1028  * @tb: the template parameters                  1162  * @tb: the template parameters
                                                   >> 1163  * @cipher_alg_ret: on success, a pointer to the underlying cipher algorithm is
                                                   >> 1164  *                  returned here.  It must be dropped with crypto_mod_put().
1029  *                                               1165  *
1030  * Return: a pointer to the new instance, or     1166  * Return: a pointer to the new instance, or an ERR_PTR().  The caller still
1031  *         needs to register the instance.       1167  *         needs to register the instance.
1032  */                                              1168  */
1033 struct skcipher_instance *skcipher_alloc_inst !! 1169 struct skcipher_instance *
1034         struct crypto_template *tmpl, struct  !! 1170 skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
                                                   >> 1171                                struct crypto_alg **cipher_alg_ret)
1035 {                                                1172 {
1036         u32 mask;                             !! 1173         struct crypto_attr_type *algt;
1037         struct skcipher_instance *inst;       << 
1038         struct crypto_cipher_spawn *spawn;    << 
1039         struct crypto_alg *cipher_alg;           1174         struct crypto_alg *cipher_alg;
                                                   >> 1175         struct skcipher_instance *inst;
                                                   >> 1176         struct crypto_spawn *spawn;
                                                   >> 1177         u32 mask;
1040         int err;                                 1178         int err;
1041                                                  1179 
1042         err = crypto_check_attr_type(tb, CRYP !! 1180         algt = crypto_get_attr_type(tb);
1043         if (err)                              !! 1181         if (IS_ERR(algt))
1044                 return ERR_PTR(err);          !! 1182                 return ERR_CAST(algt);
                                                   >> 1183 
                                                   >> 1184         if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
                                                   >> 1185                 return ERR_PTR(-EINVAL);
                                                   >> 1186 
                                                   >> 1187         mask = CRYPTO_ALG_TYPE_MASK |
                                                   >> 1188                 crypto_requires_off(algt->type, algt->mask,
                                                   >> 1189                                     CRYPTO_ALG_NEED_FALLBACK);
                                                   >> 1190 
                                                   >> 1191         cipher_alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, mask);
                                                   >> 1192         if (IS_ERR(cipher_alg))
                                                   >> 1193                 return ERR_CAST(cipher_alg);
1045                                                  1194 
1046         inst = kzalloc(sizeof(*inst) + sizeof    1195         inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
1047         if (!inst)                            !! 1196         if (!inst) {
1048                 return ERR_PTR(-ENOMEM);      !! 1197                 err = -ENOMEM;
                                                   >> 1198                 goto err_put_cipher_alg;
                                                   >> 1199         }
1049         spawn = skcipher_instance_ctx(inst);     1200         spawn = skcipher_instance_ctx(inst);
1050                                                  1201 
1051         err = crypto_grab_cipher(spawn, skcipher_crypto_instance(inst), << 
1052                                  crypto_attr_alg_name(tb[1]), 0, mask);  << 
1053         if (err)                              << 
1054                 goto err_free_inst;           << 
1055         cipher_alg = crypto_spawn_cipher_alg(spawn); << 
1056                                               << 
1057         err = crypto_inst_setname(skcipher_cr    1202         err = crypto_inst_setname(skcipher_crypto_instance(inst), tmpl->name,
1058                                   cipher_alg)    1203                                   cipher_alg);
1059         if (err)                                 1204         if (err)
1060                 goto err_free_inst;              1205                 goto err_free_inst;
1061                                                  1206 
                                                   >> 1207         err = crypto_init_spawn(spawn, cipher_alg,
                                                   >> 1208                                 skcipher_crypto_instance(inst),
                                                   >> 1209                                 CRYPTO_ALG_TYPE_MASK);
                                                   >> 1210         if (err)
                                                   >> 1211                 goto err_free_inst;
1062         inst->free = skcipher_free_instance_s    1212         inst->free = skcipher_free_instance_simple;
1063                                                  1213 
1064         /* Default algorithm properties, can     1214         /* Default algorithm properties, can be overridden */
1065         inst->alg.base.cra_blocksize = cipher    1215         inst->alg.base.cra_blocksize = cipher_alg->cra_blocksize;
1066         inst->alg.base.cra_alignmask = cipher    1216         inst->alg.base.cra_alignmask = cipher_alg->cra_alignmask;
1067         inst->alg.base.cra_priority = cipher_    1217         inst->alg.base.cra_priority = cipher_alg->cra_priority;
1068         inst->alg.min_keysize = cipher_alg->c    1218         inst->alg.min_keysize = cipher_alg->cra_cipher.cia_min_keysize;
1069         inst->alg.max_keysize = cipher_alg->c    1219         inst->alg.max_keysize = cipher_alg->cra_cipher.cia_max_keysize;
1070         inst->alg.ivsize = cipher_alg->cra_bl    1220         inst->alg.ivsize = cipher_alg->cra_blocksize;
1071                                                  1221 
1072         /* Use skcipher_ctx_simple by default    1222         /* Use skcipher_ctx_simple by default, can be overridden */
1073         inst->alg.base.cra_ctxsize = sizeof(s    1223         inst->alg.base.cra_ctxsize = sizeof(struct skcipher_ctx_simple);
1074         inst->alg.setkey = skcipher_setkey_si    1224         inst->alg.setkey = skcipher_setkey_simple;
1075         inst->alg.init = skcipher_init_tfm_si    1225         inst->alg.init = skcipher_init_tfm_simple;
1076         inst->alg.exit = skcipher_exit_tfm_si    1226         inst->alg.exit = skcipher_exit_tfm_simple;
1077                                                  1227 
                                                   >> 1228         *cipher_alg_ret = cipher_alg;
1078         return inst;                             1229         return inst;
1079                                                  1230 
1080 err_free_inst:                                   1231 err_free_inst:
1081         skcipher_free_instance_simple(inst);  !! 1232         kfree(inst);
                                                   >> 1233 err_put_cipher_alg:
                                                   >> 1234         crypto_mod_put(cipher_alg);
1082         return ERR_PTR(err);                     1235         return ERR_PTR(err);
1083 }                                                1236 }
1084 EXPORT_SYMBOL_GPL(skcipher_alloc_instance_sim    1237 EXPORT_SYMBOL_GPL(skcipher_alloc_instance_simple);
1085                                                  1238 
1086 MODULE_LICENSE("GPL");                           1239 MODULE_LICENSE("GPL");
1087 MODULE_DESCRIPTION("Symmetric key cipher type    1240 MODULE_DESCRIPTION("Symmetric key cipher type");
1088 MODULE_IMPORT_NS(CRYPTO_INTERNAL);            << 
1089                                                  1241 
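How a template typically consumes this helper (an illustrative sketch, not part of skcipher.c): a simple length-preserving mode implements a ->create() callback that calls skcipher_alloc_instance_simple(), overrides whichever defaults it needs, fills in its own encrypt/decrypt handlers, and registers the instance. The names crypto_xxx_create, crypto_xxx_encrypt and crypto_xxx_decrypt below are hypothetical placeholders; the flow is modelled on the linux-6.11-era two-argument signature shown in the left-hand column.

static int crypto_xxx_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct skcipher_instance *inst;
        int err;

        /* Allocate the instance; the helper also grabs the underlying cipher. */
        inst = skcipher_alloc_instance_simple(tmpl, tb);
        if (IS_ERR(inst))
                return PTR_ERR(inst);

        /* Override the defaults where the mode differs, e.g. no IV for an ECB-like mode. */
        inst->alg.ivsize = 0;

        /* Hypothetical mode-specific handlers. */
        inst->alg.encrypt = crypto_xxx_encrypt;
        inst->alg.decrypt = crypto_xxx_decrypt;

        err = skcipher_register_instance(tmpl, inst);
        if (err)
                inst->free(inst);       /* skcipher_free_instance_simple() */

        return err;
}

With the linux-5.3-era signature shown in the right-hand column, the same caller would instead pass a third argument (&cipher_alg), use the returned underlying algorithm while filling in the instance, and drop the reference with crypto_mod_put() after registering (or freeing) the instance.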
