
TOMOYO Linux Cross Reference
Linux/crypto/cts.c

/*
 * CTS: Cipher Text Stealing mode
 *
 * COPYRIGHT (c) 2008
 * The Regents of the University of Michigan
 * ALL RIGHTS RESERVED
 *
 * Permission is granted to use, copy, create derivative works
 * and redistribute this software and such derivative works
 * for any purpose, so long as the name of The University of
 * Michigan is not used in any advertising or publicity
 * pertaining to the use of distribution of this software
 * without specific, written prior authorization.  If the
 * above copyright notice or any other identification of the
 * University of Michigan is included in any copy of any
 * portion of this software, then the disclaimer below must
 * also be included.
 *
 * THIS SOFTWARE IS PROVIDED AS IS, WITHOUT REPRESENTATION
 * FROM THE UNIVERSITY OF MICHIGAN AS TO ITS FITNESS FOR ANY
 * PURPOSE, AND WITHOUT WARRANTY BY THE UNIVERSITY OF
 * MICHIGAN OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING
 * WITHOUT LIMITATION THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
 * REGENTS OF THE UNIVERSITY OF MICHIGAN SHALL NOT BE LIABLE
 * FOR ANY DAMAGES, INCLUDING SPECIAL, INDIRECT, INCIDENTAL, OR
 * CONSEQUENTIAL DAMAGES, WITH RESPECT TO ANY CLAIM ARISING
 * OUT OF OR IN CONNECTION WITH THE USE OF THE SOFTWARE, EVEN
 * IF IT HAS BEEN OR IS HEREAFTER ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGES.
 */

/* Derived from various:
 *      Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

/*
 * This is the Cipher Text Stealing mode as described by
 * Section 8 of rfc2040 and referenced by rfc3962.
 * rfc3962 includes errata information in its Appendix A.
 */

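/*
 * Usage sketch, for illustration only (not taken from this file; the
 * variables "key", "buf", "len", "iv" and "wait" are placeholders):
 * an in-kernel caller reaches this template through the generic
 * skcipher API by asking for "cts(cbc(aes))", roughly as follows,
 * with error handling omitted:
 *
 *	tfm = crypto_alloc_skcipher("cts(cbc(aes))", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 16);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, len);         len >= 16, need not be a
 *	                                    multiple of the block size
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
 *				      CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * where "wait" is a DECLARE_CRYPTO_WAIT() and "iv" is a buffer of one
 * block (16 bytes for AES).
 */
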
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <linux/slab.h>
#include <linux/compiler.h>

struct crypto_cts_ctx {
        struct crypto_skcipher *child;
};

struct crypto_cts_reqctx {
        struct scatterlist sg[2];
        unsigned offset;
        struct skcipher_request subreq;
};

static inline u8 *crypto_cts_reqctx_space(struct skcipher_request *req)
{
        struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *child = ctx->child;

        return PTR_ALIGN((u8 *)(rctx + 1) + crypto_skcipher_reqsize(child),
                         crypto_skcipher_alignmask(tfm) + 1);
}

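/*
 * For orientation (an approximate description, not from the original
 * file): the per-request area reserved by crypto_cts_init_tfm() is laid
 * out roughly as
 *
 *	struct crypto_cts_reqctx   (including the embedded subreq)
 *	child transform's request context
 *	alignment padding up to the tfm's alignmask
 *	one block of scratch space
 *
 * crypto_cts_reqctx_space() returns a pointer to that final one-block
 * scratch area, which the decrypt path uses to stash an IV/ciphertext
 * block.
 */
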
static int crypto_cts_setkey(struct crypto_skcipher *parent, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(parent);
        struct crypto_skcipher *child = ctx->child;

        crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
                                         CRYPTO_TFM_REQ_MASK);
        return crypto_skcipher_setkey(child, key, keylen);
}

static void cts_cbc_crypt_done(void *data, int err)
{
        struct skcipher_request *req = data;

        if (err == -EINPROGRESS)
                return;

        skcipher_request_complete(req, err);
}

static int cts_cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct skcipher_request *subreq = &rctx->subreq;
        int bsize = crypto_skcipher_blocksize(tfm);
        u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
        struct scatterlist *sg;
        unsigned int offset;
        int lastn;

        offset = rctx->offset;
        lastn = req->cryptlen - offset;

        /* Grab the last full ciphertext block Cn-1 produced by the CBC pass. */
        sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);
        scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);

        /* Zero-pad the final partial plaintext block Pn. */
        memset(d, 0, bsize);
        scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);

        /*
         * Write the padded Pn over Cn-1's slot and "steal" the leading
         * lastn bytes of Cn-1 as the truncated final ciphertext block.
         */
        scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
        memzero_explicit(d, sizeof(d));

        skcipher_request_set_callback(subreq, req->base.flags &
                                              CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      cts_cbc_crypt_done, req);
        /* CBC-encrypt the padded Pn in place; the chained IV already holds Cn-1. */
        skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv);
        return crypto_skcipher_encrypt(subreq);
}

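/*
 * Worked example, for illustration: with AES (bsize = 16) and
 * req->cryptlen = 36, crypto_cts_encrypt() first CBC-encrypts
 * offset = rounddown(35, 16) = 32 bytes, producing C1 || C2.
 * cts_cbc_encrypt() then zero-pads the remaining lastn = 4 plaintext
 * bytes, copies the first 4 bytes of C2 to the tail of the output and
 * re-encrypts the padded block in place, so the final ciphertext is
 * C1 || C3 || C2[0..3] with C3 = E(K, C2 xor (Pn || zero padding)),
 * matching the block ordering used by RFC 3962.
 */
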
static void crypto_cts_encrypt_done(void *data, int err)
{
        struct skcipher_request *req = data;

        if (err)
                goto out;

        err = cts_cbc_encrypt(req);
        if (err == -EINPROGRESS || err == -EBUSY)
                return;

out:
        skcipher_request_complete(req, err);
}

static int crypto_cts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
        struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_request *subreq = &rctx->subreq;
        int bsize = crypto_skcipher_blocksize(tfm);
        unsigned int nbytes = req->cryptlen;
        unsigned int offset;

        skcipher_request_set_tfm(subreq, ctx->child);

        if (nbytes < bsize)
                return -EINVAL;

        /* A single block needs no stealing: hand it straight to CBC. */
        if (nbytes == bsize) {
                skcipher_request_set_callback(subreq, req->base.flags,
                                              req->base.complete,
                                              req->base.data);
                skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes,
                                           req->iv);
                return crypto_skcipher_encrypt(subreq);
        }

        /* CBC-encrypt everything except the final 1..bsize tail bytes. */
        offset = rounddown(nbytes - 1, bsize);
        rctx->offset = offset;

        skcipher_request_set_callback(subreq, req->base.flags,
                                      crypto_cts_encrypt_done, req);
        skcipher_request_set_crypt(subreq, req->src, req->dst,
                                   offset, req->iv);

        /* Then fix up the last two blocks with ciphertext stealing. */
        return crypto_skcipher_encrypt(subreq) ?:
               cts_cbc_encrypt(req);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct skcipher_request *subreq = &rctx->subreq;
        int bsize = crypto_skcipher_blocksize(tfm);
        u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
        struct scatterlist *sg;
        unsigned int offset;
        u8 *space;
        int lastn;

        offset = rctx->offset;
        lastn = req->cryptlen - offset;

        sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);

        /* 1. Decrypt Cn-1 (s) to create Dn */
        scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);
        space = crypto_cts_reqctx_space(req);
        crypto_xor(d + bsize, space, bsize);
        /* 2. Pad Cn with zeros at the end to create C of length BB */
        memset(d, 0, bsize);
        scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);
        /* 3. Exclusive-or Dn with C to create Xn */
        /* 4. Select the first Ln bytes of Xn to create Pn */
        crypto_xor(d + bsize, d, lastn);

        /* 5. Append the tail (BB - Ln) bytes of Xn to Cn to create En */
        memcpy(d + lastn, d + bsize + lastn, bsize - lastn);
        /* 6. Decrypt En to create Pn-1 */

        scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
        memzero_explicit(d, sizeof(d));

        skcipher_request_set_callback(subreq, req->base.flags &
                                              CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      cts_cbc_crypt_done, req);

        skcipher_request_set_crypt(subreq, sg, sg, bsize, space);
        return crypto_skcipher_decrypt(subreq);
}

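/*
 * Note for readers (an editorial gloss on the numbered steps above):
 * they follow the ciphertext-stealing decryption procedure described in
 * Section 8 of RFC 2040, which is referenced in the header comment.
 * Step 6 is not performed inline: the reconstructed block En is written
 * back to the destination, and the subrequest issued at the end of
 * cts_cbc_decrypt() CBC-decrypts it using the block held in the
 * per-request scratch space (the original IV or Cn-2, saved by
 * crypto_cts_decrypt() below).
 */
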
static void crypto_cts_decrypt_done(void *data, int err)
{
        struct skcipher_request *req = data;

        if (err)
                goto out;

        err = cts_cbc_decrypt(req);
        if (err == -EINPROGRESS || err == -EBUSY)
                return;

out:
        skcipher_request_complete(req, err);
}

static int crypto_cts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
        struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_request *subreq = &rctx->subreq;
        int bsize = crypto_skcipher_blocksize(tfm);
        unsigned int nbytes = req->cryptlen;
        unsigned int offset;
        u8 *space;

        skcipher_request_set_tfm(subreq, ctx->child);

        if (nbytes < bsize)
                return -EINVAL;

        /* A single block needs no stealing: hand it straight to CBC. */
        if (nbytes == bsize) {
                skcipher_request_set_callback(subreq, req->base.flags,
                                              req->base.complete,
                                              req->base.data);
                skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes,
                                           req->iv);
                return crypto_skcipher_decrypt(subreq);
        }

        skcipher_request_set_callback(subreq, req->base.flags,
                                      crypto_cts_decrypt_done, req);

        space = crypto_cts_reqctx_space(req);

        offset = rounddown(nbytes - 1, bsize);
        rctx->offset = offset;

        /*
         * Stash the block that the final fix-up must be chained with:
         * the original IV when only one full block precedes the tail,
         * otherwise the ciphertext block in front of the last two.  The
         * CBC pass below advances req->iv, so it has to be saved now.
         */
        if (offset <= bsize)
                memcpy(space, req->iv, bsize);
        else
                scatterwalk_map_and_copy(space, req->src, offset - 2 * bsize,
                                         bsize, 0);

        skcipher_request_set_crypt(subreq, req->src, req->dst,
                                   offset, req->iv);

        return crypto_skcipher_decrypt(subreq) ?:
               cts_cbc_decrypt(req);
}

static int crypto_cts_init_tfm(struct crypto_skcipher *tfm)
{
        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
        struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
        struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *cipher;
        unsigned reqsize;
        unsigned bsize;
        unsigned align;

        cipher = crypto_spawn_skcipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;

        align = crypto_skcipher_alignmask(tfm);
        bsize = crypto_skcipher_blocksize(cipher);
        reqsize = ALIGN(sizeof(struct crypto_cts_reqctx) +
                        crypto_skcipher_reqsize(cipher),
                        crypto_tfm_ctx_alignment()) +
                  (align & ~(crypto_tfm_ctx_alignment() - 1)) + bsize;

        crypto_skcipher_set_reqsize(tfm, reqsize);

        return 0;
}

static void crypto_cts_exit_tfm(struct crypto_skcipher *tfm)
{
        struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_skcipher(ctx->child);
}

static void crypto_cts_free(struct skcipher_instance *inst)
{
        crypto_drop_skcipher(skcipher_instance_ctx(inst));
        kfree(inst);
}

static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct crypto_skcipher_spawn *spawn;
        struct skcipher_alg_common *alg;
        struct skcipher_instance *inst;
        u32 mask;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
        if (err)
                return err;

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        spawn = skcipher_instance_ctx(inst);

        err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
                                   crypto_attr_alg_name(tb[1]), 0, mask);
        if (err)
                goto err_free_inst;

        alg = crypto_spawn_skcipher_alg_common(spawn);

        /* Only wrap CBC modes whose IV size equals the block size. */
        err = -EINVAL;
        if (alg->ivsize != alg->base.cra_blocksize)
                goto err_free_inst;

        if (strncmp(alg->base.cra_name, "cbc(", 4))
                goto err_free_inst;

        err = crypto_inst_setname(skcipher_crypto_instance(inst), "cts",
                                  &alg->base);
        if (err)
                goto err_free_inst;

        inst->alg.base.cra_priority = alg->base.cra_priority;
        inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
        inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

        inst->alg.ivsize = alg->base.cra_blocksize;
        inst->alg.chunksize = alg->chunksize;
        inst->alg.min_keysize = alg->min_keysize;
        inst->alg.max_keysize = alg->max_keysize;

        inst->alg.base.cra_ctxsize = sizeof(struct crypto_cts_ctx);

        inst->alg.init = crypto_cts_init_tfm;
        inst->alg.exit = crypto_cts_exit_tfm;

        inst->alg.setkey = crypto_cts_setkey;
        inst->alg.encrypt = crypto_cts_encrypt;
        inst->alg.decrypt = crypto_cts_decrypt;

        inst->free = crypto_cts_free;

        err = skcipher_register_instance(tmpl, inst);
        if (err) {
err_free_inst:
                crypto_cts_free(inst);
        }
        return err;
}

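/*
 * Example, for illustration: instantiating "cts(cbc(aes))" passes the
 * checks in crypto_cts_create(), since cbc(aes) has a 16-byte block
 * size, a matching 16-byte IV, and a cra_name that starts with "cbc(".
 * A request such as "cts(ecb(aes))" fails those checks (ecb has no IV
 * and is not a cbc mode) and is rejected with -EINVAL.
 */
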
static struct crypto_template crypto_cts_tmpl = {
        .name = "cts",
        .create = crypto_cts_create,
        .module = THIS_MODULE,
};

static int __init crypto_cts_module_init(void)
{
        return crypto_register_template(&crypto_cts_tmpl);
}

static void __exit crypto_cts_module_exit(void)
{
        crypto_unregister_template(&crypto_cts_tmpl);
}

subsys_initcall(crypto_cts_module_init);
module_exit(crypto_cts_module_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_DESCRIPTION("CTS-CBC CipherText Stealing for CBC");
MODULE_ALIAS_CRYPTO("cts");
