// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CMAC: Cipher Block Mode for Authentication
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * Based on work by:
 *  Copyright © 2013 Tom St Denis <tstdenis@elliptictech.com>
 * Based on crypto/xcbc.c:
 *  Copyright © 2006 USAGI/WIDE Project,
 *   Author: Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | cmac_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
struct cmac_tfm_ctx {
	struct crypto_cipher *child;
	__be64 consts[];
};

/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | cmac_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
struct cmac_desc_ctx {
	unsigned int len;
	u8 odds[];
};

static int crypto_cmac_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
	unsigned int bs = crypto_shash_blocksize(parent);
	__be64 *consts = ctx->consts;
	u64 _const[2];
	int i, err = 0;
	u8 msb_mask, gfmask;

	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
	if (err)
		return err;

	/* encrypt the zero block */
	memset(consts, 0, bs);
	crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts);

	switch (bs) {
	case 16:
		gfmask = 0x87;
		_const[0] = be64_to_cpu(consts[1]);
		_const[1] = be64_to_cpu(consts[0]);

		/* gf(2^128) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 4; i += 2) {
			msb_mask = ((s64)_const[1] >> 63) & gfmask;
			_const[1] = (_const[1] << 1) | (_const[0] >> 63);
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i + 0] = cpu_to_be64(_const[1]);
			consts[i + 1] = cpu_to_be64(_const[0]);
		}

		break;
	case 8:
		gfmask = 0x1B;
		_const[0] = be64_to_cpu(consts[0]);

		/* gf(2^64) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 2; i++) {
			msb_mask = ((s64)_const[0] >> 63) & gfmask;
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i] = cpu_to_be64(_const[0]);
		}

		break;
	}

	return 0;
}

static int crypto_cmac_digest_init(struct shash_desc *pdesc)
{
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = &ctx->odds[bs];

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}

static int crypto_cmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;

	/* checking the data can fill the block */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* filling odds with new data and encrypting it */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* clearing the length */
	ctx->len = 0;

	/* encrypting the rest of data */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* keeping the surplus of blocksize */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}

static int crypto_cmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;
	unsigned int offset = 0;

	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, (const u8 *)tctx->consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}

static int cmac_init_tfm(struct crypto_shash *tfm)
{
	struct shash_instance *inst = shash_alg_instance(tfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher_spawn *spawn;
	struct crypto_cipher *cipher;

	spawn = shash_instance_ctx(inst);
	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static int cmac_clone_tfm(struct crypto_shash *tfm, struct crypto_shash *otfm)
{
	struct cmac_tfm_ctx *octx = crypto_shash_ctx(otfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_clone_cipher(octx->child);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cmac_exit_tfm(struct crypto_shash *tfm)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	switch (alg->cra_blocksize) {
	case 16:
	case 8:
		break;
	default:
		err = -EINVAL;
		goto err_free_inst;
	}

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_ctxsize = sizeof(struct cmac_tfm_ctx) +
				     alg->cra_blocksize * 2;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = sizeof(struct cmac_desc_ctx) +
			     alg->cra_blocksize * 2;
	inst->alg.init = crypto_cmac_digest_init;
	inst->alg.update = crypto_cmac_digest_update;
	inst->alg.final = crypto_cmac_digest_final;
	inst->alg.setkey = crypto_cmac_digest_setkey;
	inst->alg.init_tfm = cmac_init_tfm;
	inst->alg.clone_tfm = cmac_clone_tfm;
	inst->alg.exit_tfm = cmac_exit_tfm;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

static struct crypto_template crypto_cmac_tmpl = {
	.name = "cmac",
	.create = cmac_create,
	.module = THIS_MODULE,
};

static int __init crypto_cmac_module_init(void)
{
	return crypto_register_template(&crypto_cmac_tmpl);
}

static void __exit crypto_cmac_module_exit(void)
{
	crypto_unregister_template(&crypto_cmac_tmpl);
}

subsys_initcall(crypto_cmac_module_init);
module_exit(crypto_cmac_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CMAC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("cmac");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
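
/*
 * Illustrative usage sketch, not part of the cmac implementation above:
 * a minimal example of how kernel code might instantiate this template as
 * "cmac(aes)" through the shash API and compute one MAC over a flat buffer.
 * The function name cmac_usage_sketch() and its parameters are hypothetical;
 * it assumes <crypto/hash.h> and that the AES cipher is available, and it
 * keeps error handling to the minimum needed to stay correct.
 */
#include <crypto/hash.h>

static int __maybe_unused cmac_usage_sketch(const u8 *key, unsigned int keylen,
					    const u8 *data, unsigned int len,
					    u8 *mac)
{
	struct crypto_shash *tfm;
	int err;

	/* "cmac(aes)" resolves to this template wrapped around the AES cipher */
	tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* the key is passed straight through to the underlying cipher */
	err = crypto_shash_setkey(tfm, key, keylen);
	if (!err)
		/* one-shot digest; mac must hold crypto_shash_digestsize(tfm) bytes */
		err = crypto_shash_tfm_digest(tfm, data, len, mac);

	crypto_free_shash(tfm);
	return err;
}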