// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CMAC: Cipher Block Mode for Authentication
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * Based on work by:
 *  Copyright © 2013 Tom St Denis <tstdenis@elliptictech.com>
 * Based on crypto/xcbc.c:
 *  Copyright © 2006 USAGI/WIDE Project,
 *   Author: Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | cmac_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
struct cmac_tfm_ctx {
	struct crypto_cipher *child;
	__be64 consts[];
};

/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | cmac_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
struct cmac_desc_ctx {
	unsigned int len;
	u8 odds[];
};

static int crypto_cmac_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
	unsigned int bs = crypto_shash_blocksize(parent);
	__be64 *consts = ctx->consts;
	u64 _const[2];
	int i, err = 0;
	u8 msb_mask, gfmask;

	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
	if (err)
		return err;

	/* encrypt the zero block */
	memset(consts, 0, bs);
	crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts);

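	/*
	 * The switch below derives the two CMAC subkeys from the encrypted
	 * zero block L: K1 = L * u and K2 = L * u^2 in GF(2^(bs * 8)),
	 * i.e. one and two left shifts with a conditional XOR of the field
	 * polynomial (0x87 for 128-bit blocks, 0x1B for 64-bit blocks).
	 * K1 and K2 end up back to back in consts[].
	 */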
	switch (bs) {
	case 16:
		gfmask = 0x87;
		_const[0] = be64_to_cpu(consts[1]);
		_const[1] = be64_to_cpu(consts[0]);

		/* gf(2^128) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 4; i += 2) {
			msb_mask = ((s64)_const[1] >> 63) & gfmask;
			_const[1] = (_const[1] << 1) | (_const[0] >> 63);
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i + 0] = cpu_to_be64(_const[1]);
			consts[i + 1] = cpu_to_be64(_const[0]);
		}

		break;
	case 8:
		gfmask = 0x1B;
		_const[0] = be64_to_cpu(consts[0]);

		/* gf(2^64) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 2; i++) {
			msb_mask = ((s64)_const[0] >> 63) & gfmask;
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i] = cpu_to_be64(_const[0]);
		}

		break;
	}

	return 0;
}

static int crypto_cmac_digest_init(struct shash_desc *pdesc)
{
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = &ctx->odds[bs];

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}

static int crypto_cmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;

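	/*
	 * odds[] buffers a partial block between update() calls and prev[]
	 * carries the running CBC-MAC state.  Note the "<=" and ">" tests
	 * below: the last (possibly complete) block is always held back in
	 * odds[] so that final() can fold in the proper subkey before the
	 * last encryption.
	 */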
	/* checking the data can fill the block */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* filling odds with new data and encrypting it */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* clearing the length */
	ctx->len = 0;

	/* encrypting the rest of data */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* keeping the surplus of blocksize */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}

static int crypto_cmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;
	unsigned int offset = 0;

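	/*
	 * A complete final block is XORed with subkey K1 (consts[0..bs-1]);
	 * an incomplete one gets the 10* padding below and is XORed with
	 * K2 (consts[bs..2*bs-1]), selected via offset.
	 */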
	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, (const u8 *)tctx->consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}

static int cmac_init_tfm(struct crypto_shash *tfm)
{
	struct shash_instance *inst = shash_alg_instance(tfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher_spawn *spawn;
	struct crypto_cipher *cipher;

	spawn = shash_instance_ctx(inst);
	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static int cmac_clone_tfm(struct crypto_shash *tfm, struct crypto_shash *otfm)
{
	struct cmac_tfm_ctx *octx = crypto_shash_ctx(otfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_clone_cipher(octx->child);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cmac_exit_tfm(struct crypto_shash *tfm)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	switch (alg->cra_blocksize) {
	case 16:
	case 8:
		break;
	default:
		err = -EINVAL;
		goto err_free_inst;
	}

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
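	/*
	 * Reserve space behind the fixed-size contexts: the two block-sized
	 * subkeys (consts[]) in the tfm context, and the odds[] + prev[]
	 * buffers in the per-request descriptor.
	 */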
	inst->alg.base.cra_ctxsize = sizeof(struct cmac_tfm_ctx) +
				     alg->cra_blocksize * 2;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = sizeof(struct cmac_desc_ctx) +
			     alg->cra_blocksize * 2;
	inst->alg.init = crypto_cmac_digest_init;
	inst->alg.update = crypto_cmac_digest_update;
	inst->alg.final = crypto_cmac_digest_final;
	inst->alg.setkey = crypto_cmac_digest_setkey;
	inst->alg.init_tfm = cmac_init_tfm;
	inst->alg.clone_tfm = cmac_clone_tfm;
	inst->alg.exit_tfm = cmac_exit_tfm;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

static struct crypto_template crypto_cmac_tmpl = {
	.name = "cmac",
	.create = cmac_create,
	.module = THIS_MODULE,
};

static int __init crypto_cmac_module_init(void)
{
	return crypto_register_template(&crypto_cmac_tmpl);
}

static void __exit crypto_cmac_module_exit(void)
{
	crypto_unregister_template(&crypto_cmac_tmpl);
}

subsys_initcall(crypto_cmac_module_init);
module_exit(crypto_cmac_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CMAC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("cmac");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
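
/*
 * Minimal usage sketch (illustrative only): computing a CMAC tag over a flat
 * buffer through the shash API that this template plugs into.  The helper
 * name cmac_aes_digest is hypothetical and error handling is abbreviated;
 * mac must hold at least crypto_shash_digestsize() bytes (the cipher block
 * size, 16 for AES).
 *
 *	#include <crypto/hash.h>
 *
 *	static int cmac_aes_digest(const u8 *key, unsigned int keylen,
 *				   const u8 *data, unsigned int len, u8 *mac)
 *	{
 *		struct crypto_shash *tfm;
 *		int err;
 *
 *		tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *
 *		err = crypto_shash_setkey(tfm, key, keylen);
 *		if (!err)
 *			err = crypto_shash_tfm_digest(tfm, data, len, mac);
 *
 *		crypto_free_shash(tfm);
 *		return err;
 *	}
 */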