// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CMAC: Cipher Block Mode for Authentication
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * Based on work by:
 *  Copyright © 2013 Tom St Denis <tstdenis@elliptictech.com>
 * Based on crypto/xcbc.c:
 *  Copyright © 2006 USAGI/WIDE Project,
 *   Author: Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | cmac_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
struct cmac_tfm_ctx {
	struct crypto_cipher *child;
	__be64 consts[];
};

/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | cmac_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
struct cmac_desc_ctx {
	unsigned int len;
	u8 odds[];
};

static int crypto_cmac_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
	unsigned int bs = crypto_shash_blocksize(parent);
	__be64 *consts = ctx->consts;
	u64 _const[2];
	int i, err = 0;
	u8 msb_mask, gfmask;

	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
	if (err)
		return err;

	/* encrypt the zero block */
	memset(consts, 0, bs);
	crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts);

	switch (bs) {
	case 16:
		gfmask = 0x87;
		_const[0] = be64_to_cpu(consts[1]);
		_const[1] = be64_to_cpu(consts[0]);

		/* gf(2^128) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 4; i += 2) {
			msb_mask = ((s64)_const[1] >> 63) & gfmask;
			_const[1] = (_const[1] << 1) | (_const[0] >> 63);
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i + 0] = cpu_to_be64(_const[1]);
			consts[i + 1] = cpu_to_be64(_const[0]);
		}

		break;
	case 8:
		gfmask = 0x1B;
		_const[0] = be64_to_cpu(consts[0]);

		/* gf(2^64) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 2; i++) {
			msb_mask = ((s64)_const[0] >> 63) & gfmask;
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i] = cpu_to_be64(_const[0]);
		}

		break;
	}

	return 0;
}

static int crypto_cmac_digest_init(struct shash_desc *pdesc)
{
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = &ctx->odds[bs];

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}

static int crypto_cmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;

	/* checking the data can fill the block */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* filling odds with new data and encrypting it */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* clearing the length */
	ctx->len = 0;

	/* encrypting the rest of data */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* keeping the surplus of blocksize */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}

static int crypto_cmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;
	unsigned int offset = 0;

	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, (const u8 *)tctx->consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}

static int cmac_init_tfm(struct crypto_shash *tfm)
{
	struct shash_instance *inst = shash_alg_instance(tfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher_spawn *spawn;
	struct crypto_cipher *cipher;

	spawn = shash_instance_ctx(inst);
	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static int cmac_clone_tfm(struct crypto_shash *tfm, struct crypto_shash *otfm)
{
	struct cmac_tfm_ctx *octx = crypto_shash_ctx(otfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_clone_cipher(octx->child);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cmac_exit_tfm(struct crypto_shash *tfm)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	switch (alg->cra_blocksize) {
	case 16:
	case 8:
		break;
	default:
		err = -EINVAL;
		goto err_free_inst;
	}

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_ctxsize = sizeof(struct cmac_tfm_ctx) +
				     alg->cra_blocksize * 2;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = sizeof(struct cmac_desc_ctx) +
			     alg->cra_blocksize * 2;
	inst->alg.init = crypto_cmac_digest_init;
	inst->alg.update = crypto_cmac_digest_update;
	inst->alg.final = crypto_cmac_digest_final;
	inst->alg.setkey = crypto_cmac_digest_setkey;
	inst->alg.init_tfm = cmac_init_tfm;
	inst->alg.clone_tfm = cmac_clone_tfm;
	inst->alg.exit_tfm = cmac_exit_tfm;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

static struct crypto_template crypto_cmac_tmpl = {
	.name = "cmac",
	.create = cmac_create,
	.module = THIS_MODULE,
};

static int __init crypto_cmac_module_init(void)
{
	return crypto_register_template(&crypto_cmac_tmpl);
}

static void __exit crypto_cmac_module_exit(void)
{
	crypto_unregister_template(&crypto_cmac_tmpl);
}

subsys_initcall(crypto_cmac_module_init);
module_exit(crypto_cmac_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CMAC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("cmac");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
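/*
 * A minimal usage sketch (illustrative only, not part of crypto/cmac.c):
 * once the "cmac" template above is registered, kernel code can request an
 * instance such as "cmac(aes)" through the generic shash API.  The helper
 * name cmac_aes_digest_example and its parameters are hypothetical; a
 * standalone caller would also need <crypto/hash.h> and <linux/err.h>, a
 * 16/24/32-byte AES key, and a buffer of crypto_shash_digestsize() bytes
 * (16 for AES) for the resulting MAC.
 */
static int cmac_aes_digest_example(const u8 *key, unsigned int keylen,
				   const u8 *data, unsigned int len,
				   u8 *mac)
{
	struct crypto_shash *tfm;
	int err;

	/* instantiate "cmac(aes)" via the template registered above */
	tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, keylen);
	if (!err) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* one-shot digest: init + update + final */
		err = crypto_shash_digest(desc, data, len, mac);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}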