// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CMAC: Cipher Block Mode for Authentication
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * Based on work by:
 *  Copyright © 2013 Tom St Denis <tstdenis@elliptictech.com>
 * Based on crypto/xcbc.c:
 *  Copyright © 2006 USAGI/WIDE Project,
 *  Author: Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | cmac_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
struct cmac_tfm_ctx {
	struct crypto_cipher *child;
	__be64 consts[];
};

/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | cmac_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
struct cmac_desc_ctx {
	unsigned int len;
	u8 odds[];
};

static int crypto_cmac_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
	unsigned int bs = crypto_shash_blocksize(parent);
	__be64 *consts = ctx->consts;
	u64 _const[2];
	int i, err = 0;
	u8 msb_mask, gfmask;

	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
	if (err)
		return err;

	/* encrypt the zero block */
	memset(consts, 0, bs);
	crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts);

	switch (bs) {
	case 16:
		gfmask = 0x87;
		_const[0] = be64_to_cpu(consts[1]);
		_const[1] = be64_to_cpu(consts[0]);

		/* gf(2^128) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 4; i += 2) {
			msb_mask = ((s64)_const[1] >> 63) & gfmask;
			_const[1] = (_const[1] << 1) | (_const[0] >> 63);
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i + 0] = cpu_to_be64(_const[1]);
			consts[i + 1] = cpu_to_be64(_const[0]);
		}

		break;
	case 8:
		gfmask = 0x1B;
		_const[0] = be64_to_cpu(consts[0]);

		/* gf(2^64) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 2; i++) {
			msb_mask = ((s64)_const[0] >> 63) & gfmask;
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i] = cpu_to_be64(_const[0]);
		}

		break;
	}

	return 0;
}

static int crypto_cmac_digest_init(struct shash_desc *pdesc)
{
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = &ctx->odds[bs];

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}

static int crypto_cmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;

	/* checking the data can fill the block */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* filling odds with new data and encrypting it */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* clearing the length */
	ctx->len = 0;

	/* encrypting the rest of data */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* keeping the surplus of blocksize */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}

static int crypto_cmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;
	unsigned int offset = 0;

	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, (const u8 *)tctx->consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}

static int cmac_init_tfm(struct crypto_shash *tfm)
{
	struct shash_instance *inst = shash_alg_instance(tfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher_spawn *spawn;
	struct crypto_cipher *cipher;

	spawn = shash_instance_ctx(inst);
	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static int cmac_clone_tfm(struct crypto_shash *tfm, struct crypto_shash *otfm)
{
	struct cmac_tfm_ctx *octx = crypto_shash_ctx(otfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_clone_cipher(octx->child);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cmac_exit_tfm(struct crypto_shash *tfm)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	switch (alg->cra_blocksize) {
	case 16:
	case 8:
		break;
	default:
		err = -EINVAL;
		goto err_free_inst;
	}

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_ctxsize = sizeof(struct cmac_tfm_ctx) +
				     alg->cra_blocksize * 2;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = sizeof(struct cmac_desc_ctx) +
			     alg->cra_blocksize * 2;
	inst->alg.init = crypto_cmac_digest_init;
	inst->alg.update = crypto_cmac_digest_update;
	inst->alg.final = crypto_cmac_digest_final;
	inst->alg.setkey = crypto_cmac_digest_setkey;
	inst->alg.init_tfm = cmac_init_tfm;
	inst->alg.clone_tfm = cmac_clone_tfm;
	inst->alg.exit_tfm = cmac_exit_tfm;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

static struct crypto_template crypto_cmac_tmpl = {
	.name = "cmac",
	.create = cmac_create,
	.module = THIS_MODULE,
};

static int __init crypto_cmac_module_init(void)
{
	return crypto_register_template(&crypto_cmac_tmpl);
}

static void __exit crypto_cmac_module_exit(void)
{
	crypto_unregister_template(&crypto_cmac_tmpl);
}

subsys_initcall(crypto_cmac_module_init);
module_exit(crypto_cmac_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CMAC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("cmac");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
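
/*
 * Illustrative usage sketch (not part of the original file): once this
 * template is registered, other kernel code can compute a CMAC through the
 * generic shash API by instantiating, for example, "cmac(aes)".  The helper
 * name and buffer names below are made up for the example; the API calls
 * themselves (crypto_alloc_shash, crypto_shash_setkey,
 * crypto_shash_tfm_digest, crypto_free_shash) come from <crypto/hash.h>,
 * with IS_ERR/PTR_ERR from <linux/err.h>.
 *
 *	static int example_cmac_aes(const u8 *key, unsigned int keylen,
 *				    const u8 *data, unsigned int len,
 *				    u8 *mac)	// mac must hold 16 bytes
 *	{
 *		struct crypto_shash *tfm;
 *		int err;
 *
 *		// Instantiate cmac() around the AES cipher.
 *		tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *
 *		// Set the AES key; this also derives the CMAC subkeys
 *		// via crypto_cmac_digest_setkey() above.
 *		err = crypto_shash_setkey(tfm, key, keylen);
 *		if (!err)
 *			err = crypto_shash_tfm_digest(tfm, data, len, mac);
 *
 *		crypto_free_shash(tfm);
 *		return err;
 *	}
 */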