// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API.  It differs
 * from shash (synchronous hash) in that ahash supports asynchronous operations,
 * and it hashes data from scatterlists instead of virtually addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms.  The shash
 * API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
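
/*
 * Run the update and final steps of an shash-backed hash in one pass over
 * the request's scatterlist: every mapped chunk is fed to
 * crypto_shash_update(), except the last one, which goes through
 * crypto_shash_finup() so the digest is produced in the same walk.
 */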
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}
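
/*
 * Unmap the current chunk and advance the walk.  @err is the result of
 * processing the previous chunk; a nonzero value aborts the walk and is
 * returned to the caller.  Illustrative sketch of the consuming loop (see
 * shash_ahash_update() above); process_chunk() is a stand-in for the
 * caller's own handler and must return 0 on success or a negative errno:
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, nbytes))
 *		nbytes = process_chunk(ctx, walk.data, nbytes);
 */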
int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total) {
		walk->entrylen = 0;
		return 0;
	}

	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}
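
/*
 * Set the key for the transform.  On failure, CRYPTO_TFM_NEED_KEY is
 * (re)asserted so that subsequent init and digest calls fail with -ENOKEY
 * until a valid key has been supplied.
 */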
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);
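
/*
 * Swap the request for an internally allocated subrequest with its own
 * result buffer.  The subrequest, its context and the digest buffer live in
 * a single allocation; with @has_state the current hash state is carried
 * over via export/import.  The original request is linked through req->priv
 * and put back by ahash_restore_req().
 */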
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
			  bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request *subreq;
	unsigned int subreq_size;
	unsigned int reqsize;
	u8 *result;
	gfp_t gfp;
	u32 flags;

	subreq_size = sizeof(*subreq);
	reqsize = crypto_ahash_reqsize(tfm);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
	subreq_size += reqsize;
	subreq_size += ds;

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	subreq = kmalloc(subreq_size, gfp);
	if (!subreq)
		return -ENOMEM;

	ahash_request_set_tfm(subreq, tfm);
	ahash_request_set_callback(subreq, flags, cplt, req);

	result = (u8 *)(subreq + 1) + reqsize;

	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

	if (has_state) {
		void *state;

		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
		if (!state) {
			kfree(subreq);
			return -ENOMEM;
		}

		crypto_ahash_export(req, state);
		crypto_ahash_import(subreq, state);
		kfree_sensitive(state);
	}

	req->priv = subreq;

	return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	kfree_sensitive(subreq);
}
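
/*
 * The exported entry points below all dispatch the same way: tfms backed by
 * an shash algorithm take the synchronous fast path, everything else goes
 * through the algorithm's ahash_alg callbacks.
 */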
int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));

	return crypto_ahash_alg(tfm)->update(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);

int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_final(ahash_request_ctx(req), req->result);

	return crypto_ahash_alg(tfm)->final(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));

	return crypto_ahash_alg(tfm)->finup(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return crypto_ahash_alg(tfm)->digest(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
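
/*
 * Default finup completion chain: ahash_def_finup_done1() runs when the
 * asynchronous update on the saved subrequest completes,
 * ahash_def_finup_finish1() then issues the final step, and
 * ahash_def_finup_done2() restores and completes the original request.
 */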
static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_alg(crypto_ahash_reqtfm(req))->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}
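
/*
 * Default ->finup for algorithms that only implement ->update and ->final;
 * installed by ahash_prepare_alg() when alg->finup is NULL.
 */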
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = crypto_ahash_alg(tfm)->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);
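
/*
 * tfm construction: a tfm instantiated from an shash algorithm is set up by
 * crypto_init_ahash_using_shash(), while a native ahash tfm gets its
 * NEED_KEY flag, exit handler and optional ->init_tfm callback wired up
 * here.
 */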
static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	crypto_ahash_set_statesize(hash, alg->halg.statesize);

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	ahash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);
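
/*
 * Illustrative usage sketch (not part of this file; all helpers are from
 * the public API in <crypto/hash.h>, while buf, len and digest are assumed
 * caller-provided).  A one-shot digest over a linear buffer:
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	int err;
 *
 *	sg_init_one(&sg, buf, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 *
 * Error handling of the allocations is omitted for brevity.
 */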
struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

static bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}

struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);
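
/*
 * Common validation and defaulting before registration: a zero statesize is
 * rejected, and missing ->finup/->setkey callbacks are filled in with
 * ahash_def_finup() and ahash_nosetkey().
 */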
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if (!alg->finup)
		alg->finup = ahash_def_finup;
	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);
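
/*
 * Illustrative registration sketch (hypothetical driver code, not part of
 * this file).  A driver fills in a struct ahash_alg and registers it; the
 * my_* names are assumptions for the example:
 *
 *	static struct ahash_alg my_alg = {
 *		.init	= my_init,
 *		.update	= my_update,
 *		.final	= my_final,
 *		.digest	= my_digest,
 *		.export	= my_export,
 *		.import	= my_import,
 *		.halg.digestsize = SHA256_DIGEST_SIZE,
 *		.halg.statesize	 = sizeof(struct my_state),
 *		.halg.base	 = {
 *			.cra_name	 = "sha256",
 *			.cra_driver_name = "sha256-mydev",
 *			.cra_flags	 = CRYPTO_ALG_ASYNC,
 *			.cra_blocksize	 = SHA256_BLOCK_SIZE,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_ahash(&my_alg);
 *
 * ->finup and ->setkey may be left NULL; ahash_prepare_alg() fills in the
 * defaults.
 */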

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");