1 // SPDX-License-Identifier: GPL-2.0-or-later << 2 /* 1 /* 3 * Cryptographic API for algorithms (i.e., low 2 * Cryptographic API for algorithms (i.e., low-level API). 4 * 3 * 5 * Copyright (c) 2006 Herbert Xu <herbert@gond 4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> >> 5 * >> 6 * This program is free software; you can redistribute it and/or modify it >> 7 * under the terms of the GNU General Public License as published by the Free >> 8 * Software Foundation; either version 2 of the License, or (at your option) >> 9 * any later version. >> 10 * 6 */ 11 */ 7 12 8 #include <crypto/algapi.h> 13 #include <crypto/algapi.h> 9 #include <crypto/internal/simd.h> << 10 #include <linux/err.h> 14 #include <linux/err.h> 11 #include <linux/errno.h> 15 #include <linux/errno.h> 12 #include <linux/fips.h> 16 #include <linux/fips.h> 13 #include <linux/init.h> 17 #include <linux/init.h> 14 #include <linux/kernel.h> 18 #include <linux/kernel.h> 15 #include <linux/list.h> 19 #include <linux/list.h> 16 #include <linux/module.h> 20 #include <linux/module.h> 17 #include <linux/rtnetlink.h> 21 #include <linux/rtnetlink.h> 18 #include <linux/slab.h> 22 #include <linux/slab.h> 19 #include <linux/string.h> 23 #include <linux/string.h> 20 #include <linux/workqueue.h> << 21 24 22 #include "internal.h" 25 #include "internal.h" 23 26 24 static LIST_HEAD(crypto_template_list); 27 static LIST_HEAD(crypto_template_list); 25 28 26 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS !! 29 static inline int crypto_set_driver_name(struct crypto_alg *alg) 27 DEFINE_PER_CPU(bool, crypto_simd_disabled_for_ !! 30 { 28 EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled !! 31 static const char suffix[] = "-generic"; 29 #endif !! 32 char *driver_name = alg->cra_driver_name; >> 33 int len; >> 34 >> 35 if (*driver_name) >> 36 return 0; >> 37 >> 38 len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME); >> 39 if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME) >> 40 return -ENAMETOOLONG; >> 41 >> 42 memcpy(driver_name + len, suffix, sizeof(suffix)); >> 43 return 0; >> 44 } 30 45 31 static inline void crypto_check_module_sig(str 46 static inline void crypto_check_module_sig(struct module *mod) 32 { 47 { 33 if (fips_enabled && mod && !module_sig 48 if (fips_enabled && mod && !module_sig_ok(mod)) 34 panic("Module %s signature ver 49 panic("Module %s signature verification failed in FIPS mode\n", 35 module_name(mod)); 50 module_name(mod)); 36 } 51 } 37 52 38 static int crypto_check_alg(struct crypto_alg 53 static int crypto_check_alg(struct crypto_alg *alg) 39 { 54 { 40 crypto_check_module_sig(alg->cra_modul 55 crypto_check_module_sig(alg->cra_module); 41 56 42 if (!alg->cra_name[0] || !alg->cra_dri << 43 return -EINVAL; << 44 << 45 if (alg->cra_alignmask & (alg->cra_ali 57 if (alg->cra_alignmask & (alg->cra_alignmask + 1)) 46 return -EINVAL; 58 return -EINVAL; 47 59 48 /* General maximums for all algs. */ 60 /* General maximums for all algs. */ 49 if (alg->cra_alignmask > MAX_ALGAPI_AL 61 if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK) 50 return -EINVAL; 62 return -EINVAL; 51 63 52 if (alg->cra_blocksize > MAX_ALGAPI_BL 64 if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE) 53 return -EINVAL; 65 return -EINVAL; 54 66 55 /* Lower maximums for specific alg typ 67 /* Lower maximums for specific alg types. 
*/ 56 if (!alg->cra_type && (alg->cra_flags 68 if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == 57 CRYPTO_ALG_TYPE 69 CRYPTO_ALG_TYPE_CIPHER) { 58 if (alg->cra_alignmask > MAX_C 70 if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK) 59 return -EINVAL; 71 return -EINVAL; 60 72 61 if (alg->cra_blocksize > MAX_C 73 if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE) 62 return -EINVAL; 74 return -EINVAL; 63 } 75 } 64 76 65 if (alg->cra_priority < 0) 77 if (alg->cra_priority < 0) 66 return -EINVAL; 78 return -EINVAL; 67 79 68 refcount_set(&alg->cra_refcnt, 1); 80 refcount_set(&alg->cra_refcnt, 1); 69 81 70 return 0; !! 82 return crypto_set_driver_name(alg); 71 } 83 } 72 84 73 static void crypto_free_instance(struct crypto 85 static void crypto_free_instance(struct crypto_instance *inst) 74 { 86 { >> 87 if (!inst->alg.cra_type->free) { >> 88 inst->tmpl->free(inst); >> 89 return; >> 90 } >> 91 75 inst->alg.cra_type->free(inst); 92 inst->alg.cra_type->free(inst); 76 } 93 } 77 94 78 static void crypto_destroy_instance_workfn(str !! 95 static void crypto_destroy_instance(struct crypto_alg *alg) 79 { 96 { 80 struct crypto_instance *inst = contain !! 97 struct crypto_instance *inst = (void *)alg; 81 << 82 struct crypto_template *tmpl = inst->t 98 struct crypto_template *tmpl = inst->tmpl; 83 99 84 crypto_free_instance(inst); 100 crypto_free_instance(inst); 85 crypto_tmpl_put(tmpl); 101 crypto_tmpl_put(tmpl); 86 } 102 } 87 103 88 static void crypto_destroy_instance(struct cry << 89 { << 90 struct crypto_instance *inst = contain << 91 << 92 << 93 << 94 INIT_WORK(&inst->free_work, crypto_des << 95 schedule_work(&inst->free_work); << 96 } << 97 << 98 /* << 99 * This function adds a spawn to the list seco << 100 * will be used at the end of crypto_remove_sp << 101 * instances, unless the spawn happens to be o << 102 * on by the new algorithm (nalg in crypto_rem << 103 * << 104 * This function is also responsible for resur << 105 * in the dependency chain of nalg by unsettin << 106 */ << 107 static struct list_head *crypto_more_spawns(st 104 static struct list_head *crypto_more_spawns(struct crypto_alg *alg, 108 st 105 struct list_head *stack, 109 st 106 struct list_head *top, 110 st 107 struct list_head *secondary_spawns) 111 { 108 { 112 struct crypto_spawn *spawn, *n; 109 struct crypto_spawn *spawn, *n; 113 110 114 spawn = list_first_entry_or_null(stack 111 spawn = list_first_entry_or_null(stack, struct crypto_spawn, list); 115 if (!spawn) 112 if (!spawn) 116 return NULL; 113 return NULL; 117 114 118 n = list_prev_entry(spawn, list); !! 115 n = list_next_entry(spawn, list); 119 list_move(&spawn->list, secondary_spaw << 120 116 121 if (list_is_last(&n->list, stack)) !! 117 if (spawn->alg && &n->list != stack && !n->alg) 122 return top; !! 118 n->alg = (n->list.next == stack) ? alg : >> 119 &list_next_entry(n, list)->inst->alg; 123 120 124 n = list_next_entry(n, list); !! 121 list_move(&spawn->list, secondary_spawns); 125 if (!spawn->dead) << 126 n->dead = false; << 127 122 128 return &n->inst->alg.cra_users; !! 123 return &n->list == stack ? 
top : &n->inst->alg.cra_users; 129 } 124 } 130 125 131 static void crypto_remove_instance(struct cryp 126 static void crypto_remove_instance(struct crypto_instance *inst, 132 struct list 127 struct list_head *list) 133 { 128 { 134 struct crypto_template *tmpl = inst->t 129 struct crypto_template *tmpl = inst->tmpl; 135 130 136 if (crypto_is_dead(&inst->alg)) 131 if (crypto_is_dead(&inst->alg)) 137 return; 132 return; 138 133 139 inst->alg.cra_flags |= CRYPTO_ALG_DEAD 134 inst->alg.cra_flags |= CRYPTO_ALG_DEAD; >> 135 if (hlist_unhashed(&inst->list)) >> 136 return; 140 137 141 if (!tmpl || !crypto_tmpl_get(tmpl)) 138 if (!tmpl || !crypto_tmpl_get(tmpl)) 142 return; 139 return; 143 140 144 list_move(&inst->alg.cra_list, list); 141 list_move(&inst->alg.cra_list, list); 145 hlist_del(&inst->list); 142 hlist_del(&inst->list); 146 inst->alg.cra_destroy = crypto_destroy 143 inst->alg.cra_destroy = crypto_destroy_instance; 147 144 148 BUG_ON(!list_empty(&inst->alg.cra_user 145 BUG_ON(!list_empty(&inst->alg.cra_users)); 149 } 146 } 150 147 151 /* << 152 * Given an algorithm alg, remove all algorith << 153 * through spawns. If nalg is not null, then << 154 * that is depended on by nalg. This is usefu << 155 * depends on alg. << 156 */ << 157 void crypto_remove_spawns(struct crypto_alg *a 148 void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list, 158 struct crypto_alg *n 149 struct crypto_alg *nalg) 159 { 150 { 160 u32 new_type = (nalg ?: alg)->cra_flag 151 u32 new_type = (nalg ?: alg)->cra_flags; 161 struct crypto_spawn *spawn, *n; 152 struct crypto_spawn *spawn, *n; 162 LIST_HEAD(secondary_spawns); 153 LIST_HEAD(secondary_spawns); 163 struct list_head *spawns; 154 struct list_head *spawns; 164 LIST_HEAD(stack); 155 LIST_HEAD(stack); 165 LIST_HEAD(top); 156 LIST_HEAD(top); 166 157 167 spawns = &alg->cra_users; 158 spawns = &alg->cra_users; 168 list_for_each_entry_safe(spawn, n, spa 159 list_for_each_entry_safe(spawn, n, spawns, list) { 169 if ((spawn->alg->cra_flags ^ n 160 if ((spawn->alg->cra_flags ^ new_type) & spawn->mask) 170 continue; 161 continue; 171 162 172 list_move(&spawn->list, &top); 163 list_move(&spawn->list, &top); 173 } 164 } 174 165 175 /* << 176 * Perform a depth-first walk starting << 177 * the cra_users tree. The list stack << 178 * from alg to the current spawn. << 179 */ << 180 spawns = ⊤ 166 spawns = ⊤ 181 do { 167 do { 182 while (!list_empty(spawns)) { 168 while (!list_empty(spawns)) { 183 struct crypto_instance 169 struct crypto_instance *inst; 184 170 185 spawn = list_first_ent 171 spawn = list_first_entry(spawns, struct crypto_spawn, 186 172 list); 187 inst = spawn->inst; 173 inst = spawn->inst; 188 174 189 list_move(&spawn->list << 190 spawn->dead = !spawn-> << 191 << 192 if (!spawn->registered << 193 break; << 194 << 195 BUG_ON(&inst->alg == a 175 BUG_ON(&inst->alg == alg); 196 176 >> 177 list_move(&spawn->list, &stack); >> 178 197 if (&inst->alg == nalg 179 if (&inst->alg == nalg) 198 break; 180 break; 199 181 >> 182 spawn->alg = NULL; 200 spawns = &inst->alg.cr 183 spawns = &inst->alg.cra_users; 201 184 202 /* 185 /* 203 * Even if spawn->regi << 204 * instance itself may << 205 * This is because it << 206 * registration. Ther << 207 * make the following << 208 * << 209 * We may encounter an 186 * We may encounter an unregistered instance here, since 210 * an instance's spawn 187 * an instance's spawns are set up prior to the instance 211 * being registered. 188 * being registered. 
An unregistered instance will have 212 * NULL ->cra_users.ne 189 * NULL ->cra_users.next, since ->cra_users isn't 213 * properly initialize 190 * properly initialized until registration. But an 214 * unregistered instan 191 * unregistered instance cannot have any users, so treat 215 * it the same as ->cr 192 * it the same as ->cra_users being empty. 216 */ 193 */ 217 if (spawns->next == NU 194 if (spawns->next == NULL) 218 break; 195 break; 219 } 196 } 220 } while ((spawns = crypto_more_spawns( 197 } while ((spawns = crypto_more_spawns(alg, &stack, &top, 221 198 &secondary_spawns))); 222 199 223 /* << 224 * Remove all instances that are marke << 225 * complete the resurrection of the ot << 226 * back to the cra_users list. << 227 */ << 228 list_for_each_entry_safe(spawn, n, &se 200 list_for_each_entry_safe(spawn, n, &secondary_spawns, list) { 229 if (!spawn->dead) !! 201 if (spawn->alg) 230 list_move(&spawn->list 202 list_move(&spawn->list, &spawn->alg->cra_users); 231 else if (spawn->registered) !! 203 else 232 crypto_remove_instance 204 crypto_remove_instance(spawn->inst, list); 233 } 205 } 234 } 206 } 235 EXPORT_SYMBOL_GPL(crypto_remove_spawns); 207 EXPORT_SYMBOL_GPL(crypto_remove_spawns); 236 208 237 static void crypto_alg_finish_registration(str !! 209 static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg) 238 boo << 239 str << 240 { << 241 struct crypto_alg *q; << 242 << 243 list_for_each_entry(q, &crypto_alg_lis << 244 if (q == alg) << 245 continue; << 246 << 247 if (crypto_is_moribund(q)) << 248 continue; << 249 << 250 if (crypto_is_larval(q)) { << 251 struct crypto_larval * << 252 << 253 /* << 254 * Check to see if eit << 255 * specific name can s << 256 * by the larval entry << 257 */ << 258 if (strcmp(alg->cra_na << 259 strcmp(alg->cra_dr << 260 continue; << 261 << 262 if (larval->adult) << 263 continue; << 264 if ((q->cra_flags ^ al << 265 continue; << 266 << 267 if (fulfill_requests & << 268 larval->adult << 269 else << 270 larval->adult << 271 << 272 continue; << 273 } << 274 << 275 if (strcmp(alg->cra_name, q->c << 276 continue; << 277 << 278 if (strcmp(alg->cra_driver_nam << 279 q->cra_priority > alg->cra << 280 continue; << 281 << 282 crypto_remove_spawns(q, algs_t << 283 } << 284 << 285 crypto_notify(CRYPTO_MSG_ALG_LOADED, a << 286 } << 287 << 288 static struct crypto_larval *crypto_alloc_test << 289 { << 290 struct crypto_larval *larval; << 291 << 292 if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) << 293 IS_ENABLED(CONFIG_CRYPTO_MANAGER_D << 294 (alg->cra_flags & CRYPTO_ALG_INTER << 295 return NULL; /* No self-test n << 296 << 297 larval = crypto_larval_alloc(alg->cra_ << 298 alg->cra_ << 299 if (IS_ERR(larval)) << 300 return larval; << 301 << 302 larval->adult = crypto_mod_get(alg); << 303 if (!larval->adult) { << 304 kfree(larval); << 305 return ERR_PTR(-ENOENT); << 306 } << 307 << 308 refcount_set(&larval->alg.cra_refcnt, << 309 memcpy(larval->alg.cra_driver_name, al << 310 CRYPTO_MAX_ALG_NAME); << 311 larval->alg.cra_priority = alg->cra_pr << 312 << 313 return larval; << 314 } << 315 << 316 static struct crypto_larval * << 317 __crypto_register_alg(struct crypto_alg *alg, << 318 { 210 { 319 struct crypto_alg *q; 211 struct crypto_alg *q; 320 struct crypto_larval *larval; 212 struct crypto_larval *larval; 321 int ret = -EAGAIN; 213 int ret = -EAGAIN; 322 214 323 if (crypto_is_dead(alg)) 215 if (crypto_is_dead(alg)) 324 goto err; 216 goto err; 325 217 326 INIT_LIST_HEAD(&alg->cra_users); 218 INIT_LIST_HEAD(&alg->cra_users); 327 219 >> 220 /* No cheating! 
*/ >> 221 alg->cra_flags &= ~CRYPTO_ALG_TESTED; >> 222 328 ret = -EEXIST; 223 ret = -EEXIST; 329 224 330 list_for_each_entry(q, &crypto_alg_lis 225 list_for_each_entry(q, &crypto_alg_list, cra_list) { 331 if (q == alg) 226 if (q == alg) 332 goto err; 227 goto err; 333 228 334 if (crypto_is_moribund(q)) 229 if (crypto_is_moribund(q)) 335 continue; 230 continue; 336 231 337 if (crypto_is_larval(q)) { 232 if (crypto_is_larval(q)) { 338 if (!strcmp(alg->cra_d 233 if (!strcmp(alg->cra_driver_name, q->cra_driver_name)) 339 goto err; 234 goto err; 340 continue; 235 continue; 341 } 236 } 342 237 343 if (!strcmp(q->cra_driver_name 238 if (!strcmp(q->cra_driver_name, alg->cra_name) || 344 !strcmp(q->cra_driver_name << 345 !strcmp(q->cra_name, alg-> 239 !strcmp(q->cra_name, alg->cra_driver_name)) 346 goto err; 240 goto err; 347 } 241 } 348 242 349 larval = crypto_alloc_test_larval(alg) !! 243 larval = crypto_larval_alloc(alg->cra_name, >> 244 alg->cra_flags | CRYPTO_ALG_TESTED, 0); 350 if (IS_ERR(larval)) 245 if (IS_ERR(larval)) 351 goto out; 246 goto out; 352 247 >> 248 ret = -ENOENT; >> 249 larval->adult = crypto_mod_get(alg); >> 250 if (!larval->adult) >> 251 goto free_larval; >> 252 >> 253 refcount_set(&larval->alg.cra_refcnt, 1); >> 254 memcpy(larval->alg.cra_driver_name, alg->cra_driver_name, >> 255 CRYPTO_MAX_ALG_NAME); >> 256 larval->alg.cra_priority = alg->cra_priority; >> 257 353 list_add(&alg->cra_list, &crypto_alg_l 258 list_add(&alg->cra_list, &crypto_alg_list); >> 259 list_add(&larval->alg.cra_list, &crypto_alg_list); 354 260 355 if (larval) { !! 261 atomic_set(&alg->encrypt_cnt, 0); 356 /* No cheating! */ !! 262 atomic_set(&alg->decrypt_cnt, 0); 357 alg->cra_flags &= ~CRYPTO_ALG_ !! 263 atomic64_set(&alg->encrypt_tlen, 0); 358 !! 264 atomic64_set(&alg->decrypt_tlen, 0); 359 list_add(&larval->alg.cra_list !! 265 atomic_set(&alg->verify_cnt, 0); 360 } else { !! 266 atomic_set(&alg->cipher_err_cnt, 0); 361 alg->cra_flags |= CRYPTO_ALG_T !! 267 atomic_set(&alg->sign_cnt, 0); 362 crypto_alg_finish_registration << 363 } << 364 268 365 out: 269 out: 366 return larval; 270 return larval; 367 271 >> 272 free_larval: >> 273 kfree(larval); 368 err: 274 err: 369 larval = ERR_PTR(ret); 275 larval = ERR_PTR(ret); 370 goto out; 276 goto out; 371 } 277 } 372 278 373 void crypto_alg_tested(const char *name, int e 279 void crypto_alg_tested(const char *name, int err) 374 { 280 { 375 struct crypto_larval *test; 281 struct crypto_larval *test; 376 struct crypto_alg *alg; 282 struct crypto_alg *alg; 377 struct crypto_alg *q; 283 struct crypto_alg *q; 378 LIST_HEAD(list); 284 LIST_HEAD(list); 379 bool best; << 380 285 381 down_write(&crypto_alg_sem); 286 down_write(&crypto_alg_sem); 382 list_for_each_entry(q, &crypto_alg_lis 287 list_for_each_entry(q, &crypto_alg_list, cra_list) { 383 if (crypto_is_moribund(q) || ! 288 if (crypto_is_moribund(q) || !crypto_is_larval(q)) 384 continue; 289 continue; 385 290 386 test = (struct crypto_larval * 291 test = (struct crypto_larval *)q; 387 292 388 if (!strcmp(q->cra_driver_name 293 if (!strcmp(q->cra_driver_name, name)) 389 goto found; 294 goto found; 390 } 295 } 391 296 392 pr_err("alg: Unexpected test result fo 297 pr_err("alg: Unexpected test result for %s: %d\n", name, err); 393 goto unlock; 298 goto unlock; 394 299 395 found: 300 found: 396 q->cra_flags |= CRYPTO_ALG_DEAD; 301 q->cra_flags |= CRYPTO_ALG_DEAD; 397 alg = test->adult; 302 alg = test->adult; 398 !! 
303 if (err || list_empty(&alg->cra_list)) 399 if (list_empty(&alg->cra_list)) << 400 goto complete; 304 goto complete; 401 305 402 if (err == -ECANCELED) << 403 alg->cra_flags |= CRYPTO_ALG_F << 404 else if (err) << 405 goto complete; << 406 else << 407 alg->cra_flags &= ~CRYPTO_ALG_ << 408 << 409 alg->cra_flags |= CRYPTO_ALG_TESTED; 306 alg->cra_flags |= CRYPTO_ALG_TESTED; 410 307 411 /* << 412 * If a higher-priority implementation << 413 * currently being tested, then don't << 414 */ << 415 best = true; << 416 list_for_each_entry(q, &crypto_alg_lis 308 list_for_each_entry(q, &crypto_alg_list, cra_list) { 417 if (crypto_is_moribund(q) || ! !! 309 if (q == alg) 418 continue; 310 continue; 419 311 420 if (strcmp(alg->cra_name, q->c !! 312 if (crypto_is_moribund(q)) 421 continue; 313 continue; 422 314 423 if (q->cra_priority > alg->cra !! 315 if (crypto_is_larval(q)) { 424 best = false; !! 316 struct crypto_larval *larval = (void *)q; 425 break; !! 317 >> 318 /* >> 319 * Check to see if either our generic name or >> 320 * specific name can satisfy the name requested >> 321 * by the larval entry q. >> 322 */ >> 323 if (strcmp(alg->cra_name, q->cra_name) && >> 324 strcmp(alg->cra_driver_name, q->cra_name)) >> 325 continue; >> 326 >> 327 if (larval->adult) >> 328 continue; >> 329 if ((q->cra_flags ^ alg->cra_flags) & larval->mask) >> 330 continue; >> 331 if (!crypto_mod_get(alg)) >> 332 continue; >> 333 >> 334 larval->adult = alg; >> 335 continue; 426 } 336 } 427 } << 428 337 429 crypto_alg_finish_registration(alg, be !! 338 if (strcmp(alg->cra_name, q->cra_name)) >> 339 continue; >> 340 >> 341 if (strcmp(alg->cra_driver_name, q->cra_driver_name) && >> 342 q->cra_priority > alg->cra_priority) >> 343 continue; >> 344 >> 345 crypto_remove_spawns(q, &list, alg); >> 346 } 430 347 431 complete: 348 complete: 432 complete_all(&test->completion); 349 complete_all(&test->completion); 433 350 434 unlock: 351 unlock: 435 up_write(&crypto_alg_sem); 352 up_write(&crypto_alg_sem); 436 353 437 crypto_remove_final(&list); 354 crypto_remove_final(&list); 438 } 355 } 439 EXPORT_SYMBOL_GPL(crypto_alg_tested); 356 EXPORT_SYMBOL_GPL(crypto_alg_tested); 440 357 441 void crypto_remove_final(struct list_head *lis 358 void crypto_remove_final(struct list_head *list) 442 { 359 { 443 struct crypto_alg *alg; 360 struct crypto_alg *alg; 444 struct crypto_alg *n; 361 struct crypto_alg *n; 445 362 446 list_for_each_entry_safe(alg, n, list, 363 list_for_each_entry_safe(alg, n, list, cra_list) { 447 list_del_init(&alg->cra_list); 364 list_del_init(&alg->cra_list); 448 crypto_alg_put(alg); 365 crypto_alg_put(alg); 449 } 366 } 450 } 367 } 451 EXPORT_SYMBOL_GPL(crypto_remove_final); 368 EXPORT_SYMBOL_GPL(crypto_remove_final); 452 369 >> 370 static void crypto_wait_for_test(struct crypto_larval *larval) >> 371 { >> 372 int err; >> 373 >> 374 err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult); >> 375 if (err != NOTIFY_STOP) { >> 376 if (WARN_ON(err != NOTIFY_DONE)) >> 377 goto out; >> 378 crypto_alg_tested(larval->alg.cra_driver_name, 0); >> 379 } >> 380 >> 381 err = wait_for_completion_killable(&larval->completion); >> 382 WARN_ON(err); >> 383 if (!err) >> 384 crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval); >> 385 >> 386 out: >> 387 crypto_larval_kill(&larval->alg); >> 388 } >> 389 453 int crypto_register_alg(struct crypto_alg *alg 390 int crypto_register_alg(struct crypto_alg *alg) 454 { 391 { 455 struct crypto_larval *larval; 392 struct crypto_larval *larval; 456 LIST_HEAD(algs_to_put); << 457 bool 
test_started = false; << 458 int err; 393 int err; 459 394 460 alg->cra_flags &= ~CRYPTO_ALG_DEAD; 395 alg->cra_flags &= ~CRYPTO_ALG_DEAD; 461 err = crypto_check_alg(alg); 396 err = crypto_check_alg(alg); 462 if (err) 397 if (err) 463 return err; 398 return err; 464 399 465 down_write(&crypto_alg_sem); 400 down_write(&crypto_alg_sem); 466 larval = __crypto_register_alg(alg, &a !! 401 larval = __crypto_register_alg(alg); 467 if (!IS_ERR_OR_NULL(larval)) { << 468 test_started = crypto_boot_tes << 469 larval->test_started = test_st << 470 } << 471 up_write(&crypto_alg_sem); 402 up_write(&crypto_alg_sem); 472 403 473 if (IS_ERR(larval)) 404 if (IS_ERR(larval)) 474 return PTR_ERR(larval); 405 return PTR_ERR(larval); 475 if (test_started) !! 406 476 crypto_wait_for_test(larval); !! 407 crypto_wait_for_test(larval); 477 crypto_remove_final(&algs_to_put); << 478 return 0; 408 return 0; 479 } 409 } 480 EXPORT_SYMBOL_GPL(crypto_register_alg); 410 EXPORT_SYMBOL_GPL(crypto_register_alg); 481 411 482 static int crypto_remove_alg(struct crypto_alg 412 static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list) 483 { 413 { 484 if (unlikely(list_empty(&alg->cra_list 414 if (unlikely(list_empty(&alg->cra_list))) 485 return -ENOENT; 415 return -ENOENT; 486 416 487 alg->cra_flags |= CRYPTO_ALG_DEAD; 417 alg->cra_flags |= CRYPTO_ALG_DEAD; 488 418 489 list_del_init(&alg->cra_list); 419 list_del_init(&alg->cra_list); 490 crypto_remove_spawns(alg, list, NULL); 420 crypto_remove_spawns(alg, list, NULL); 491 421 492 return 0; 422 return 0; 493 } 423 } 494 424 495 void crypto_unregister_alg(struct crypto_alg * !! 425 int crypto_unregister_alg(struct crypto_alg *alg) 496 { 426 { 497 int ret; 427 int ret; 498 LIST_HEAD(list); 428 LIST_HEAD(list); 499 429 500 down_write(&crypto_alg_sem); 430 down_write(&crypto_alg_sem); 501 ret = crypto_remove_alg(alg, &list); 431 ret = crypto_remove_alg(alg, &list); 502 up_write(&crypto_alg_sem); 432 up_write(&crypto_alg_sem); 503 433 504 if (WARN(ret, "Algorithm %s is not reg !! 434 if (ret) 505 return; !! 435 return ret; 506 << 507 if (WARN_ON(refcount_read(&alg->cra_re << 508 return; << 509 436 >> 437 BUG_ON(refcount_read(&alg->cra_refcnt) != 1); 510 if (alg->cra_destroy) 438 if (alg->cra_destroy) 511 alg->cra_destroy(alg); 439 alg->cra_destroy(alg); 512 440 513 crypto_remove_final(&list); 441 crypto_remove_final(&list); >> 442 return 0; 514 } 443 } 515 EXPORT_SYMBOL_GPL(crypto_unregister_alg); 444 EXPORT_SYMBOL_GPL(crypto_unregister_alg); 516 445 517 int crypto_register_algs(struct crypto_alg *al 446 int crypto_register_algs(struct crypto_alg *algs, int count) 518 { 447 { 519 int i, ret; 448 int i, ret; 520 449 521 for (i = 0; i < count; i++) { 450 for (i = 0; i < count; i++) { 522 ret = crypto_register_alg(&alg 451 ret = crypto_register_alg(&algs[i]); 523 if (ret) 452 if (ret) 524 goto err; 453 goto err; 525 } 454 } 526 455 527 return 0; 456 return 0; 528 457 529 err: 458 err: 530 for (--i; i >= 0; --i) 459 for (--i; i >= 0; --i) 531 crypto_unregister_alg(&algs[i] 460 crypto_unregister_alg(&algs[i]); 532 461 533 return ret; 462 return ret; 534 } 463 } 535 EXPORT_SYMBOL_GPL(crypto_register_algs); 464 EXPORT_SYMBOL_GPL(crypto_register_algs); 536 465 537 void crypto_unregister_algs(struct crypto_alg !! 466 int crypto_unregister_algs(struct crypto_alg *algs, int count) 538 { 467 { 539 int i; !! 468 int i, ret; 540 469 541 for (i = 0; i < count; i++) !! 470 for (i = 0; i < count; i++) { 542 crypto_unregister_alg(&algs[i] !! 
471 ret = crypto_unregister_alg(&algs[i]); >> 472 if (ret) >> 473 pr_err("Failed to unregister %s %s: %d\n", >> 474 algs[i].cra_driver_name, algs[i].cra_name, ret); >> 475 } >> 476 >> 477 return 0; 543 } 478 } 544 EXPORT_SYMBOL_GPL(crypto_unregister_algs); 479 EXPORT_SYMBOL_GPL(crypto_unregister_algs); 545 480 546 int crypto_register_template(struct crypto_tem 481 int crypto_register_template(struct crypto_template *tmpl) 547 { 482 { 548 struct crypto_template *q; 483 struct crypto_template *q; 549 int err = -EEXIST; 484 int err = -EEXIST; 550 485 551 down_write(&crypto_alg_sem); 486 down_write(&crypto_alg_sem); 552 487 553 crypto_check_module_sig(tmpl->module); 488 crypto_check_module_sig(tmpl->module); 554 489 555 list_for_each_entry(q, &crypto_templat 490 list_for_each_entry(q, &crypto_template_list, list) { 556 if (q == tmpl) 491 if (q == tmpl) 557 goto out; 492 goto out; 558 } 493 } 559 494 560 list_add(&tmpl->list, &crypto_template 495 list_add(&tmpl->list, &crypto_template_list); 561 err = 0; 496 err = 0; 562 out: 497 out: 563 up_write(&crypto_alg_sem); 498 up_write(&crypto_alg_sem); 564 return err; 499 return err; 565 } 500 } 566 EXPORT_SYMBOL_GPL(crypto_register_template); 501 EXPORT_SYMBOL_GPL(crypto_register_template); 567 502 568 int crypto_register_templates(struct crypto_te << 569 { << 570 int i, err; << 571 << 572 for (i = 0; i < count; i++) { << 573 err = crypto_register_template << 574 if (err) << 575 goto out; << 576 } << 577 return 0; << 578 << 579 out: << 580 for (--i; i >= 0; --i) << 581 crypto_unregister_template(&tm << 582 return err; << 583 } << 584 EXPORT_SYMBOL_GPL(crypto_register_templates); << 585 << 586 void crypto_unregister_template(struct crypto_ 503 void crypto_unregister_template(struct crypto_template *tmpl) 587 { 504 { 588 struct crypto_instance *inst; 505 struct crypto_instance *inst; 589 struct hlist_node *n; 506 struct hlist_node *n; 590 struct hlist_head *list; 507 struct hlist_head *list; 591 LIST_HEAD(users); 508 LIST_HEAD(users); 592 509 593 down_write(&crypto_alg_sem); 510 down_write(&crypto_alg_sem); 594 511 595 BUG_ON(list_empty(&tmpl->list)); 512 BUG_ON(list_empty(&tmpl->list)); 596 list_del_init(&tmpl->list); 513 list_del_init(&tmpl->list); 597 514 598 list = &tmpl->instances; 515 list = &tmpl->instances; 599 hlist_for_each_entry(inst, list, list) 516 hlist_for_each_entry(inst, list, list) { 600 int err = crypto_remove_alg(&i 517 int err = crypto_remove_alg(&inst->alg, &users); 601 518 602 BUG_ON(err); 519 BUG_ON(err); 603 } 520 } 604 521 605 up_write(&crypto_alg_sem); 522 up_write(&crypto_alg_sem); 606 523 607 hlist_for_each_entry_safe(inst, n, lis 524 hlist_for_each_entry_safe(inst, n, list, list) { 608 BUG_ON(refcount_read(&inst->al 525 BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1); 609 crypto_free_instance(inst); 526 crypto_free_instance(inst); 610 } 527 } 611 crypto_remove_final(&users); 528 crypto_remove_final(&users); 612 } 529 } 613 EXPORT_SYMBOL_GPL(crypto_unregister_template); 530 EXPORT_SYMBOL_GPL(crypto_unregister_template); 614 531 615 void crypto_unregister_templates(struct crypto << 616 { << 617 int i; << 618 << 619 for (i = count - 1; i >= 0; --i) << 620 crypto_unregister_template(&tm << 621 } << 622 EXPORT_SYMBOL_GPL(crypto_unregister_templates) << 623 << 624 static struct crypto_template *__crypto_lookup 532 static struct crypto_template *__crypto_lookup_template(const char *name) 625 { 533 { 626 struct crypto_template *q, *tmpl = NUL 534 struct crypto_template *q, *tmpl = NULL; 627 535 628 down_read(&crypto_alg_sem); 
536 down_read(&crypto_alg_sem); 629 list_for_each_entry(q, &crypto_templat 537 list_for_each_entry(q, &crypto_template_list, list) { 630 if (strcmp(q->name, name)) 538 if (strcmp(q->name, name)) 631 continue; 539 continue; 632 if (unlikely(!crypto_tmpl_get( 540 if (unlikely(!crypto_tmpl_get(q))) 633 continue; 541 continue; 634 542 635 tmpl = q; 543 tmpl = q; 636 break; 544 break; 637 } 545 } 638 up_read(&crypto_alg_sem); 546 up_read(&crypto_alg_sem); 639 547 640 return tmpl; 548 return tmpl; 641 } 549 } 642 550 643 struct crypto_template *crypto_lookup_template 551 struct crypto_template *crypto_lookup_template(const char *name) 644 { 552 { 645 return try_then_request_module(__crypt 553 return try_then_request_module(__crypto_lookup_template(name), 646 "crypto 554 "crypto-%s", name); 647 } 555 } 648 EXPORT_SYMBOL_GPL(crypto_lookup_template); 556 EXPORT_SYMBOL_GPL(crypto_lookup_template); 649 557 650 int crypto_register_instance(struct crypto_tem 558 int crypto_register_instance(struct crypto_template *tmpl, 651 struct crypto_ins 559 struct crypto_instance *inst) 652 { 560 { 653 struct crypto_larval *larval; 561 struct crypto_larval *larval; 654 struct crypto_spawn *spawn; << 655 u32 fips_internal = 0; << 656 LIST_HEAD(algs_to_put); << 657 int err; 562 int err; 658 563 659 err = crypto_check_alg(&inst->alg); 564 err = crypto_check_alg(&inst->alg); 660 if (err) 565 if (err) 661 return err; 566 return err; 662 567 663 inst->alg.cra_module = tmpl->module; 568 inst->alg.cra_module = tmpl->module; 664 inst->alg.cra_flags |= CRYPTO_ALG_INST 569 inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE; 665 570 666 down_write(&crypto_alg_sem); 571 down_write(&crypto_alg_sem); 667 572 668 larval = ERR_PTR(-EAGAIN); !! 573 larval = __crypto_register_alg(&inst->alg); 669 for (spawn = inst->spawns; spawn;) { << 670 struct crypto_spawn *next; << 671 << 672 if (spawn->dead) << 673 goto unlock; << 674 << 675 next = spawn->next; << 676 spawn->inst = inst; << 677 spawn->registered = true; << 678 << 679 fips_internal |= spawn->alg->c << 680 << 681 crypto_mod_put(spawn->alg); << 682 << 683 spawn = next; << 684 } << 685 << 686 inst->alg.cra_flags |= (fips_internal << 687 << 688 larval = __crypto_register_alg(&inst-> << 689 if (IS_ERR(larval)) 574 if (IS_ERR(larval)) 690 goto unlock; 575 goto unlock; 691 else if (larval) << 692 larval->test_started = true; << 693 576 694 hlist_add_head(&inst->list, &tmpl->ins 577 hlist_add_head(&inst->list, &tmpl->instances); 695 inst->tmpl = tmpl; 578 inst->tmpl = tmpl; 696 579 697 unlock: 580 unlock: 698 up_write(&crypto_alg_sem); 581 up_write(&crypto_alg_sem); 699 582 >> 583 err = PTR_ERR(larval); 700 if (IS_ERR(larval)) 584 if (IS_ERR(larval)) 701 return PTR_ERR(larval); !! 585 goto err; 702 if (larval) !! 586 703 crypto_wait_for_test(larval); !! 587 crypto_wait_for_test(larval); 704 crypto_remove_final(&algs_to_put); !! 588 err = 0; 705 return 0; !! 589 >> 590 err: >> 591 return err; 706 } 592 } 707 EXPORT_SYMBOL_GPL(crypto_register_instance); 593 EXPORT_SYMBOL_GPL(crypto_register_instance); 708 594 709 void crypto_unregister_instance(struct crypto_ !! 
595 int crypto_unregister_instance(struct crypto_instance *inst) 710 { 596 { 711 LIST_HEAD(list); 597 LIST_HEAD(list); 712 598 713 down_write(&crypto_alg_sem); 599 down_write(&crypto_alg_sem); 714 600 715 crypto_remove_spawns(&inst->alg, &list 601 crypto_remove_spawns(&inst->alg, &list, NULL); 716 crypto_remove_instance(inst, &list); 602 crypto_remove_instance(inst, &list); 717 603 718 up_write(&crypto_alg_sem); 604 up_write(&crypto_alg_sem); 719 605 720 crypto_remove_final(&list); 606 crypto_remove_final(&list); >> 607 >> 608 return 0; 721 } 609 } 722 EXPORT_SYMBOL_GPL(crypto_unregister_instance); 610 EXPORT_SYMBOL_GPL(crypto_unregister_instance); 723 611 724 int crypto_grab_spawn(struct crypto_spawn *spa !! 612 int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg, 725 const char *name, u32 ty !! 613 struct crypto_instance *inst, u32 mask) 726 { 614 { 727 struct crypto_alg *alg; << 728 int err = -EAGAIN; 615 int err = -EAGAIN; 729 616 730 if (WARN_ON_ONCE(inst == NULL)) !! 617 spawn->inst = inst; 731 return -EINVAL; !! 618 spawn->mask = mask; 732 << 733 /* Allow the result of crypto_attr_alg << 734 if (IS_ERR(name)) << 735 return PTR_ERR(name); << 736 << 737 alg = crypto_find_alg(name, spawn->fro << 738 type | CRYPTO_AL << 739 if (IS_ERR(alg)) << 740 return PTR_ERR(alg); << 741 619 742 down_write(&crypto_alg_sem); 620 down_write(&crypto_alg_sem); 743 if (!crypto_is_moribund(alg)) { 621 if (!crypto_is_moribund(alg)) { 744 list_add(&spawn->list, &alg->c 622 list_add(&spawn->list, &alg->cra_users); 745 spawn->alg = alg; 623 spawn->alg = alg; 746 spawn->mask = mask; << 747 spawn->next = inst->spawns; << 748 inst->spawns = spawn; << 749 inst->alg.cra_flags |= << 750 (alg->cra_flags & CRYP << 751 err = 0; 624 err = 0; 752 } 625 } 753 up_write(&crypto_alg_sem); 626 up_write(&crypto_alg_sem); 754 if (err) !! 627 755 crypto_mod_put(alg); !! 628 return err; >> 629 } >> 630 EXPORT_SYMBOL_GPL(crypto_init_spawn); >> 631 >> 632 int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg, >> 633 struct crypto_instance *inst, >> 634 const struct crypto_type *frontend) >> 635 { >> 636 int err = -EINVAL; >> 637 >> 638 if ((alg->cra_flags ^ frontend->type) & frontend->maskset) >> 639 goto out; >> 640 >> 641 spawn->frontend = frontend; >> 642 err = crypto_init_spawn(spawn, alg, inst, frontend->maskset); >> 643 >> 644 out: >> 645 return err; >> 646 } >> 647 EXPORT_SYMBOL_GPL(crypto_init_spawn2); >> 648 >> 649 int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name, >> 650 u32 type, u32 mask) >> 651 { >> 652 struct crypto_alg *alg; >> 653 int err; >> 654 >> 655 alg = crypto_find_alg(name, spawn->frontend, type, mask); >> 656 if (IS_ERR(alg)) >> 657 return PTR_ERR(alg); >> 658 >> 659 err = crypto_init_spawn(spawn, alg, spawn->inst, mask); >> 660 crypto_mod_put(alg); 756 return err; 661 return err; 757 } 662 } 758 EXPORT_SYMBOL_GPL(crypto_grab_spawn); 663 EXPORT_SYMBOL_GPL(crypto_grab_spawn); 759 664 760 void crypto_drop_spawn(struct crypto_spawn *sp 665 void crypto_drop_spawn(struct crypto_spawn *spawn) 761 { 666 { 762 if (!spawn->alg) /* not yet initialize !! 667 if (!spawn->alg) 763 return; 668 return; 764 669 765 down_write(&crypto_alg_sem); 670 down_write(&crypto_alg_sem); 766 if (!spawn->dead) !! 
671 list_del(&spawn->list); 767 list_del(&spawn->list); << 768 up_write(&crypto_alg_sem); 672 up_write(&crypto_alg_sem); 769 << 770 if (!spawn->registered) << 771 crypto_mod_put(spawn->alg); << 772 } 673 } 773 EXPORT_SYMBOL_GPL(crypto_drop_spawn); 674 EXPORT_SYMBOL_GPL(crypto_drop_spawn); 774 675 775 static struct crypto_alg *crypto_spawn_alg(str 676 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn) 776 { 677 { 777 struct crypto_alg *alg = ERR_PTR(-EAGA !! 678 struct crypto_alg *alg; 778 struct crypto_alg *target; !! 679 struct crypto_alg *alg2; 779 bool shoot = false; << 780 680 781 down_read(&crypto_alg_sem); 681 down_read(&crypto_alg_sem); 782 if (!spawn->dead) { !! 682 alg = spawn->alg; 783 alg = spawn->alg; !! 683 alg2 = alg; 784 if (!crypto_mod_get(alg)) { !! 684 if (alg2) 785 target = crypto_alg_ge !! 685 alg2 = crypto_mod_get(alg2); 786 shoot = true; << 787 alg = ERR_PTR(-EAGAIN) << 788 } << 789 } << 790 up_read(&crypto_alg_sem); 686 up_read(&crypto_alg_sem); 791 687 792 if (shoot) { !! 688 if (!alg2) { 793 crypto_shoot_alg(target); !! 689 if (alg) 794 crypto_alg_put(target); !! 690 crypto_shoot_alg(alg); >> 691 return ERR_PTR(-EAGAIN); 795 } 692 } 796 693 797 return alg; 694 return alg; 798 } 695 } 799 696 800 struct crypto_tfm *crypto_spawn_tfm(struct cry 697 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, 801 u32 mask) 698 u32 mask) 802 { 699 { 803 struct crypto_alg *alg; 700 struct crypto_alg *alg; 804 struct crypto_tfm *tfm; 701 struct crypto_tfm *tfm; 805 702 806 alg = crypto_spawn_alg(spawn); 703 alg = crypto_spawn_alg(spawn); 807 if (IS_ERR(alg)) 704 if (IS_ERR(alg)) 808 return ERR_CAST(alg); 705 return ERR_CAST(alg); 809 706 810 tfm = ERR_PTR(-EINVAL); 707 tfm = ERR_PTR(-EINVAL); 811 if (unlikely((alg->cra_flags ^ type) & 708 if (unlikely((alg->cra_flags ^ type) & mask)) 812 goto out_put_alg; 709 goto out_put_alg; 813 710 814 tfm = __crypto_alloc_tfm(alg, type, ma 711 tfm = __crypto_alloc_tfm(alg, type, mask); 815 if (IS_ERR(tfm)) 712 if (IS_ERR(tfm)) 816 goto out_put_alg; 713 goto out_put_alg; 817 714 818 return tfm; 715 return tfm; 819 716 820 out_put_alg: 717 out_put_alg: 821 crypto_mod_put(alg); 718 crypto_mod_put(alg); 822 return tfm; 719 return tfm; 823 } 720 } 824 EXPORT_SYMBOL_GPL(crypto_spawn_tfm); 721 EXPORT_SYMBOL_GPL(crypto_spawn_tfm); 825 722 826 void *crypto_spawn_tfm2(struct crypto_spawn *s 723 void *crypto_spawn_tfm2(struct crypto_spawn *spawn) 827 { 724 { 828 struct crypto_alg *alg; 725 struct crypto_alg *alg; 829 struct crypto_tfm *tfm; 726 struct crypto_tfm *tfm; 830 727 831 alg = crypto_spawn_alg(spawn); 728 alg = crypto_spawn_alg(spawn); 832 if (IS_ERR(alg)) 729 if (IS_ERR(alg)) 833 return ERR_CAST(alg); 730 return ERR_CAST(alg); 834 731 835 tfm = crypto_create_tfm(alg, spawn->fr 732 tfm = crypto_create_tfm(alg, spawn->frontend); 836 if (IS_ERR(tfm)) 733 if (IS_ERR(tfm)) 837 goto out_put_alg; 734 goto out_put_alg; 838 735 839 return tfm; 736 return tfm; 840 737 841 out_put_alg: 738 out_put_alg: 842 crypto_mod_put(alg); 739 crypto_mod_put(alg); 843 return tfm; 740 return tfm; 844 } 741 } 845 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2); 742 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2); 846 743 847 int crypto_register_notifier(struct notifier_b 744 int crypto_register_notifier(struct notifier_block *nb) 848 { 745 { 849 return blocking_notifier_chain_registe 746 return blocking_notifier_chain_register(&crypto_chain, nb); 850 } 747 } 851 EXPORT_SYMBOL_GPL(crypto_register_notifier); 748 
EXPORT_SYMBOL_GPL(crypto_register_notifier); 852 749 853 int crypto_unregister_notifier(struct notifier 750 int crypto_unregister_notifier(struct notifier_block *nb) 854 { 751 { 855 return blocking_notifier_chain_unregis 752 return blocking_notifier_chain_unregister(&crypto_chain, nb); 856 } 753 } 857 EXPORT_SYMBOL_GPL(crypto_unregister_notifier); 754 EXPORT_SYMBOL_GPL(crypto_unregister_notifier); 858 755 859 struct crypto_attr_type *crypto_get_attr_type( 756 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb) 860 { 757 { 861 struct rtattr *rta = tb[0]; 758 struct rtattr *rta = tb[0]; 862 struct crypto_attr_type *algt; 759 struct crypto_attr_type *algt; 863 760 864 if (!rta) 761 if (!rta) 865 return ERR_PTR(-ENOENT); 762 return ERR_PTR(-ENOENT); 866 if (RTA_PAYLOAD(rta) < sizeof(*algt)) 763 if (RTA_PAYLOAD(rta) < sizeof(*algt)) 867 return ERR_PTR(-EINVAL); 764 return ERR_PTR(-EINVAL); 868 if (rta->rta_type != CRYPTOA_TYPE) 765 if (rta->rta_type != CRYPTOA_TYPE) 869 return ERR_PTR(-EINVAL); 766 return ERR_PTR(-EINVAL); 870 767 871 algt = RTA_DATA(rta); 768 algt = RTA_DATA(rta); 872 769 873 return algt; 770 return algt; 874 } 771 } 875 EXPORT_SYMBOL_GPL(crypto_get_attr_type); 772 EXPORT_SYMBOL_GPL(crypto_get_attr_type); 876 773 877 /** !! 774 int crypto_check_attr_type(struct rtattr **tb, u32 type) 878 * crypto_check_attr_type() - check algorithm << 879 * @tb: the template parameters << 880 * @type: the algorithm type the template woul << 881 * @mask_ret: (output) the mask that should be << 882 * to restrict the flags of any inn << 883 * << 884 * Validate that the algorithm type the user r << 885 * one the template would actually be instanti << 886 * doing crypto_alloc_shash("cbc(aes)", ...), << 887 * the "cbc" template creates an "skcipher" al << 888 * << 889 * Also compute the mask to use to restrict th << 890 * << 891 * Return: 0 on success; -errno on failure << 892 */ << 893 int crypto_check_attr_type(struct rtattr **tb, << 894 { 775 { 895 struct crypto_attr_type *algt; 776 struct crypto_attr_type *algt; 896 777 897 algt = crypto_get_attr_type(tb); 778 algt = crypto_get_attr_type(tb); 898 if (IS_ERR(algt)) 779 if (IS_ERR(algt)) 899 return PTR_ERR(algt); 780 return PTR_ERR(algt); 900 781 901 if ((algt->type ^ type) & algt->mask) 782 if ((algt->type ^ type) & algt->mask) 902 return -EINVAL; 783 return -EINVAL; 903 784 904 *mask_ret = crypto_algt_inherited_mask << 905 return 0; 785 return 0; 906 } 786 } 907 EXPORT_SYMBOL_GPL(crypto_check_attr_type); 787 EXPORT_SYMBOL_GPL(crypto_check_attr_type); 908 788 909 const char *crypto_attr_alg_name(struct rtattr 789 const char *crypto_attr_alg_name(struct rtattr *rta) 910 { 790 { 911 struct crypto_attr_alg *alga; 791 struct crypto_attr_alg *alga; 912 792 913 if (!rta) 793 if (!rta) 914 return ERR_PTR(-ENOENT); 794 return ERR_PTR(-ENOENT); 915 if (RTA_PAYLOAD(rta) < sizeof(*alga)) 795 if (RTA_PAYLOAD(rta) < sizeof(*alga)) 916 return ERR_PTR(-EINVAL); 796 return ERR_PTR(-EINVAL); 917 if (rta->rta_type != CRYPTOA_ALG) 797 if (rta->rta_type != CRYPTOA_ALG) 918 return ERR_PTR(-EINVAL); 798 return ERR_PTR(-EINVAL); 919 799 920 alga = RTA_DATA(rta); 800 alga = RTA_DATA(rta); 921 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 801 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0; 922 802 923 return alga->name; 803 return alga->name; 924 } 804 } 925 EXPORT_SYMBOL_GPL(crypto_attr_alg_name); 805 EXPORT_SYMBOL_GPL(crypto_attr_alg_name); 926 806 >> 807 struct crypto_alg *crypto_attr_alg2(struct rtattr *rta, >> 808 const struct crypto_type *frontend, >> 809 u32 type, 
u32 mask) >> 810 { >> 811 const char *name; >> 812 >> 813 name = crypto_attr_alg_name(rta); >> 814 if (IS_ERR(name)) >> 815 return ERR_CAST(name); >> 816 >> 817 return crypto_find_alg(name, frontend, type, mask); >> 818 } >> 819 EXPORT_SYMBOL_GPL(crypto_attr_alg2); >> 820 >> 821 int crypto_attr_u32(struct rtattr *rta, u32 *num) >> 822 { >> 823 struct crypto_attr_u32 *nu32; >> 824 >> 825 if (!rta) >> 826 return -ENOENT; >> 827 if (RTA_PAYLOAD(rta) < sizeof(*nu32)) >> 828 return -EINVAL; >> 829 if (rta->rta_type != CRYPTOA_U32) >> 830 return -EINVAL; >> 831 >> 832 nu32 = RTA_DATA(rta); >> 833 *num = nu32->num; >> 834 >> 835 return 0; >> 836 } >> 837 EXPORT_SYMBOL_GPL(crypto_attr_u32); >> 838 927 int crypto_inst_setname(struct crypto_instance 839 int crypto_inst_setname(struct crypto_instance *inst, const char *name, 928 struct crypto_alg *alg 840 struct crypto_alg *alg) 929 { 841 { 930 if (snprintf(inst->alg.cra_name, CRYPT 842 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name, 931 alg->cra_name) >= CRYPTO_ 843 alg->cra_name) >= CRYPTO_MAX_ALG_NAME) 932 return -ENAMETOOLONG; 844 return -ENAMETOOLONG; 933 845 934 if (snprintf(inst->alg.cra_driver_name 846 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", 935 name, alg->cra_driver_nam 847 name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 936 return -ENAMETOOLONG; 848 return -ENAMETOOLONG; 937 849 938 return 0; 850 return 0; 939 } 851 } 940 EXPORT_SYMBOL_GPL(crypto_inst_setname); 852 EXPORT_SYMBOL_GPL(crypto_inst_setname); 941 853 >> 854 void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg, >> 855 unsigned int head) >> 856 { >> 857 struct crypto_instance *inst; >> 858 char *p; >> 859 int err; >> 860 >> 861 p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn), >> 862 GFP_KERNEL); >> 863 if (!p) >> 864 return ERR_PTR(-ENOMEM); >> 865 >> 866 inst = (void *)(p + head); >> 867 >> 868 err = crypto_inst_setname(inst, name, alg); >> 869 if (err) >> 870 goto err_free_inst; >> 871 >> 872 return p; >> 873 >> 874 err_free_inst: >> 875 kfree(p); >> 876 return ERR_PTR(err); >> 877 } >> 878 EXPORT_SYMBOL_GPL(crypto_alloc_instance2); >> 879 >> 880 struct crypto_instance *crypto_alloc_instance(const char *name, >> 881 struct crypto_alg *alg) >> 882 { >> 883 struct crypto_instance *inst; >> 884 struct crypto_spawn *spawn; >> 885 int err; >> 886 >> 887 inst = crypto_alloc_instance2(name, alg, 0); >> 888 if (IS_ERR(inst)) >> 889 goto out; >> 890 >> 891 spawn = crypto_instance_ctx(inst); >> 892 err = crypto_init_spawn(spawn, alg, inst, >> 893 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); >> 894 >> 895 if (err) >> 896 goto err_free_inst; >> 897 >> 898 return inst; >> 899 >> 900 err_free_inst: >> 901 kfree(inst); >> 902 inst = ERR_PTR(err); >> 903 >> 904 out: >> 905 return inst; >> 906 } >> 907 EXPORT_SYMBOL_GPL(crypto_alloc_instance); >> 908 942 void crypto_init_queue(struct crypto_queue *qu 909 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen) 943 { 910 { 944 INIT_LIST_HEAD(&queue->list); 911 INIT_LIST_HEAD(&queue->list); 945 queue->backlog = &queue->list; 912 queue->backlog = &queue->list; 946 queue->qlen = 0; 913 queue->qlen = 0; 947 queue->max_qlen = max_qlen; 914 queue->max_qlen = max_qlen; 948 } 915 } 949 EXPORT_SYMBOL_GPL(crypto_init_queue); 916 EXPORT_SYMBOL_GPL(crypto_init_queue); 950 917 951 int crypto_enqueue_request(struct crypto_queue 918 int crypto_enqueue_request(struct crypto_queue *queue, 952 struct crypto_async 919 struct crypto_async_request *request) 953 { 
920 { 954 int err = -EINPROGRESS; 921 int err = -EINPROGRESS; 955 922 956 if (unlikely(queue->qlen >= queue->max 923 if (unlikely(queue->qlen >= queue->max_qlen)) { 957 if (!(request->flags & CRYPTO_ 924 if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) { 958 err = -ENOSPC; 925 err = -ENOSPC; 959 goto out; 926 goto out; 960 } 927 } 961 err = -EBUSY; 928 err = -EBUSY; 962 if (queue->backlog == &queue-> 929 if (queue->backlog == &queue->list) 963 queue->backlog = &requ 930 queue->backlog = &request->list; 964 } 931 } 965 932 966 queue->qlen++; 933 queue->qlen++; 967 list_add_tail(&request->list, &queue-> 934 list_add_tail(&request->list, &queue->list); 968 935 969 out: 936 out: 970 return err; 937 return err; 971 } 938 } 972 EXPORT_SYMBOL_GPL(crypto_enqueue_request); 939 EXPORT_SYMBOL_GPL(crypto_enqueue_request); 973 940 974 void crypto_enqueue_request_head(struct crypto << 975 struct crypto << 976 { << 977 if (unlikely(queue->qlen >= queue->max << 978 queue->backlog = queue->backlo << 979 << 980 queue->qlen++; << 981 list_add(&request->list, &queue->list) << 982 } << 983 EXPORT_SYMBOL_GPL(crypto_enqueue_request_head) << 984 << 985 struct crypto_async_request *crypto_dequeue_re 941 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue) 986 { 942 { 987 struct list_head *request; 943 struct list_head *request; 988 944 989 if (unlikely(!queue->qlen)) 945 if (unlikely(!queue->qlen)) 990 return NULL; 946 return NULL; 991 947 992 queue->qlen--; 948 queue->qlen--; 993 949 994 if (queue->backlog != &queue->list) 950 if (queue->backlog != &queue->list) 995 queue->backlog = queue->backlo 951 queue->backlog = queue->backlog->next; 996 952 997 request = queue->list.next; 953 request = queue->list.next; 998 list_del(request); 954 list_del(request); 999 955 1000 return list_entry(request, struct cry 956 return list_entry(request, struct crypto_async_request, list); 1001 } 957 } 1002 EXPORT_SYMBOL_GPL(crypto_dequeue_request); 958 EXPORT_SYMBOL_GPL(crypto_dequeue_request); 1003 959 >> 960 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm) >> 961 { >> 962 struct crypto_async_request *req; >> 963 >> 964 list_for_each_entry(req, &queue->list, list) { >> 965 if (req->tfm == tfm) >> 966 return 1; >> 967 } >> 968 >> 969 return 0; >> 970 } >> 971 EXPORT_SYMBOL_GPL(crypto_tfm_in_queue); >> 972 1004 static inline void crypto_inc_byte(u8 *a, uns 973 static inline void crypto_inc_byte(u8 *a, unsigned int size) 1005 { 974 { 1006 u8 *b = (a + size); 975 u8 *b = (a + size); 1007 u8 c; 976 u8 c; 1008 977 1009 for (; size; size--) { 978 for (; size; size--) { 1010 c = *--b + 1; 979 c = *--b + 1; 1011 *b = c; 980 *b = c; 1012 if (c) 981 if (c) 1013 break; 982 break; 1014 } 983 } 1015 } 984 } 1016 985 1017 void crypto_inc(u8 *a, unsigned int size) 986 void crypto_inc(u8 *a, unsigned int size) 1018 { 987 { 1019 __be32 *b = (__be32 *)(a + size); 988 __be32 *b = (__be32 *)(a + size); 1020 u32 c; 989 u32 c; 1021 990 1022 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_ 991 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || 1023 IS_ALIGNED((unsigned long)b, __al 992 IS_ALIGNED((unsigned long)b, __alignof__(*b))) 1024 for (; size >= 4; size -= 4) 993 for (; size >= 4; size -= 4) { 1025 c = be32_to_cpu(*--b) 994 c = be32_to_cpu(*--b) + 1; 1026 *b = cpu_to_be32(c); 995 *b = cpu_to_be32(c); 1027 if (likely(c)) 996 if (likely(c)) 1028 return; 997 return; 1029 } 998 } 1030 999 1031 crypto_inc_byte(a, size); 1000 crypto_inc_byte(a, size); 1032 } 1001 } 1033 EXPORT_SYMBOL_GPL(crypto_inc); 
1002 EXPORT_SYMBOL_GPL(crypto_inc); 1034 1003 >> 1004 void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len) >> 1005 { >> 1006 int relalign = 0; >> 1007 >> 1008 if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) { >> 1009 int size = sizeof(unsigned long); >> 1010 int d = (((unsigned long)dst ^ (unsigned long)src1) | >> 1011 ((unsigned long)dst ^ (unsigned long)src2)) & >> 1012 (size - 1); >> 1013 >> 1014 relalign = d ? 1 << __ffs(d) : size; >> 1015 >> 1016 /* >> 1017 * If we care about alignment, process as many bytes as >> 1018 * needed to advance dst and src to values whose alignments >> 1019 * equal their relative alignment. This will allow us to >> 1020 * process the remainder of the input using optimal strides. >> 1021 */ >> 1022 while (((unsigned long)dst & (relalign - 1)) && len > 0) { >> 1023 *dst++ = *src1++ ^ *src2++; >> 1024 len--; >> 1025 } >> 1026 } >> 1027 >> 1028 while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) { >> 1029 *(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2; >> 1030 dst += 8; >> 1031 src1 += 8; >> 1032 src2 += 8; >> 1033 len -= 8; >> 1034 } >> 1035 >> 1036 while (len >= 4 && !(relalign & 3)) { >> 1037 *(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2; >> 1038 dst += 4; >> 1039 src1 += 4; >> 1040 src2 += 4; >> 1041 len -= 4; >> 1042 } >> 1043 >> 1044 while (len >= 2 && !(relalign & 1)) { >> 1045 *(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2; >> 1046 dst += 2; >> 1047 src1 += 2; >> 1048 src2 += 2; >> 1049 len -= 2; >> 1050 } >> 1051 >> 1052 while (len--) >> 1053 *dst++ = *src1++ ^ *src2++; >> 1054 } >> 1055 EXPORT_SYMBOL_GPL(__crypto_xor); >> 1056 1035 unsigned int crypto_alg_extsize(struct crypto 1057 unsigned int crypto_alg_extsize(struct crypto_alg *alg) 1036 { 1058 { 1037 return alg->cra_ctxsize + 1059 return alg->cra_ctxsize + 1038 (alg->cra_alignmask & ~(crypto 1060 (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1)); 1039 } 1061 } 1040 EXPORT_SYMBOL_GPL(crypto_alg_extsize); 1062 EXPORT_SYMBOL_GPL(crypto_alg_extsize); 1041 1063 1042 int crypto_type_has_alg(const char *name, con 1064 int crypto_type_has_alg(const char *name, const struct crypto_type *frontend, 1043 u32 type, u32 mask) 1065 u32 type, u32 mask) 1044 { 1066 { 1045 int ret = 0; 1067 int ret = 0; 1046 struct crypto_alg *alg = crypto_find_ 1068 struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask); 1047 1069 1048 if (!IS_ERR(alg)) { 1070 if (!IS_ERR(alg)) { 1049 crypto_mod_put(alg); 1071 crypto_mod_put(alg); 1050 ret = 1; 1072 ret = 1; 1051 } 1073 } 1052 1074 1053 return ret; 1075 return ret; 1054 } 1076 } 1055 EXPORT_SYMBOL_GPL(crypto_type_has_alg); 1077 EXPORT_SYMBOL_GPL(crypto_type_has_alg); 1056 1078 1057 static void __init crypto_start_tests(void) << 1058 { << 1059 if (!IS_BUILTIN(CONFIG_CRYPTO_ALGAPI) << 1060 return; << 1061 << 1062 if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_ << 1063 return; << 1064 << 1065 for (;;) { << 1066 struct crypto_larval *larval << 1067 struct crypto_alg *q; << 1068 << 1069 down_write(&crypto_alg_sem); << 1070 << 1071 list_for_each_entry(q, &crypt << 1072 struct crypto_larval << 1073 << 1074 if (!crypto_is_larval << 1075 continue; << 1076 << 1077 l = (void *)q; << 1078 << 1079 if (!crypto_is_test_l << 1080 continue; << 1081 << 1082 if (l->test_started) << 1083 continue; << 1084 << 1085 l->test_started = tru << 1086 larval = l; << 1087 break; << 1088 } << 1089 << 1090 up_write(&crypto_alg_sem); << 1091 << 1092 if (!larval) << 1093 break; << 1094 << 1095 crypto_wait_for_test(larval); << 1096 } << 1097 << 1098 
set_crypto_boot_test_finished(); << 1099 } << 1100 << 1101 static int __init crypto_algapi_init(void) 1079 static int __init crypto_algapi_init(void) 1102 { 1080 { 1103 crypto_init_proc(); 1081 crypto_init_proc(); 1104 crypto_start_tests(); << 1105 return 0; 1082 return 0; 1106 } 1083 } 1107 1084 1108 static void __exit crypto_algapi_exit(void) 1085 static void __exit crypto_algapi_exit(void) 1109 { 1086 { 1110 crypto_exit_proc(); 1087 crypto_exit_proc(); 1111 } 1088 } 1112 1089 1113 /* !! 1090 module_init(crypto_algapi_init); 1114 * We run this at late_initcall so that all t << 1115 * have had a chance to register themselves f << 1116 */ << 1117 late_initcall(crypto_algapi_init); << 1118 module_exit(crypto_algapi_exit); 1091 module_exit(crypto_algapi_exit); 1119 1092 1120 MODULE_LICENSE("GPL"); 1093 MODULE_LICENSE("GPL"); 1121 MODULE_DESCRIPTION("Cryptographic algorithms 1094 MODULE_DESCRIPTION("Cryptographic algorithms API"); 1122 MODULE_SOFTDEP("pre: cryptomgr"); << 1123 1095
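/*
 * Editorial example (not part of either revision shown above): the main
 * entry points exported by this file are crypto_register_alg() and
 * crypto_unregister_alg(), normally driven from a module's init/exit
 * hooks. Below is a minimal sketch of such a module. "xor128", its
 * byte-wise XOR transform and the chosen priority are invented
 * placeholders for illustration only, not a real or secure algorithm.
 */
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/string.h>

#define XOR128_BLOCK_SIZE	16
#define XOR128_KEY_SIZE		16

struct xor128_ctx {
	u8 key[XOR128_KEY_SIZE];
};

static int xor128_setkey(struct crypto_tfm *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct xor128_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != XOR128_KEY_SIZE)
		return -EINVAL;
	memcpy(ctx->key, key, keylen);
	return 0;
}

static void xor128_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct xor128_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Self-inverse placeholder transform: dst = src ^ key. */
	crypto_xor_cpy(dst, src, ctx->key, XOR128_BLOCK_SIZE);
}

static struct crypto_alg xor128_alg = {
	.cra_name		= "xor128",
	.cra_driver_name	= "xor128-generic",
	.cra_priority		= 100,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= XOR128_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct xor128_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u.cipher		= {
		.cia_min_keysize	= XOR128_KEY_SIZE,
		.cia_max_keysize	= XOR128_KEY_SIZE,
		.cia_setkey		= xor128_setkey,
		.cia_encrypt		= xor128_crypt,
		.cia_decrypt		= xor128_crypt,
	},
};

static int __init xor128_mod_init(void)
{
	return crypto_register_alg(&xor128_alg);
}

static void __exit xor128_mod_exit(void)
{
	crypto_unregister_alg(&xor128_alg);
}

module_init(xor128_mod_init);
module_exit(xor128_mod_exit);
MODULE_LICENSE("GPL");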
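/*
 * Editorial example (not part of the file): crypto_init_queue(),
 * crypto_enqueue_request() and crypto_dequeue_request() implement the
 * bounded request queue with backlog that many hardware drivers use.
 * The structure below, its lock and the queue depth of 16 are assumed
 * names and values for illustration.
 */
#include <crypto/algapi.h>
#include <linux/spinlock.h>

struct demo_engine {
	struct crypto_queue queue;
	spinlock_t lock;
};

static void demo_engine_init(struct demo_engine *e)
{
	spin_lock_init(&e->lock);
	crypto_init_queue(&e->queue, 16);
}

static int demo_engine_queue_req(struct demo_engine *e,
				 struct crypto_async_request *req)
{
	int err;

	spin_lock_bh(&e->lock);
	/*
	 * -EINPROGRESS: queued normally.
	 * -EBUSY:       queue full, request parked on the backlog because
	 *               CRYPTO_TFM_REQ_MAY_BACKLOG was set.
	 * -ENOSPC:      queue full and backlogging not allowed.
	 */
	err = crypto_enqueue_request(&e->queue, req);
	spin_unlock_bh(&e->lock);

	return err;
}

static struct crypto_async_request *demo_engine_next_req(struct demo_engine *e)
{
	struct crypto_async_request *req;

	spin_lock_bh(&e->lock);
	req = crypto_dequeue_request(&e->queue);	/* NULL when empty */
	spin_unlock_bh(&e->lock);

	return req;
}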
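/*
 * Editorial example (not part of the file): crypto_inc() increments a
 * buffer interpreted as a single big-endian integer, with the carry
 * rippling up from the last byte; this is how CTR-style counter blocks
 * are typically stepped. The helper name and the 16-byte block size
 * below are illustrative assumptions.
 */
#include <crypto/algapi.h>
#include <linux/types.h>

static void demo_step_ctr_blocks(u8 ctrblk[16], unsigned int nblocks)
{
	unsigned int i;

	for (i = 0; i < nblocks; i++) {
		/* ... generate/consume the keystream for ctrblk here ... */
		crypto_inc(ctrblk, 16);
	}
}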