
TOMOYO Linux Cross Reference
Linux/crypto/crypto_engine.c


Diff markup

Differences between /crypto/crypto_engine.c (Version linux-6.11.5) and /crypto/crypto_engine.c (Version linux-2.4.37.11). The file does not exist in linux-2.4.37.11, so the comparison reduces to the complete linux-6.11.5 source, reproduced below.


// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Handle async block request by crypto hardware engine.
 *
 * Copyright (C) 2016 Linaro, Inc.
 *
 * Author: Baolin Wang <baolin.wang@linaro.org>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/akcipher.h>
#include <crypto/internal/engine.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/kpp.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <uapi/linux/sched/types.h>
#include "internal.h"

#define CRYPTO_ENGINE_MAX_QLEN 10

/* Temporary algorithm flag used to indicate an updated driver. */
#define CRYPTO_ALG_ENGINE 0x200

struct crypto_engine_alg {
	struct crypto_alg base;
	struct crypto_engine_op op;
};
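
The per-type wrappers used by the registration helpers later in this file (struct aead_engine_alg, struct ahash_engine_alg, struct skcipher_engine_alg, and so on) follow the same pattern as crypto_engine_alg above: a conventional algorithm descriptor paired with a struct crypto_engine_op. A minimal sketch of their shape, paraphrased from include/crypto/engine.h rather than quoted from it:

/* Sketch only; see include/crypto/engine.h for the authoritative definitions. */
struct crypto_engine_op {
	int (*do_one_request)(struct crypto_engine *engine, void *areq);
};

struct skcipher_engine_alg {
	struct skcipher_alg base;
	struct crypto_engine_op op;
};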

/**
 * crypto_finalize_request - finalize one request if the request is done
 * @engine: the hardware engine
 * @req: the request need to be finalized
 * @err: error number
 */
static void crypto_finalize_request(struct crypto_engine *engine,
				    struct crypto_async_request *req, int err)
{
	unsigned long flags;

	/*
	 * If hardware cannot enqueue more requests
	 * and retry mechanism is not supported
	 * make sure we are completing the current request
	 */
	if (!engine->retry_support) {
		spin_lock_irqsave(&engine->queue_lock, flags);
		if (engine->cur_req == req) {
			engine->cur_req = NULL;
		}
		spin_unlock_irqrestore(&engine->queue_lock, flags);
	}

	lockdep_assert_in_softirq();
	crypto_request_complete(req, err);

	kthread_queue_work(engine->kworker, &engine->pump_requests);
}

/**
 * crypto_pump_requests - dequeue one request from engine queue to process
 * @engine: the hardware engine
 * @in_kthread: true if we are in the context of the request pump thread
 *
 * This function checks if there is any request in the engine queue that
 * needs processing and if so call out to the driver to initialize hardware
 * and handle each request.
 */
static void crypto_pump_requests(struct crypto_engine *engine,
				 bool in_kthread)
{
	struct crypto_async_request *async_req, *backlog;
	struct crypto_engine_alg *alg;
	struct crypto_engine_op *op;
	unsigned long flags;
	bool was_busy = false;
	int ret;

	spin_lock_irqsave(&engine->queue_lock, flags);

	/* Make sure we are not already running a request */
	if (!engine->retry_support && engine->cur_req)
		goto out;

	/* If another context is idling then defer */
	if (engine->idling) {
		kthread_queue_work(engine->kworker, &engine->pump_requests);
		goto out;
	}

	/* Check if the engine queue is idle */
	if (!crypto_queue_len(&engine->queue) || !engine->running) {
		if (!engine->busy)
			goto out;

		/* Only do teardown in the thread */
		if (!in_kthread) {
			kthread_queue_work(engine->kworker,
					   &engine->pump_requests);
			goto out;
		}

		engine->busy = false;
		engine->idling = true;
		spin_unlock_irqrestore(&engine->queue_lock, flags);

		if (engine->unprepare_crypt_hardware &&
		    engine->unprepare_crypt_hardware(engine))
			dev_err(engine->dev, "failed to unprepare crypt hardware\n");

		spin_lock_irqsave(&engine->queue_lock, flags);
		engine->idling = false;
		goto out;
	}

start_request:
	/* Get the first request from the engine queue to handle */
	backlog = crypto_get_backlog(&engine->queue);
	async_req = crypto_dequeue_request(&engine->queue);
	if (!async_req)
		goto out;

	/*
	 * If hardware doesn't support the retry mechanism,
	 * keep track of the request we are processing now.
	 * We'll need it on completion (crypto_finalize_request).
	 */
	if (!engine->retry_support)
		engine->cur_req = async_req;

	if (engine->busy)
		was_busy = true;
	else
		engine->busy = true;

	spin_unlock_irqrestore(&engine->queue_lock, flags);

	/* Until here we get the request need to be encrypted successfully */
	if (!was_busy && engine->prepare_crypt_hardware) {
		ret = engine->prepare_crypt_hardware(engine);
		if (ret) {
			dev_err(engine->dev, "failed to prepare crypt hardware\n");
			goto req_err_1;
		}
	}

	if (async_req->tfm->__crt_alg->cra_flags & CRYPTO_ALG_ENGINE) {
		alg = container_of(async_req->tfm->__crt_alg,
				   struct crypto_engine_alg, base);
		op = &alg->op;
	} else {
		dev_err(engine->dev, "failed to do request\n");
		ret = -EINVAL;
		goto req_err_1;
	}

	ret = op->do_one_request(engine, async_req);

	/* Request unsuccessfully executed by hardware */
	if (ret < 0) {
		/*
		 * If hardware queue is full (-ENOSPC), requeue request
		 * regardless of backlog flag.
		 * Otherwise, unprepare and complete the request.
		 */
		if (!engine->retry_support ||
		    (ret != -ENOSPC)) {
			dev_err(engine->dev,
				"Failed to do one request from queue: %d\n",
				ret);
			goto req_err_1;
		}
		spin_lock_irqsave(&engine->queue_lock, flags);
		/*
		 * If hardware was unable to execute request, enqueue it
		 * back in front of crypto-engine queue, to keep the order
		 * of requests.
		 */
		crypto_enqueue_request_head(&engine->queue, async_req);

		kthread_queue_work(engine->kworker, &engine->pump_requests);
		goto out;
	}

	goto retry;

req_err_1:
	crypto_request_complete(async_req, ret);

retry:
	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	/* If retry mechanism is supported, send new requests to engine */
	if (engine->retry_support) {
		spin_lock_irqsave(&engine->queue_lock, flags);
		goto start_request;
	}
	return;

out:
	spin_unlock_irqrestore(&engine->queue_lock, flags);

	/*
	 * Batch requests is possible only if
	 * hardware can enqueue multiple requests
	 */
	if (engine->do_batch_requests) {
		ret = engine->do_batch_requests(engine);
		if (ret)
			dev_err(engine->dev, "failed to do batch requests: %d\n",
				ret);
	}

	return;
}
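
To illustrate the -ENOSPC convention that crypto_pump_requests() handles above: when the engine was created with retry_support set, a driver's do_one_request callback can report a full hardware queue and the request will be re-queued at the head of the engine queue. A hedged sketch, with hypothetical myhw_* helpers standing in for driver-specific code:

/* Hypothetical driver callback; myhw_queue_full() and myhw_submit() are placeholders. */
static int myhw_do_one_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);

	/* Hardware queue full: let the engine requeue this request at the head. */
	if (myhw_queue_full())
		return -ENOSPC;

	/* Submit asynchronously; the driver later calls
	 * crypto_finalize_skcipher_request() once the hardware is done. */
	return myhw_submit(req);
}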

static void crypto_pump_work(struct kthread_work *work)
{
	struct crypto_engine *engine =
		container_of(work, struct crypto_engine, pump_requests);

	crypto_pump_requests(engine, true);
}

/**
 * crypto_transfer_request - transfer the new request into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 * @need_pump: indicates whether queue the pump of request to kthread_work
 */
static int crypto_transfer_request(struct crypto_engine *engine,
				   struct crypto_async_request *req,
				   bool need_pump)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&engine->queue_lock, flags);

	if (!engine->running) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		return -ESHUTDOWN;
	}

	ret = crypto_enqueue_request(&engine->queue, req);

	if (!engine->busy && need_pump)
		kthread_queue_work(engine->kworker, &engine->pump_requests);

	spin_unlock_irqrestore(&engine->queue_lock, flags);
	return ret;
}

/**
 * crypto_transfer_request_to_engine - transfer one request to list
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
static int crypto_transfer_request_to_engine(struct crypto_engine *engine,
					     struct crypto_async_request *req)
{
	return crypto_transfer_request(engine, req, true);
}

/**
 * crypto_transfer_aead_request_to_engine - transfer one aead_request
 * to list into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
					   struct aead_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_aead_request_to_engine);

/**
 * crypto_transfer_akcipher_request_to_engine - transfer one akcipher_request
 * to list into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
					       struct akcipher_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_akcipher_request_to_engine);

/**
 * crypto_transfer_hash_request_to_engine - transfer one ahash_request
 * to list into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
					   struct ahash_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_hash_request_to_engine);

/**
 * crypto_transfer_kpp_request_to_engine - transfer one kpp_request to list
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
int crypto_transfer_kpp_request_to_engine(struct crypto_engine *engine,
					  struct kpp_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_kpp_request_to_engine);

/**
 * crypto_transfer_skcipher_request_to_engine - transfer one skcipher_request
 * to list into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
					       struct skcipher_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request_to_engine);
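
As a usage note (not part of this file): a driver's skcipher .encrypt or .decrypt handler normally just hands the request over to the engine and returns the enqueue status (-EINPROGRESS, or -EBUSY when the request was backlogged). A minimal sketch with hypothetical myhw_* names:

/* Hypothetical driver entry point; myhw_dev and myhw_from_request() are placeholders. */
static int myhw_skcipher_encrypt(struct skcipher_request *req)
{
	struct myhw_dev *hw = myhw_from_request(req);

	/* Queues the request; the engine kworker later invokes the algorithm's
	 * do_one_request() via crypto_pump_requests(). */
	return crypto_transfer_skcipher_request_to_engine(hw->engine, req);
}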

/**
 * crypto_finalize_aead_request - finalize one aead_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request need to be finalized
 * @err: error number
 */
void crypto_finalize_aead_request(struct crypto_engine *engine,
				  struct aead_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_aead_request);

/**
 * crypto_finalize_akcipher_request - finalize one akcipher_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request need to be finalized
 * @err: error number
 */
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
				      struct akcipher_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_akcipher_request);

/**
 * crypto_finalize_hash_request - finalize one ahash_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request need to be finalized
 * @err: error number
 */
void crypto_finalize_hash_request(struct crypto_engine *engine,
				  struct ahash_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_hash_request);

/**
 * crypto_finalize_kpp_request - finalize one kpp_request if the request is done
 * @engine: the hardware engine
 * @req: the request need to be finalized
 * @err: error number
 */
void crypto_finalize_kpp_request(struct crypto_engine *engine,
				 struct kpp_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_kpp_request);

/**
 * crypto_finalize_skcipher_request - finalize one skcipher_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request need to be finalized
 * @err: error number
 */
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
				      struct skcipher_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_skcipher_request);
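
On the completion side, the driver reports the hardware result through the matching finalize helper, typically from softirq context (a tasklet or similar), as the lockdep_assert_in_softirq() in crypto_finalize_request() above suggests. A hedged sketch with hypothetical names:

/* Hypothetical completion path; myhw_dev and its cur_req tracking are placeholders. */
static void myhw_done_tasklet(struct myhw_dev *hw, int err)
{
	struct skcipher_request *req = hw->cur_req;

	/* Completes the request and queues the pump for the next one. */
	crypto_finalize_skcipher_request(hw->engine, req, err);
}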

/**
 * crypto_engine_start - start the hardware engine
 * @engine: the hardware engine need to be started
 *
 * Return 0 on success, else on fail.
 */
int crypto_engine_start(struct crypto_engine *engine)
{
	unsigned long flags;

	spin_lock_irqsave(&engine->queue_lock, flags);

	if (engine->running || engine->busy) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		return -EBUSY;
	}

	engine->running = true;
	spin_unlock_irqrestore(&engine->queue_lock, flags);

	kthread_queue_work(engine->kworker, &engine->pump_requests);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_engine_start);

/**
 * crypto_engine_stop - stop the hardware engine
 * @engine: the hardware engine need to be stopped
 *
 * Return 0 on success, else on fail.
 */
int crypto_engine_stop(struct crypto_engine *engine)
{
	unsigned long flags;
	unsigned int limit = 500;
	int ret = 0;

	spin_lock_irqsave(&engine->queue_lock, flags);

	/*
	 * If the engine queue is not empty or the engine is on busy state,
	 * we need to wait for a while to pump the requests of engine queue.
	 */
	while ((crypto_queue_len(&engine->queue) || engine->busy) && limit--) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		msleep(20);
		spin_lock_irqsave(&engine->queue_lock, flags);
	}

	if (crypto_queue_len(&engine->queue) || engine->busy)
		ret = -EBUSY;
	else
		engine->running = false;

	spin_unlock_irqrestore(&engine->queue_lock, flags);

	if (ret)
		dev_warn(engine->dev, "could not stop engine\n");

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_stop);

/**
 * crypto_engine_alloc_init_and_set - allocate crypto hardware engine structure
 * and initialize it by setting the maximum number of entries in the software
 * crypto-engine queue.
 * @dev: the device attached with one hardware engine
 * @retry_support: whether hardware has support for retry mechanism
 * @cbk_do_batch: pointer to a callback function to be invoked when executing
 *                a batch of requests.
 *                This has the form:
 *                callback(struct crypto_engine *engine)
 *                where:
 *                engine: the crypto engine structure.
 * @rt: whether this queue is set to run as a realtime task
 * @qlen: maximum size of the crypto-engine queue
 *
 * This must be called from context that can sleep.
 * Return: the crypto engine structure on success, else NULL.
 */
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
						       bool retry_support,
						       int (*cbk_do_batch)(struct crypto_engine *engine),
						       bool rt, int qlen)
{
	struct crypto_engine *engine;

	if (!dev)
		return NULL;

	engine = devm_kzalloc(dev, sizeof(*engine), GFP_KERNEL);
	if (!engine)
		return NULL;

	engine->dev = dev;
	engine->rt = rt;
	engine->running = false;
	engine->busy = false;
	engine->idling = false;
	engine->retry_support = retry_support;
	engine->priv_data = dev;
	/*
	 * Batch requests is possible only if
	 * hardware has support for retry mechanism.
	 */
	engine->do_batch_requests = retry_support ? cbk_do_batch : NULL;

	snprintf(engine->name, sizeof(engine->name),
		 "%s-engine", dev_name(dev));

	crypto_init_queue(&engine->queue, qlen);
	spin_lock_init(&engine->queue_lock);

	engine->kworker = kthread_create_worker(0, "%s", engine->name);
	if (IS_ERR(engine->kworker)) {
		dev_err(dev, "failed to create crypto request pump task\n");
		return NULL;
	}
	kthread_init_work(&engine->pump_requests, crypto_pump_work);

	if (engine->rt) {
		dev_info(dev, "will run requests pump with realtime priority\n");
		sched_set_fifo(engine->kworker->task);
	}

	return engine;
}
EXPORT_SYMBOL_GPL(crypto_engine_alloc_init_and_set);

/**
 * crypto_engine_alloc_init - allocate crypto hardware engine structure and
 * initialize it.
 * @dev: the device attached with one hardware engine
 * @rt: whether this queue is set to run as a realtime task
 *
 * This must be called from context that can sleep.
 * Return: the crypto engine structure on success, else NULL.
 */
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt)
{
	return crypto_engine_alloc_init_and_set(dev, false, NULL, rt,
						CRYPTO_ENGINE_MAX_QLEN);
}
EXPORT_SYMBOL_GPL(crypto_engine_alloc_init);

/**
 * crypto_engine_exit - free the resources of hardware engine when exit
 * @engine: the hardware engine need to be freed
 */
void crypto_engine_exit(struct crypto_engine *engine)
{
	int ret;

	ret = crypto_engine_stop(engine);
	if (ret)
		return;

	kthread_destroy_worker(engine->kworker);
}
EXPORT_SYMBOL_GPL(crypto_engine_exit);
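
Taken together, the lifecycle helpers above are typically used once at device init and teardown. A minimal sketch, assuming a hypothetical myhw_dev driver structure:

/* Hypothetical init/teardown flow; myhw_dev is a placeholder. */
static int myhw_engine_init(struct device *dev, struct myhw_dev *hw)
{
	/* Simplest case: no retry support, no batch callback, default queue length. */
	hw->engine = crypto_engine_alloc_init(dev, true);
	if (!hw->engine)
		return -ENOMEM;

	/* Mark the engine running so transfers are accepted. */
	return crypto_engine_start(hw->engine);
}

static void myhw_engine_cleanup(struct myhw_dev *hw)
{
	/* Waits for the queue to drain, then destroys the kworker. */
	crypto_engine_exit(hw->engine);
}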

int crypto_engine_register_aead(struct aead_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_aead(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_aead);

void crypto_engine_unregister_aead(struct aead_engine_alg *alg)
{
	crypto_unregister_aead(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_aead);

int crypto_engine_register_aeads(struct aead_engine_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_aead(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_aeads(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_aeads);

void crypto_engine_unregister_aeads(struct aead_engine_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_aead(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_aeads);

int crypto_engine_register_ahash(struct ahash_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.halg.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_ahash(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_ahash);

void crypto_engine_unregister_ahash(struct ahash_engine_alg *alg)
{
	crypto_unregister_ahash(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_ahash);

int crypto_engine_register_ahashes(struct ahash_engine_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_ahashes(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_ahashes);

void crypto_engine_unregister_ahashes(struct ahash_engine_alg *algs,
				      int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_ahashes);

int crypto_engine_register_akcipher(struct akcipher_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_akcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_akcipher);

void crypto_engine_unregister_akcipher(struct akcipher_engine_alg *alg)
{
	crypto_unregister_akcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_akcipher);

int crypto_engine_register_kpp(struct kpp_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_kpp(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_kpp);

void crypto_engine_unregister_kpp(struct kpp_engine_alg *alg)
{
	crypto_unregister_kpp(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_kpp);

int crypto_engine_register_skcipher(struct skcipher_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_skcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_skcipher);

void crypto_engine_unregister_skcipher(struct skcipher_engine_alg *alg)
{
	return crypto_unregister_skcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_skcipher);

int crypto_engine_register_skciphers(struct skcipher_engine_alg *algs,
				     int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_skcipher(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_skciphers(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_skciphers);

void crypto_engine_unregister_skciphers(struct skcipher_engine_alg *algs,
					int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_skcipher(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_skciphers);
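
Finally, algorithms intended for this framework are described with the *_engine_alg wrappers so the do_one_request callback travels with the algorithm. A hedged sketch of a one-entry skcipher table; the myhw_* callbacks and the exact cra_* values are illustrative, not taken from this file:

/* Hypothetical algorithm table; the setkey/encrypt/decrypt/do_one_request callbacks are placeholders. */
static struct skcipher_engine_alg myhw_algs[] = {
	{
		.base = {
			.base = {
				.cra_name		= "cbc(aes)",
				.cra_driver_name	= "cbc-aes-myhw",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_ASYNC,
				.cra_blocksize		= AES_BLOCK_SIZE,
				.cra_module		= THIS_MODULE,
			},
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= myhw_aes_setkey,
			.encrypt	= myhw_skcipher_encrypt,
			.decrypt	= myhw_skcipher_decrypt,
		},
		.op.do_one_request	= myhw_do_one_request,
	},
};

Registration is then a single call to crypto_engine_register_skciphers(myhw_algs, ARRAY_SIZE(myhw_algs)), which sets CRYPTO_ALG_ENGINE on each entry and unwinds already-registered entries on failure, with crypto_engine_unregister_skciphers() as the counterpart at removal time.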

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Crypto hardware engine framework");
