1 // SPDX-License-Identifier: GPL-2.0-or-later 1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 2 /* 3 * Cryptographic API. 3 * Cryptographic API. 4 * 4 * 5 * Single-block cipher operations. 5 * Single-block cipher operations. 6 * 6 * 7 * Copyright (c) 2002 James Morris <jmorris@in 7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 8 * Copyright (c) 2005 Herbert Xu <herbert@gond 8 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> 9 */ 9 */ 10 10 11 #include <crypto/algapi.h> 11 #include <crypto/algapi.h> 12 #include <crypto/internal/cipher.h> 12 #include <crypto/internal/cipher.h> 13 #include <linux/kernel.h> 13 #include <linux/kernel.h> 14 #include <linux/crypto.h> 14 #include <linux/crypto.h> 15 #include <linux/errno.h> 15 #include <linux/errno.h> 16 #include <linux/slab.h> 16 #include <linux/slab.h> 17 #include <linux/string.h> 17 #include <linux/string.h> 18 #include "internal.h" 18 #include "internal.h" 19 19 20 static int setkey_unaligned(struct crypto_ciph 20 static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key, 21 unsigned int keyle 21 unsigned int keylen) 22 { 22 { 23 struct cipher_alg *cia = crypto_cipher 23 struct cipher_alg *cia = crypto_cipher_alg(tfm); 24 unsigned long alignmask = crypto_ciphe 24 unsigned long alignmask = crypto_cipher_alignmask(tfm); 25 int ret; 25 int ret; 26 u8 *buffer, *alignbuffer; 26 u8 *buffer, *alignbuffer; 27 unsigned long absize; 27 unsigned long absize; 28 28 29 absize = keylen + alignmask; 29 absize = keylen + alignmask; 30 buffer = kmalloc(absize, GFP_ATOMIC); 30 buffer = kmalloc(absize, GFP_ATOMIC); 31 if (!buffer) 31 if (!buffer) 32 return -ENOMEM; 32 return -ENOMEM; 33 33 34 alignbuffer = (u8 *)ALIGN((unsigned lo 34 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); 35 memcpy(alignbuffer, key, keylen); 35 memcpy(alignbuffer, key, keylen); 36 ret = cia->cia_setkey(crypto_cipher_tf 36 ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen); 37 
kfree_sensitive(buffer); !! 37 memset(alignbuffer, 0, keylen); >> 38 kfree(buffer); 38 return ret; 39 return ret; 39 40 40 } 41 } 41 42 42 int crypto_cipher_setkey(struct crypto_cipher 43 int crypto_cipher_setkey(struct crypto_cipher *tfm, 43 const u8 *key, unsign 44 const u8 *key, unsigned int keylen) 44 { 45 { 45 struct cipher_alg *cia = crypto_cipher 46 struct cipher_alg *cia = crypto_cipher_alg(tfm); 46 unsigned long alignmask = crypto_ciphe 47 unsigned long alignmask = crypto_cipher_alignmask(tfm); 47 48 48 if (keylen < cia->cia_min_keysize || k 49 if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) 49 return -EINVAL; 50 return -EINVAL; 50 51 51 if ((unsigned long)key & alignmask) 52 if ((unsigned long)key & alignmask) 52 return setkey_unaligned(tfm, k 53 return setkey_unaligned(tfm, key, keylen); 53 54 54 return cia->cia_setkey(crypto_cipher_t 55 return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen); 55 } 56 } 56 EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRY 57 EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL); 57 58 58 static inline void cipher_crypt_one(struct cry 59 static inline void cipher_crypt_one(struct crypto_cipher *tfm, 59 u8 *dst, c 60 u8 *dst, const u8 *src, bool enc) 60 { 61 { 61 unsigned long alignmask = crypto_ciphe 62 unsigned long alignmask = crypto_cipher_alignmask(tfm); 62 struct cipher_alg *cia = crypto_cipher 63 struct cipher_alg *cia = crypto_cipher_alg(tfm); 63 void (*fn)(struct crypto_tfm *, u8 *, 64 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 64 enc ? cia->cia_encrypt : cia-> 65 enc ? 
cia->cia_encrypt : cia->cia_decrypt; 65 66 66 if (unlikely(((unsigned long)dst | (un 67 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { 67 unsigned int bs = crypto_ciphe 68 unsigned int bs = crypto_cipher_blocksize(tfm); 68 u8 buffer[MAX_CIPHER_BLOCKSIZE 69 u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK]; 69 u8 *tmp = (u8 *)ALIGN((unsigne 70 u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); 70 71 71 memcpy(tmp, src, bs); 72 memcpy(tmp, src, bs); 72 fn(crypto_cipher_tfm(tfm), tmp 73 fn(crypto_cipher_tfm(tfm), tmp, tmp); 73 memcpy(dst, tmp, bs); 74 memcpy(dst, tmp, bs); 74 } else { 75 } else { 75 fn(crypto_cipher_tfm(tfm), dst 76 fn(crypto_cipher_tfm(tfm), dst, src); 76 } 77 } 77 } 78 } 78 79 79 void crypto_cipher_encrypt_one(struct crypto_c 80 void crypto_cipher_encrypt_one(struct crypto_cipher *tfm, 80 u8 *dst, const 81 u8 *dst, const u8 *src) 81 { 82 { 82 cipher_crypt_one(tfm, dst, src, true); 83 cipher_crypt_one(tfm, dst, src, true); 83 } 84 } 84 EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one 85 EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL); 85 86 86 void crypto_cipher_decrypt_one(struct crypto_c 87 void crypto_cipher_decrypt_one(struct crypto_cipher *tfm, 87 u8 *dst, const 88 u8 *dst, const u8 *src) 88 { 89 { 89 cipher_crypt_one(tfm, dst, src, false) 90 cipher_crypt_one(tfm, dst, src, false); 90 } 91 } 91 EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one 92 EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL); 92 93 93 struct crypto_cipher *crypto_clone_cipher(stru 94 struct crypto_cipher *crypto_clone_cipher(struct crypto_cipher *cipher) 94 { 95 { 95 struct crypto_tfm *tfm = crypto_cipher 96 struct crypto_tfm *tfm = crypto_cipher_tfm(cipher); 96 struct crypto_alg *alg = tfm->__crt_al 97 struct crypto_alg *alg = tfm->__crt_alg; 97 struct crypto_cipher *ncipher; 98 struct crypto_cipher *ncipher; 98 struct crypto_tfm *ntfm; 99 struct crypto_tfm *ntfm; 99 100 100 if (alg->cra_init) 101 if 
(alg->cra_init) 101 return ERR_PTR(-ENOSYS); 102 return ERR_PTR(-ENOSYS); 102 103 103 if (unlikely(!crypto_mod_get(alg))) 104 if (unlikely(!crypto_mod_get(alg))) 104 return ERR_PTR(-ESTALE); 105 return ERR_PTR(-ESTALE); 105 106 106 ntfm = __crypto_alloc_tfmgfp(alg, CRYP 107 ntfm = __crypto_alloc_tfmgfp(alg, CRYPTO_ALG_TYPE_CIPHER, 107 CRYPTO_AL 108 CRYPTO_ALG_TYPE_MASK, GFP_ATOMIC); 108 if (IS_ERR(ntfm)) { 109 if (IS_ERR(ntfm)) { 109 crypto_mod_put(alg); 110 crypto_mod_put(alg); 110 return ERR_CAST(ntfm); 111 return ERR_CAST(ntfm); 111 } 112 } 112 113 113 ntfm->crt_flags = tfm->crt_flags; 114 ntfm->crt_flags = tfm->crt_flags; 114 115 115 ncipher = __crypto_cipher_cast(ntfm); 116 ncipher = __crypto_cipher_cast(ntfm); 116 117 117 return ncipher; 118 return ncipher; 118 } 119 } 119 EXPORT_SYMBOL_GPL(crypto_clone_cipher); 120 EXPORT_SYMBOL_GPL(crypto_clone_cipher); 120 121
/*
 * Linux® is a registered trademark of Linus Torvalds in the United States
 * and other countries.
 * TOMOYO® is a registered trademark of NTT DATA CORPORATION.
 */