// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Single-block cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

/*
 * Slow path for crypto_cipher_setkey(): the caller's key pointer does not
 * satisfy the algorithm's alignment mask, so bounce the key through a
 * freshly allocated, suitably aligned buffer before handing it to
 * ->cia_setkey().  The bounce buffer holds key material, hence
 * kfree_sensitive() is used so it is zeroized before being freed.
 */
static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	/* Over-allocate by alignmask so an aligned start can be found. */
	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;

}

/*
 * Set the key for a single-block cipher transform.
 *
 * Rejects keys outside the algorithm's [cia_min_keysize, cia_max_keysize]
 * range with -EINVAL, then dispatches to the aligned fast path or the
 * unaligned bounce-buffer path depending on the key pointer's alignment.
 * Returns 0 or a negative errno from the algorithm's ->cia_setkey().
 */
int crypto_cipher_setkey(struct crypto_cipher *tfm,
			 const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);

/*
 * Common helper for one-block encrypt/decrypt: @enc selects between
 * ->cia_encrypt() and ->cia_decrypt().  If either @dst or @src violates
 * the algorithm's alignment mask, the block is copied through an aligned
 * on-stack buffer (sized for the worst-case blocksize plus alignmask)
 * and processed in place there; otherwise the cipher runs directly on
 * the caller's buffers.
 */
static inline void cipher_crypt_one(struct crypto_cipher *tfm,
				    u8 *dst, const u8 *src, bool enc)
{
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		enc ? cia->cia_encrypt : cia->cia_decrypt;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		unsigned int bs = crypto_cipher_blocksize(tfm);
		u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

		memcpy(tmp, src, bs);
		fn(crypto_cipher_tfm(tfm), tmp, tmp);
		memcpy(dst, tmp, bs);
	} else {
		fn(crypto_cipher_tfm(tfm), dst, src);
	}
}

/* Encrypt exactly one block from @src into @dst. */
void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, true);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);

/* Decrypt exactly one block from @src into @dst. */
void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, false);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);

/*
 * Clone an existing cipher transform: allocate a fresh tfm for the same
 * algorithm (GFP_ATOMIC, so usable from atomic context) and copy over the
 * crt_flags.  Algorithms with a ->cra_init hook are refused with -ENOSYS
 * (presumably because per-tfm init state cannot be duplicated here —
 * NOTE(review): confirm against callers).  Returns -ESTALE if the
 * algorithm's module reference cannot be taken.  On tfm allocation
 * failure the module reference is dropped before propagating the error.
 */
struct crypto_cipher *crypto_clone_cipher(struct crypto_cipher *cipher)
{
	struct crypto_tfm *tfm = crypto_cipher_tfm(cipher);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto_cipher *ncipher;
	struct crypto_tfm *ntfm;

	if (alg->cra_init)
		return ERR_PTR(-ENOSYS);

	if (unlikely(!crypto_mod_get(alg)))
		return ERR_PTR(-ESTALE);

	ntfm = __crypto_alloc_tfmgfp(alg, CRYPTO_ALG_TYPE_CIPHER,
				     CRYPTO_ALG_TYPE_MASK, GFP_ATOMIC);
	if (IS_ERR(ntfm)) {
		crypto_mod_put(alg);
		return ERR_CAST(ntfm);
	}

	ntfm->crt_flags = tfm->crt_flags;

	ncipher = __crypto_cipher_cast(ntfm);

	return ncipher;
}
EXPORT_SYMBOL_GPL(crypto_clone_cipher);
Linux® is a registered trademark of Linus Torvalds in the United States and other countries.
TOMOYO® is a registered trademark of NTT DATA CORPORATION.