/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/completion.h>
#include <linux/refcount.h>
#include <linux/slab.h>
#include <linux/types.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_LSKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_AKCIPHER	0x00000006
#define CRYPTO_ALG_TYPE_SIG		0x00000007
#define CRYPTO_ALG_TYPE_KPP		0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS	0x0000000a
#define CRYPTO_ALG_TYPE_SCOMPRESS	0x0000000b
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_HASH		0x0000000e
#define CRYPTO_ALG_TYPE_SHASH		0x0000000e
#define CRYPTO_ALG_TYPE_AHASH		0x0000000f

#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK	0x0000000e

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set if the algorithm (or an algorithm which it uses) requires another
 * algorithm of the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */
#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/* Set this bit if the algorithm provided is hardware accelerated but
 * not available to userspace via instruction set or so.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000
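
/*
 * Illustrative sketch (not part of this header's API contract): the
 * type/mask pair taken by the allocation and query functions declared
 * below filters on the CRYPTO_ALG_* bits above.  A bit set in the mask
 * but clear in the type requests an algorithm with that bit clear, so a
 * caller that cannot tolerate asynchronous completion might write:
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */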

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API
 */
#define CRYPTO_ALG_INTERNAL		0x00002000

/*
 * Set if the algorithm has a ->setkey() method but can be used without
 * calling it first, i.e. there is a default key.
 */
#define CRYPTO_ALG_OPTIONAL_KEY		0x00004000

/*
 * Don't trigger module loading
 */
#define CRYPTO_NOLOAD			0x00008000

/*
 * The algorithm may allocate memory during request processing, i.e. during
 * encryption, decryption, or hashing.  Users can request an algorithm with this
 * flag unset if they can't handle memory allocation failures.
 *
 * This flag is currently only implemented for algorithms of type "skcipher",
 * "aead", "ahash", "shash", and "cipher".  Algorithms of other types might not
 * have this flag set even if they allocate memory.
 *
 * In some edge cases, algorithms can allocate memory regardless of this flag.
 * To avoid these cases, users must obey the following usage constraints:
 *    skcipher:
 *	- The IV buffer and all scatterlist elements must be aligned to the
 *	  algorithm's alignmask.
 *	- If the data were to be divided into chunks of size
 *	  crypto_skcipher_walksize() (with any remainder going at the end), no
 *	  chunk can cross a page boundary or a scatterlist element boundary.
 *    aead:
 *	- The IV buffer and all scatterlist elements must be aligned to the
 *	  algorithm's alignmask.
 *	- The first scatterlist element must contain all the associated data,
 *	  and its pages must be !PageHighMem.
 *	- If the plaintext/ciphertext were to be divided into chunks of size
 *	  crypto_aead_walksize() (with the remainder going at the end), no chunk
 *	  can cross a page boundary or a scatterlist element boundary.
 *    ahash:
 *	- crypto_ahash_finup() must not be used unless the algorithm implements
 *	  ->finup() natively.
 */
#define CRYPTO_ALG_ALLOCATES_MEMORY	0x00010000

/*
 * Mark an algorithm as a service implementation only usable by a
 * template and never by a normal user of the kernel crypto API.
 * This is intended to be used by algorithms that are themselves
 * not FIPS-approved but may instead be used to implement parts of
 * a FIPS-approved algorithm (e.g., dh vs. ffdhe2048(dh)).
 */
#define CRYPTO_ALG_FIPS_INTERNAL	0x00020000

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_NEED_KEY		0x00000001

#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS	0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
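
/*
 * Illustrative sketch: the CRYPTO_TFM_REQ_* bits are set by API users on
 * a transform to influence later operations, e.g. making ->setkey()
 * reject known-weak keys before programming one.  "tfm" and the
 * type-specific setkey call below are assumed, not defined in this
 * header:
 *
 *	crypto_tfm_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
 *	err = setkey(tfm, key, keylen);
 */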

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		128

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  On architectures that support non-cache coherent
 * DMA, such as ARM or arm64, it also takes into account the minimal alignment
 * that is required to ensure that the context struct member does not share any
 * cachelines with the rest of the struct. This is needed to ensure that cache
 * maintenance for non-coherent DMA (cache invalidation in particular) does not
 * affect data that may be accessed by the CPU concurrently.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct crypto_tfm;
struct crypto_type;
struct module;

typedef void (*crypto_completion_t)(void *req, int err);

/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};
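
/*
 * Illustrative sketch: an asynchronous backend completes a request by
 * invoking the crypto_completion_t callback with the caller's @data
 * pointer.  A hypothetical caller-side callback ("my_ctx" and
 * "my_op_done" are made-up names) could look like:
 *
 *	static void my_op_done(void *data, int err)
 *	{
 *		struct my_ctx *ctx = data;
 *
 *		if (err == -EINPROGRESS)
 *			return;	(a backlogged request has now been started)
 *		ctx->err = err;
 *		complete(&ctx->done);	(cf. crypto_req_done() below)
 *	}
 */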

/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */

/**
 * struct cipher_alg - single-block symmetric ciphers definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *		     the smallest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined
 *		     values as this is not hardware specific. Possible values
 *		     for this field can be found via git grep "_MIN_KEY_SIZE"
 *		     include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *		     the largest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined values
 *		     as this is not hardware specific. Possible values for this
 *		     field can be found via git grep "_MAX_KEY_SIZE"
 *		     include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *		program a supplied key into the hardware or store the key in the
 *		transformation context for programming it later. Note that this
 *		function does modify the transformation context. This function
 *		can be called multiple times during the existence of the
 *		transformation object, so one must make sure the key is properly
 *		reprogrammed into the hardware. This function is also
 *		responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *		 single block of data, which must be @cra_blocksize big. This
 *		 always operates on a full @cra_blocksize and it is not possible
 *		 to encrypt a block of smaller size. The supplied buffers must
 *		 therefore also be at least of @cra_blocksize size. Both the
 *		 input and output buffers are always aligned to @cra_alignmask.
 *		 In case either of the input or output buffer supplied by user
 *		 of the crypto API is not aligned to @cra_alignmask, the crypto
 *		 API will re-align the buffers. The re-alignment means that a
 *		 new buffer will be allocated, the data will be copied into the
 *		 new buffer, then the processing will happen on the new buffer,
 *		 then the data will be copied back into the original buffer and
 *		 finally the new buffer will be freed. In case a software
 *		 fallback was put in place in the @cra_init call, this function
 *		 might need to use the fallback if the algorithm doesn't support
 *		 all of the key sizes. In case the key was stored in
 *		 transformation context, the key might need to be re-programmed
 *		 into the hardware in this function. This function shall not
 *		 modify the transformation context, as this function may be
 *		 called in parallel with the same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *		 @cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

/**
 * struct compress_alg - compression/decompression algorithm
 * @coa_compress: Compress a buffer of specified length, storing the resulting
 *		  data in the specified buffer. Return the length of the
 *		  compressed data in dlen.
 * @coa_decompress: Decompress the source buffer, storing the uncompressed
 *		    data in the specified buffer. The length of the data is
 *		    returned in dlen.
 *
 * All fields are mandatory.
 */
struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};
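
/*
 * Illustrative sketch: the single-block ops a hypothetical driver would
 * supply through struct cipher_alg.  All names prefixed "mydrv_" are
 * made up; crypto_tfm_ctx() lives in the driver-facing algapi headers,
 * not here:
 *
 *	static int mydrv_setkey(struct crypto_tfm *tfm, const u8 *key,
 *				unsigned int keylen)
 *	{
 *		struct mydrv_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		if (keylen != 16 && keylen != 32)
 *			return -EINVAL;
 *		return mydrv_expand_key(ctx, key, keylen);
 *	}
 *
 *	static void mydrv_encrypt(struct crypto_tfm *tfm, u8 *dst,
 *				  const u8 *src)
 *	{
 *		mydrv_crypt_block(crypto_tfm_ctx(tfm), dst, src, true);
 *	}
 */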

#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress

/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *	       CRYPTO_ALG_* flags for the flags which go in here. Those are
 *	       used for fine-tuning the description of the transformation
 *	       algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *		   of the smallest possible unit which can be transformed with
 *		   this algorithm. The users must respect this value.
 *		   In case of HASH transformation, it is possible for a smaller
 *		   block than @cra_blocksize to be passed to the crypto API for
 *		   transformation, in case of any other transformation type, an
 *		   error will be returned upon any attempt to transform smaller
 *		   than @cra_blocksize chunks.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *		 value informs the kernel crypto API about the memory size
 *		 needed to be allocated for the transformation context.
 * @cra_alignmask: For cipher, skcipher, lskcipher, and aead algorithms this is
 *		   1 less than the alignment, in bytes, that the algorithm
 *		   implementation requires for input and output buffers.  When
 *		   the crypto API is invoked with buffers that are not aligned
 *		   to this alignment, the crypto API automatically utilizes
 *		   appropriately aligned temporary buffers to comply with what
 *		   the algorithm needs.  (For scatterlists this happens only if
 *		   the algorithm uses the skcipher_walk helper functions.)  This
 *		   misalignment handling carries a performance penalty, so it is
 *		   preferred that algorithms do not set a nonzero alignmask.
 *		   Also, crypto API users may wish to allocate buffers aligned
 *		   to the alignmask of the algorithm being used, in order to
 *		   avoid the API having to realign them.  Note: the alignmask is
 *		   not supported for hash algorithms and is always 0 for them.
 * @cra_priority: Priority of this transformation implementation. In case
 *		  multiple transformations with same @cra_name are available to
 *		  the Crypto API, the kernel will use the one with highest
 *		  @cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *	      transformation algorithm. This is the name of the transformation
 *	      itself. This field is used by the kernel when looking up the
 *	      providers of particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *		     name of the provider of the transformation. This can be any
 *		     arbitrary value, but in the usual case, this contains the
 *		     name of the chip or provider and the name of the
 *		     transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *	      struct crypto_type, which implements callbacks common for all
 *	      transformation types. There are multiple options, such as
 *	      &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
 *	      This field might be empty. In that case, there are no common
 *	      callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *	   multiple structures. Depending on the type of transformation selected
 *	   by @cra_type and @cra_flags above, the associated structure must be
 *	   filled with callbacks. This field might be empty. This is the case
 *	   for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *	      is used to initialize the cryptographic transformation object.
 *	      This function is called only once at the instantiation time, right
 *	      after the transformation context was allocated. In case the
 *	      cryptographic hardware has some special requirements which need to
 *	      be handled by software, this function shall check for the precise
 *	      requirement of the transformation and put any software fallbacks
 *	      in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *	      counterpart to @cra_init, used to remove various changes set in
 *	      @cra_init.
 * @cra_u.cipher: Union member which contains a single-block symmetric cipher
 *		  definition. See @struct @cipher_alg.
 * @cra_u.compress: Union member which contains a (de)compression algorithm.
 *		    See @struct @compress_alg.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	refcount_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct cipher_alg cipher;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;
} CRYPTO_MINALIGN_ATTR;
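
/*
 * Illustrative sketch (continuing the hypothetical "mydrv" example
 * above): a complete single-block cipher definition.  Registration is
 * done with crypto_register_alg(), which is declared in the internal
 * algapi headers rather than here:
 *
 *	static struct crypto_alg mydrv_alg = {
 *		.cra_name		= "aes",
 *		.cra_driver_name	= "aes-mydrv",
 *		.cra_priority		= 300,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct mydrv_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u.cipher		= {
 *			.cia_min_keysize = 16,
 *			.cia_max_keysize = 32,
 *			.cia_setkey	 = mydrv_setkey,
 *			.cia_encrypt	 = mydrv_encrypt,
 *			.cia_decrypt	 = mydrv_decrypt,
 *		},
 *	};
 */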

/*
 * A helper struct for waiting for completion of async crypto ops
 */
struct crypto_wait {
	struct completion completion;
	int err;
};

/*
 * Macro for declaring a crypto op async wait object on stack
 */
#define DECLARE_CRYPTO_WAIT(_wait) \
	struct crypto_wait _wait = { \
		COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }

/*
 * Async ops completion helper functions
 */
void crypto_req_done(void *req, int err);

static inline int crypto_wait_req(int err, struct crypto_wait *wait)
{
	switch (err) {
	case -EINPROGRESS:
	case -EBUSY:
		wait_for_completion(&wait->completion);
		reinit_completion(&wait->completion);
		err = wait->err;
		break;
	}

	return err;
}

static inline void crypto_init_wait(struct crypto_wait *wait)
{
	init_completion(&wait->completion);
}
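
/*
 * Illustrative sketch: driving an asynchronous operation synchronously
 * with the helpers above.  The request setup and the
 * crypto_skcipher_encrypt() call are assumed from the skcipher API and
 * are not defined in this header:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	if (err)
 *		return err;
 */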

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct crypto_tfm {
	refcount_t refcnt;

	u32 crt_flags;

	int node;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_comp {
	struct crypto_tfm base;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}
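
/*
 * Illustrative sketch: the helpers above make it easy to introspect
 * whichever implementation the lookup selected (for some assumed
 * struct crypto_tfm *tfm), e.g. for logging:
 *
 *	pr_info("using %s (%s), blocksize %u, alignmask 0x%x\n",
 *		crypto_tfm_alg_name(tfm), crypto_tfm_alg_driver_name(tfm),
 *		crypto_tfm_alg_blocksize(tfm), crypto_tfm_alg_alignmask(tfm));
 */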

static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

int crypto_comp_compress(struct crypto_comp *tfm,
			 const u8 *src, unsigned int slen,
			 u8 *dst, unsigned int *dlen);

int crypto_comp_decompress(struct crypto_comp *tfm,
			   const u8 *src, unsigned int slen,
			   u8 *dst, unsigned int *dlen);
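
/*
 * Illustrative sketch: one-shot compression with the legacy crypto_comp
 * interface declared above.  @dlen must hold the destination buffer size
 * on entry and is updated to the number of bytes produced; "src", "slen"
 * and "dst" are assumed to exist in the caller:
 *
 *	struct crypto_comp *tfm;
 *	unsigned int dlen = sizeof(dst);
 *	int err;
 *
 *	tfm = crypto_alloc_comp("deflate", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	crypto_free_comp(tfm);
 *	return err;
 */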

#endif	/* _LINUX_CRYPTO_H */