/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/refcount.h>
#include <linux/slab.h>
#include <linux/completion.h>

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)
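/*
 * Illustrative sketch (not part of this header): a driver module implementing
 * a hypothetical "foo" algorithm would declare both aliases so that
 * request_module("crypto-foo") as well as unprefixed userspace lookups can
 * find it.  The names "foo" and "foo-generic" are made up for the example.
 *
 *	MODULE_ALIAS_CRYPTO("foo");
 *	MODULE_ALIAS_CRYPTO("foo-generic");
 */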
/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_KPP		0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS	0x0000000a
#define CRYPTO_ALG_TYPE_SCOMPRESS	0x0000000b
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER	0x0000000d
#define CRYPTO_ALG_TYPE_HASH		0x0000000e
#define CRYPTO_ALG_TYPE_SHASH		0x0000000e
#define CRYPTO_ALG_TYPE_AHASH		0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK	0x0000000e
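/*
 * Illustrative sketch (not part of this header): lookups take a (type, mask)
 * pair matched against cra_flags; bits set in the mask must agree with the
 * corresponding bits in the type, so a caller can insist on a particular
 * algorithm type.  For instance, a hash lookup can accept both shash and
 * ahash providers by masking with CRYPTO_ALG_TYPE_HASH_MASK.  The algorithm
 * name "sha256" is only an example.
 *
 *	if (crypto_has_alg("sha256", CRYPTO_ALG_TYPE_HASH,
 *			   CRYPTO_ALG_TYPE_HASH_MASK))
 *		pr_info("some sha256 implementation is registered\n");
 */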
#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set if the algorithm (or an algorithm which it uses) requires another
 * algorithm of the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */

#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/* Set this bit if the algorithm provided is hardware accelerated but
 * not available to userspace via instruction set or so.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API
 */
#define CRYPTO_ALG_INTERNAL		0x00002000

/*
 * Set if the algorithm has a ->setkey() method but can be used without
 * calling it first, i.e. there is a default key.
 */
#define CRYPTO_ALG_OPTIONAL_KEY		0x00004000

/*
 * Don't trigger module loading
 */
#define CRYPTO_NOLOAD			0x00008000

/*
 * The algorithm may allocate memory during request processing, i.e. during
 * encryption, decryption, or hashing.  Users can request an algorithm with this
 * flag unset if they can't handle memory allocation failures.
 *
 * This flag is currently only implemented for algorithms of type "skcipher",
 * "aead", "ahash", "shash", and "cipher".  Algorithms of other types might not
 * have this flag set even if they allocate memory.
 *
 * In some edge cases, algorithms can allocate memory regardless of this flag.
 * To avoid these cases, users must obey the following usage constraints:
 *    skcipher:
 *	 - The IV buffer and all scatterlist elements must be aligned to the
 *	   algorithm's alignmask.
 *	 - If the data were to be divided into chunks of size
 *	   crypto_skcipher_walksize() (with any remainder going at the end), no
 *	   chunk can cross a page boundary or a scatterlist element boundary.
 *    aead:
 *	 - The IV buffer and all scatterlist elements must be aligned to the
 *	   algorithm's alignmask.
 *	 - The first scatterlist element must contain all the associated data,
 *	   and its pages must be !PageHighMem.
 *	 - If the plaintext/ciphertext were to be divided into chunks of size
 *	   crypto_aead_walksize() (with the remainder going at the end), no chunk
 *	   can cross a page boundary or a scatterlist element boundary.
 *    ahash:
 *	 - The result buffer must be aligned to the algorithm's alignmask.
 *	 - crypto_ahash_finup() must not be used unless the algorithm implements
 *	   ->finup() natively.
 */
#define CRYPTO_ALG_ALLOCATES_MEMORY	0x00010000
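/*
 * Illustrative sketch (not part of this header): a user that cannot tolerate
 * allocation failures during request processing can require this flag to be
 * clear by putting it in the lookup mask while leaving it out of the type.
 * crypto_alloc_skcipher() is assumed here from <crypto/skcipher.h>, and
 * "xts(aes)" is only an example name.
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, CRYPTO_ALG_ALLOCATES_MEMORY);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */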
/*
 * Mark an algorithm as a service implementation only usable by a
 * template and never by a normal user of the kernel crypto API.
 * This is intended to be used by algorithms that are themselves
 * not FIPS-approved but may instead be used to implement parts of
 * a FIPS-approved algorithm (e.g., dh vs. ffdhe2048(dh)).
 */
#define CRYPTO_ALG_FIPS_INTERNAL	0x00020000

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_NEED_KEY		0x00000001

#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS	0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		128

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  On architectures that support non-cache coherent
 * DMA, such as ARM or arm64, it also takes into account the minimal alignment
 * that is required to ensure that the context struct member does not share any
 * cachelines with the rest of the struct. This is needed to ensure that cache
 * maintenance for non-coherent DMA (cache invalidation in particular) does not
 * affect data that may be accessed by the CPU concurrently.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
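/*
 * Illustrative sketch (not part of this header): a driver keeps its private
 * state in the tfm context area, which CRYPTO_MINALIGN_ATTR keeps suitably
 * aligned.  The crypto_tfm_ctx() accessor is assumed from <crypto/algapi.h>,
 * and the "mydrv" structure and function names are made up for the example.
 *
 *	struct mydrv_ctx {
 *		u32 rounds;
 *		u8 key[32];
 *	};
 *
 *	static int mydrv_setkey(struct crypto_tfm *tfm, const u8 *key,
 *				unsigned int keylen)
 *	{
 *		struct mydrv_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		if (keylen != sizeof(ctx->key))
 *			return -EINVAL;
 *		memcpy(ctx->key, key, keylen);
 *		return 0;
 *	}
 *
 * The corresponding crypto_alg would set
 * .cra_ctxsize = sizeof(struct mydrv_ctx).
 */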
struct scatterlist;
struct crypto_async_request;
struct crypto_tfm;
struct crypto_type;

typedef struct crypto_async_request crypto_completion_data_t;
typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};

/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */
/**
 * struct cipher_alg - single-block symmetric ciphers definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *		     the smallest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined
 *		     values as this is not hardware specific. Possible values
 *		     for this field can be found via git grep "_MIN_KEY_SIZE"
 *		     include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *		     the largest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined values
 *		     as this is not hardware specific. Possible values for this
 *		     field can be found via git grep "_MAX_KEY_SIZE"
 *		     include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *		program a supplied key into the hardware or store the key in the
 *		transformation context for programming it later. Note that this
 *		function does modify the transformation context. This function
 *		can be called multiple times during the existence of the
 *		transformation object, so one must make sure the key is properly
 *		reprogrammed into the hardware. This function is also
 *		responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *		 single block of data, which must be @cra_blocksize big. This
 *		 always operates on a full @cra_blocksize and it is not possible
 *		 to encrypt a block of smaller size. The supplied buffers must
 *		 therefore also be at least of @cra_blocksize size. Both the
 *		 input and output buffers are always aligned to @cra_alignmask.
 *		 In case either of the input or output buffer supplied by user
 *		 of the crypto API is not aligned to @cra_alignmask, the crypto
 *		 API will re-align the buffers. The re-alignment means that a
 *		 new buffer will be allocated, the data will be copied into the
 *		 new buffer, then the processing will happen on the new buffer,
 *		 then the data will be copied back into the original buffer and
 *		 finally the new buffer will be freed. In case a software
 *		 fallback was put in place in the @cra_init call, this function
 *		 might need to use the fallback if the algorithm doesn't support
 *		 all of the key sizes. In case the key was stored in
 *		 transformation context, the key might need to be re-programmed
 *		 into the hardware in this function. This function shall not
 *		 modify the transformation context, as this function may be
 *		 called in parallel with the same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *		 @cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};
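/*
 * Illustrative sketch (not part of this header): single-block callbacks for a
 * hypothetical cipher, each call processing exactly one @cra_blocksize block.
 * The "mydrv" names, mydrv_do_block() and MYDRV_ENCRYPT are made up, and the
 * mydrv_ctx/crypto_tfm_ctx() sketch shown earlier is assumed.
 *
 *	static void mydrv_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 *	{
 *		const struct mydrv_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		mydrv_do_block(ctx, dst, src, MYDRV_ENCRYPT);
 *	}
 *
 * A matching mydrv_decrypt() would call the same helper with the decrypt
 * direction; both are wired up through the @cia_encrypt and @cia_decrypt
 * members above (see the crypto_alg example further down).
 */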
/**
 * struct compress_alg - compression/decompression algorithm
 * @coa_compress: Compress a buffer of specified length, storing the resulting
 *		  data in the specified buffer. Return the length of the
 *		  compressed data in dlen.
 * @coa_decompress: Decompress the source buffer, storing the uncompressed
 *		    data in the specified buffer. The length of the data is
 *		    returned in dlen.
 *
 * All fields are mandatory.
 */
struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#ifdef CONFIG_CRYPTO_STATS
/*
 * struct crypto_istat_aead - statistics for AEAD algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for AEAD requests
 */
struct crypto_istat_aead {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_akcipher - statistics for akcipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @verify_cnt:		number of verify operations
 * @sign_cnt:		number of sign requests
 * @err_cnt:		number of errors for akcipher requests
 */
struct crypto_istat_akcipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t verify_cnt;
	atomic64_t sign_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for cipher requests
 */
struct crypto_istat_cipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_compress - statistics for compress algorithm
 * @compress_cnt:	number of compress requests
 * @compress_tlen:	total data size handled by compress requests
 * @decompress_cnt:	number of decompress requests
 * @decompress_tlen:	total data size handled by decompress requests
 * @err_cnt:		number of errors for compress requests
 */
struct crypto_istat_compress {
	atomic64_t compress_cnt;
	atomic64_t compress_tlen;
	atomic64_t decompress_cnt;
	atomic64_t decompress_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_hash - statistics for hash algorithm
 * @hash_cnt:	number of hash requests
 * @hash_tlen:	total data size hashed
 * @err_cnt:	number of errors for hash requests
 */
struct crypto_istat_hash {
	atomic64_t hash_cnt;
	atomic64_t hash_tlen;
	atomic64_t err_cnt;
};
/*
 * struct crypto_istat_kpp - statistics for KPP algorithm
 * @setsecret_cnt:		number of setsecret operations
 * @generate_public_key_cnt:	number of generate_public_key operations
 * @compute_shared_secret_cnt:	number of compute_shared_secret operations
 * @err_cnt:			number of errors for KPP requests
 */
struct crypto_istat_kpp {
	atomic64_t setsecret_cnt;
	atomic64_t generate_public_key_cnt;
	atomic64_t compute_shared_secret_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_rng: statistics for RNG algorithm
 * @generate_cnt:	number of RNG generate requests
 * @generate_tlen:	total data size of generated data by the RNG
 * @seed_cnt:		number of times the RNG was seeded
 * @err_cnt:		number of errors for RNG requests
 */
struct crypto_istat_rng {
	atomic64_t generate_cnt;
	atomic64_t generate_tlen;
	atomic64_t seed_cnt;
	atomic64_t err_cnt;
};
#endif /* CONFIG_CRYPTO_STATS */

#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress

/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *	       CRYPTO_ALG_* flags for the flags which go in here. Those are
 *	       used for fine-tuning the description of the transformation
 *	       algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *		   of the smallest possible unit which can be transformed with
 *		   this algorithm. The users must respect this value.
 *		   In case of HASH transformation, it is possible for a smaller
 *		   block than @cra_blocksize to be passed to the crypto API for
 *		   transformation, in case of any other transformation type, an
 *		   error will be returned upon any attempt to transform smaller
 *		   than @cra_blocksize chunks.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *		 value informs the kernel crypto API about the memory size
 *		 needed to be allocated for the transformation context.
 * @cra_alignmask: Alignment mask for the input and output data buffer. The data
 *		   buffer containing the input data for the algorithm must be
 *		   aligned to this alignment mask. The data buffer for the
 *		   output data must be aligned to this alignment mask. Note that
 *		   the Crypto API will do the re-alignment in software, but
 *		   only under special conditions and there is a performance hit.
 *		   The re-alignment happens at these occasions for different
 *		   @cra_u types: cipher -- For both input data and output data
 *		   buffer; ahash -- For output hash destination buf; shash --
 *		   For output hash destination buf.
 *		   This is needed on hardware which is flawed by design and
 *		   cannot pick data from arbitrary addresses.
 * @cra_priority: Priority of this transformation implementation. In case
 *		  multiple transformations with same @cra_name are available to
 *		  the Crypto API, the kernel will use the one with highest
 *		  @cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *	      transformation algorithm. This is the name of the transformation
 *	      itself. This field is used by the kernel when looking up the
 *	      providers of particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *		     name of the provider of the transformation. This can be any
 *		     arbitrary value, but in the usual case, this contains the
 *		     name of the chip or provider and the name of the
 *		     transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *	      struct crypto_type, which implements callbacks common for all
 *	      transformation types. There are multiple options, such as
 *	      &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
 *	      This field might be empty. In that case, there are no common
 *	      callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *	   multiple structures. Depending on the type of transformation selected
 *	   by @cra_type and @cra_flags above, the associated structure must be
 *	   filled with callbacks. This field might be empty. This is the case
 *	   for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *	      is used to initialize the cryptographic transformation object.
 *	      This function is called only once at the instantiation time, right
 *	      after the transformation context was allocated. In case the
 *	      cryptographic hardware has some special requirements which need to
 *	      be handled by software, this function shall check for the precise
 *	      requirement of the transformation and put any software fallbacks
 *	      in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *	      counterpart to @cra_init, used to remove various changes set in
 *	      @cra_init.
 * @cra_u.cipher: Union member which contains a single-block symmetric cipher
 *		  definition. See @struct @cipher_alg.
 * @cra_u.compress: Union member which contains a (de)compression algorithm.
 *		    See @struct @compress_alg.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * @stats: union of all possible crypto_istat_xxx structures
 * @stats.aead:		statistics for AEAD algorithm
 * @stats.akcipher:	statistics for akcipher algorithm
 * @stats.cipher:	statistics for cipher algorithm
 * @stats.compress:	statistics for compress algorithm
 * @stats.hash:		statistics for hash algorithm
 * @stats.rng:		statistics for rng algorithm
 * @stats.kpp:		statistics for KPP algorithm
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	refcount_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct cipher_alg cipher;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;

#ifdef CONFIG_CRYPTO_STATS
	union {
		struct crypto_istat_aead aead;
		struct crypto_istat_akcipher akcipher;
		struct crypto_istat_cipher cipher;
		struct crypto_istat_compress compress;
		struct crypto_istat_hash hash;
		struct crypto_istat_rng rng;
		struct crypto_istat_kpp kpp;
	} stats;
#endif /* CONFIG_CRYPTO_STATS */

} CRYPTO_MINALIGN_ATTR;
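/*
 * Illustrative sketch (not part of this header): declaring and registering a
 * single-block cipher built from the hypothetical "mydrv" pieces sketched
 * above.  crypto_register_alg()/crypto_unregister_alg() are declared further
 * down in this header; all "mydrv" and "foo" names are made up.
 *
 *	static struct crypto_alg mydrv_alg = {
 *		.cra_name		= "foo",
 *		.cra_driver_name	= "foo-mydrv",
 *		.cra_priority		= 300,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct mydrv_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u			= {
 *			.cipher = {
 *				.cia_min_keysize	= 16,
 *				.cia_max_keysize	= 32,
 *				.cia_setkey		= mydrv_setkey,
 *				.cia_encrypt		= mydrv_encrypt,
 *				.cia_decrypt		= mydrv_decrypt,
 *			},
 *		},
 *	};
 *
 *	static int __init mydrv_init(void)
 *	{
 *		return crypto_register_alg(&mydrv_alg);
 *	}
 *
 *	static void __exit mydrv_exit(void)
 *	{
 *		crypto_unregister_alg(&mydrv_alg);
 *	}
 */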
#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg);
void crypto_stats_get(struct crypto_alg *alg);
void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
#else
static inline void crypto_stats_init(struct crypto_alg *alg)
{}
static inline void crypto_stats_get(struct crypto_alg *alg)
{}
static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
{}
static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
#endif
/*
 * A helper struct for waiting for completion of async crypto ops
 */
struct crypto_wait {
	struct completion completion;
	int err;
};

/*
 * Macro for declaring a crypto op async wait object on stack
 */
#define DECLARE_CRYPTO_WAIT(_wait) \
	struct crypto_wait _wait = { \
		COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }

/*
 * Async ops completion helper functions
 */
static inline void *crypto_get_completion_data(crypto_completion_data_t *req)
{
	return req->data;
}

void crypto_req_done(struct crypto_async_request *req, int err);

static inline int crypto_wait_req(int err, struct crypto_wait *wait)
{
	switch (err) {
	case -EINPROGRESS:
	case -EBUSY:
		wait_for_completion(&wait->completion);
		reinit_completion(&wait->completion);
		err = wait->err;
		break;
	}

	return err;
}

static inline void crypto_init_wait(struct crypto_wait *wait)
{
	init_completion(&wait->completion);
}
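/*
 * Illustrative sketch (not part of this header): driving an asynchronous
 * request synchronously with a crypto_wait object.  The skcipher request
 * helpers are assumptions taken from <crypto/skcipher.h>; only
 * DECLARE_CRYPTO_WAIT(), crypto_req_done() and crypto_wait_req() come from
 * this header.
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *					   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * crypto_req_done() completes the wait object, so crypto_wait_req() returns
 * only once the request has actually finished (or failed synchronously).
 */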
/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct crypto_tfm {
	u32 crt_flags;

	int node;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_comp {
	struct crypto_tfm base;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}

static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

int crypto_comp_compress(struct crypto_comp *tfm,
			 const u8 *src, unsigned int slen,
			 u8 *dst, unsigned int *dlen);

int crypto_comp_decompress(struct crypto_comp *tfm,
			   const u8 *src, unsigned int slen,
			   u8 *dst, unsigned int *dlen);
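/*
 * Illustrative sketch (not part of this header): one-shot compression through
 * the crypto_comp interface.  The algorithm name "deflate" and the buffers
 * are only examples.
 *
 *	struct crypto_comp *tfm;
 *	unsigned int dlen = sizeof(dst);
 *	int err;
 *
 *	tfm = crypto_alloc_comp("deflate", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	crypto_free_comp(tfm);
 *
 * On success, dlen is updated to the number of bytes written to dst;
 * decompression uses crypto_comp_decompress() the same way.
 */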
#endif	/* _LINUX_CRYPTO_H */