cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

crypto.h (28100B)


      1/* SPDX-License-Identifier: GPL-2.0-or-later */
      2/*
      3 * Scatterlist Cryptographic API.
      4 *
      5 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
      6 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
      7 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
      8 *
      9 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
     10 * and Nettle, by Niels Möller.
     11 */
     12#ifndef _LINUX_CRYPTO_H
     13#define _LINUX_CRYPTO_H
     14
     15#include <linux/atomic.h>
     16#include <linux/kernel.h>
     17#include <linux/list.h>
     18#include <linux/bug.h>
     19#include <linux/refcount.h>
     20#include <linux/slab.h>
     21#include <linux/completion.h>
     22
     23/*
     24 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
      25 * arbitrary modules to be loaded. Loading from userspace may still need the
      26 * unprefixed names, so those aliases are retained as well.
     27 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
     28 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
     29 * expands twice on the same line. Instead, use a separate base name for the
     30 * alias.
     31 */
     32#define MODULE_ALIAS_CRYPTO(name)	\
     33		__MODULE_INFO(alias, alias_userspace, name);	\
     34		__MODULE_INFO(alias, alias_crypto, "crypto-" name)
     35
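/*
 * Illustrative sketch only (hypothetical "foo" driver, not part of this
 * header): a module implementing an algorithm declares both aliases so that
 * a request for either "foo" (userspace) or "crypto-foo" (kernel-internal)
 * can trigger module autoloading:
 *
 *	MODULE_ALIAS_CRYPTO("foo");
 *	MODULE_ALIAS_CRYPTO("foo-generic");
 */
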
     36/*
     37 * Algorithm masks and types.
     38 */
     39#define CRYPTO_ALG_TYPE_MASK		0x0000000f
     40#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
     41#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
     42#define CRYPTO_ALG_TYPE_AEAD		0x00000003
     43#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
     44#define CRYPTO_ALG_TYPE_KPP		0x00000008
     45#define CRYPTO_ALG_TYPE_ACOMPRESS	0x0000000a
     46#define CRYPTO_ALG_TYPE_SCOMPRESS	0x0000000b
     47#define CRYPTO_ALG_TYPE_RNG		0x0000000c
     48#define CRYPTO_ALG_TYPE_AKCIPHER	0x0000000d
     49#define CRYPTO_ALG_TYPE_HASH		0x0000000e
     50#define CRYPTO_ALG_TYPE_SHASH		0x0000000e
     51#define CRYPTO_ALG_TYPE_AHASH		0x0000000f
     52
     53#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
     54#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
     55#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK	0x0000000e
     56
     57#define CRYPTO_ALG_LARVAL		0x00000010
     58#define CRYPTO_ALG_DEAD			0x00000020
     59#define CRYPTO_ALG_DYING		0x00000040
     60#define CRYPTO_ALG_ASYNC		0x00000080
     61
     62/*
     63 * Set if the algorithm (or an algorithm which it uses) requires another
     64 * algorithm of the same type to handle corner cases.
     65 */
     66#define CRYPTO_ALG_NEED_FALLBACK	0x00000100
     67
     68/*
     69 * Set if the algorithm has passed automated run-time testing.  Note that
     70 * if there is no run-time testing for a given algorithm it is considered
     71 * to have passed.
     72 */
     73
     74#define CRYPTO_ALG_TESTED		0x00000400
     75
     76/*
     77 * Set if the algorithm is an instance that is built from templates.
     78 */
     79#define CRYPTO_ALG_INSTANCE		0x00000800
     80
      81/* Set this bit if the provided algorithm is hardware accelerated but
      82 * not available to userspace via an instruction set or similar means.
      83 */
     84#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000
     85
     86/*
     87 * Mark a cipher as a service implementation only usable by another
     88 * cipher and never by a normal user of the kernel crypto API
     89 */
     90#define CRYPTO_ALG_INTERNAL		0x00002000
     91
     92/*
     93 * Set if the algorithm has a ->setkey() method but can be used without
     94 * calling it first, i.e. there is a default key.
     95 */
     96#define CRYPTO_ALG_OPTIONAL_KEY		0x00004000
     97
     98/*
     99 * Don't trigger module loading
    100 */
    101#define CRYPTO_NOLOAD			0x00008000
    102
    103/*
    104 * The algorithm may allocate memory during request processing, i.e. during
    105 * encryption, decryption, or hashing.  Users can request an algorithm with this
    106 * flag unset if they can't handle memory allocation failures.
    107 *
    108 * This flag is currently only implemented for algorithms of type "skcipher",
    109 * "aead", "ahash", "shash", and "cipher".  Algorithms of other types might not
    110 * have this flag set even if they allocate memory.
    111 *
    112 * In some edge cases, algorithms can allocate memory regardless of this flag.
    113 * To avoid these cases, users must obey the following usage constraints:
    114 *    skcipher:
    115 *	- The IV buffer and all scatterlist elements must be aligned to the
    116 *	  algorithm's alignmask.
    117 *	- If the data were to be divided into chunks of size
    118 *	  crypto_skcipher_walksize() (with any remainder going at the end), no
    119 *	  chunk can cross a page boundary or a scatterlist element boundary.
    120 *    aead:
    121 *	- The IV buffer and all scatterlist elements must be aligned to the
    122 *	  algorithm's alignmask.
    123 *	- The first scatterlist element must contain all the associated data,
    124 *	  and its pages must be !PageHighMem.
    125 *	- If the plaintext/ciphertext were to be divided into chunks of size
    126 *	  crypto_aead_walksize() (with the remainder going at the end), no chunk
    127 *	  can cross a page boundary or a scatterlist element boundary.
    128 *    ahash:
    129 *	- The result buffer must be aligned to the algorithm's alignmask.
    130 *	- crypto_ahash_finup() must not be used unless the algorithm implements
    131 *	  ->finup() natively.
    132 */
    133#define CRYPTO_ALG_ALLOCATES_MEMORY	0x00010000
    134
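/*
 * Illustrative sketch only (hypothetical caller, not part of this header):
 * a user that cannot tolerate allocation failures passes
 * CRYPTO_ALG_ALLOCATES_MEMORY in the mask (with the bit clear in the type)
 * when allocating a transform, so that only implementations without this
 * flag are selected:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0,
 *				    CRYPTO_ALG_ALLOCATES_MEMORY);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */
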
    135/*
    136 * Mark an algorithm as a service implementation only usable by a
    137 * template and never by a normal user of the kernel crypto API.
    138 * This is intended to be used by algorithms that are themselves
    139 * not FIPS-approved but may instead be used to implement parts of
    140 * a FIPS-approved algorithm (e.g., dh vs. ffdhe2048(dh)).
    141 */
    142#define CRYPTO_ALG_FIPS_INTERNAL	0x00020000
    143
    144/*
    145 * Transform masks and values (for crt_flags).
    146 */
    147#define CRYPTO_TFM_NEED_KEY		0x00000001
    148
    149#define CRYPTO_TFM_REQ_MASK		0x000fff00
    150#define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS	0x00000100
    151#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
    152#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
    153
    154/*
    155 * Miscellaneous stuff.
    156 */
    157#define CRYPTO_MAX_ALG_NAME		128
    158
    159/*
    160 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
    161 * declaration) is used to ensure that the crypto_tfm context structure is
    162 * aligned correctly for the given architecture so that there are no alignment
    163 * faults for C data types.  On architectures that support non-cache coherent
    164 * DMA, such as ARM or arm64, it also takes into account the minimal alignment
    165 * that is required to ensure that the context struct member does not share any
    166 * cachelines with the rest of the struct. This is needed to ensure that cache
    167 * maintenance for non-coherent DMA (cache invalidation in particular) does not
    168 * affect data that may be accessed by the CPU concurrently.
    169 */
    170#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
    171
    172#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
    173
    174struct scatterlist;
    175struct crypto_async_request;
    176struct crypto_tfm;
    177struct crypto_type;
    178
    179typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
    180
    181/**
    182 * DOC: Block Cipher Context Data Structures
    183 *
    184 * These data structures define the operating context for each block cipher
    185 * type.
    186 */
    187
    188struct crypto_async_request {
    189	struct list_head list;
    190	crypto_completion_t complete;
    191	void *data;
    192	struct crypto_tfm *tfm;
    193
    194	u32 flags;
    195};
    196
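/*
 * Illustrative sketch only (hypothetical callback and context, not part of
 * this header): the crypto core invokes the registered crypto_completion_t
 * when an async request finishes. A callback typically ignores -EINPROGRESS,
 * which only signals that a backlogged request has started processing:
 *
 *	static void my_req_done(struct crypto_async_request *req, int err)
 *	{
 *		struct my_wait_ctx *ctx = req->data;
 *
 *		if (err == -EINPROGRESS)
 *			return;
 *		ctx->err = err;
 *		complete(&ctx->done);
 *	}
 */
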
    197/**
    198 * DOC: Block Cipher Algorithm Definitions
    199 *
    200 * These data structures define modular crypto algorithm implementations,
    201 * managed via crypto_register_alg() and crypto_unregister_alg().
    202 */
    203
    204/**
    205 * struct cipher_alg - single-block symmetric ciphers definition
    206 * @cia_min_keysize: Minimum key size supported by the transformation. This is
    207 *		     the smallest key length supported by this transformation
    208 *		     algorithm. This must be set to one of the pre-defined
    209 *		     values as this is not hardware specific. Possible values
    210 *		     for this field can be found via git grep "_MIN_KEY_SIZE"
    211 *		     include/crypto/
    212 * @cia_max_keysize: Maximum key size supported by the transformation. This is
    213 *		    the largest key length supported by this transformation
    214 *		    algorithm. This must be set to one of the pre-defined values
    215 *		    as this is not hardware specific. Possible values for this
    216 *		    field can be found via git grep "_MAX_KEY_SIZE"
    217 *		    include/crypto/
    218 * @cia_setkey: Set key for the transformation. This function is used to either
    219 *	        program a supplied key into the hardware or store the key in the
    220 *	        transformation context for programming it later. Note that this
    221 *	        function does modify the transformation context. This function
    222 *	        can be called multiple times during the existence of the
    223 *	        transformation object, so one must make sure the key is properly
    224 *	        reprogrammed into the hardware. This function is also
    225 *	        responsible for checking the key length for validity.
    226 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
    227 *		 single block of data, which must be @cra_blocksize big. This
    228 *		 always operates on a full @cra_blocksize and it is not possible
    229 *		 to encrypt a block of smaller size. The supplied buffers must
    230 *		 therefore also be at least of @cra_blocksize size. Both the
    231 *		 input and output buffers are always aligned to @cra_alignmask.
    232 *		 In case either of the input or output buffer supplied by user
    233 *		 of the crypto API is not aligned to @cra_alignmask, the crypto
    234 *		 API will re-align the buffers. The re-alignment means that a
    235 *		 new buffer will be allocated, the data will be copied into the
    236 *		 new buffer, then the processing will happen on the new buffer,
    237 *		 then the data will be copied back into the original buffer and
    238 *		 finally the new buffer will be freed. In case a software
    239 *		 fallback was put in place in the @cra_init call, this function
    240 *		 might need to use the fallback if the algorithm doesn't support
    241 *		 all of the key sizes. In case the key was stored in
    242 *		 transformation context, the key might need to be re-programmed
    243 *		 into the hardware in this function. This function shall not
    244 *		 modify the transformation context, as this function may be
    245 *		 called in parallel with the same transformation object.
    246 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
    247 *		 @cia_encrypt, and the conditions are exactly the same.
    248 *
    249 * All fields are mandatory and must be filled.
    250 */
    251struct cipher_alg {
    252	unsigned int cia_min_keysize;
    253	unsigned int cia_max_keysize;
    254	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
    255	                  unsigned int keylen);
    256	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
    257	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
    258};
    259
    260/**
    261 * struct compress_alg - compression/decompression algorithm
    262 * @coa_compress: Compress a buffer of specified length, storing the resulting
    263 *		  data in the specified buffer. Return the length of the
    264 *		  compressed data in dlen.
    265 * @coa_decompress: Decompress the source buffer, storing the uncompressed
    266 *		    data in the specified buffer. The length of the data is
    267 *		    returned in dlen.
    268 *
    269 * All fields are mandatory.
    270 */
    271struct compress_alg {
    272	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
    273			    unsigned int slen, u8 *dst, unsigned int *dlen);
    274	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
    275			      unsigned int slen, u8 *dst, unsigned int *dlen);
    276};
    277
    278#ifdef CONFIG_CRYPTO_STATS
    279/*
    280 * struct crypto_istat_aead - statistics for AEAD algorithm
    281 * @encrypt_cnt:	number of encrypt requests
    282 * @encrypt_tlen:	total data size handled by encrypt requests
    283 * @decrypt_cnt:	number of decrypt requests
    284 * @decrypt_tlen:	total data size handled by decrypt requests
    285 * @err_cnt:		number of error for AEAD requests
    286 */
    287struct crypto_istat_aead {
    288	atomic64_t encrypt_cnt;
    289	atomic64_t encrypt_tlen;
    290	atomic64_t decrypt_cnt;
    291	atomic64_t decrypt_tlen;
    292	atomic64_t err_cnt;
    293};
    294
    295/*
    296 * struct crypto_istat_akcipher - statistics for akcipher algorithm
    297 * @encrypt_cnt:	number of encrypt requests
    298 * @encrypt_tlen:	total data size handled by encrypt requests
    299 * @decrypt_cnt:	number of decrypt requests
    300 * @decrypt_tlen:	total data size handled by decrypt requests
     301 * @verify_cnt:		number of verify operations
    302 * @sign_cnt:		number of sign requests
    303 * @err_cnt:		number of error for akcipher requests
    304 */
    305struct crypto_istat_akcipher {
    306	atomic64_t encrypt_cnt;
    307	atomic64_t encrypt_tlen;
    308	atomic64_t decrypt_cnt;
    309	atomic64_t decrypt_tlen;
    310	atomic64_t verify_cnt;
    311	atomic64_t sign_cnt;
    312	atomic64_t err_cnt;
    313};
    314
    315/*
    316 * struct crypto_istat_cipher - statistics for cipher algorithm
    317 * @encrypt_cnt:	number of encrypt requests
    318 * @encrypt_tlen:	total data size handled by encrypt requests
    319 * @decrypt_cnt:	number of decrypt requests
    320 * @decrypt_tlen:	total data size handled by decrypt requests
    321 * @err_cnt:		number of error for cipher requests
    322 */
    323struct crypto_istat_cipher {
    324	atomic64_t encrypt_cnt;
    325	atomic64_t encrypt_tlen;
    326	atomic64_t decrypt_cnt;
    327	atomic64_t decrypt_tlen;
    328	atomic64_t err_cnt;
    329};
    330
    331/*
    332 * struct crypto_istat_compress - statistics for compress algorithm
    333 * @compress_cnt:	number of compress requests
    334 * @compress_tlen:	total data size handled by compress requests
    335 * @decompress_cnt:	number of decompress requests
    336 * @decompress_tlen:	total data size handled by decompress requests
    337 * @err_cnt:		number of error for compress requests
    338 */
    339struct crypto_istat_compress {
    340	atomic64_t compress_cnt;
    341	atomic64_t compress_tlen;
    342	atomic64_t decompress_cnt;
    343	atomic64_t decompress_tlen;
    344	atomic64_t err_cnt;
    345};
    346
    347/*
     348 * struct crypto_istat_hash - statistics for hash algorithm
    349 * @hash_cnt:		number of hash requests
    350 * @hash_tlen:		total data size hashed
    351 * @err_cnt:		number of error for hash requests
    352 */
    353struct crypto_istat_hash {
    354	atomic64_t hash_cnt;
    355	atomic64_t hash_tlen;
    356	atomic64_t err_cnt;
    357};
    358
    359/*
    360 * struct crypto_istat_kpp - statistics for KPP algorithm
     361 * @setsecret_cnt:		number of setsecret operations
     362 * @generate_public_key_cnt:	number of generate_public_key operations
     363 * @compute_shared_secret_cnt:	number of compute_shared_secret operations
    364 * @err_cnt:			number of error for KPP requests
    365 */
    366struct crypto_istat_kpp {
    367	atomic64_t setsecret_cnt;
    368	atomic64_t generate_public_key_cnt;
    369	atomic64_t compute_shared_secret_cnt;
    370	atomic64_t err_cnt;
    371};
    372
    373/*
    374 * struct crypto_istat_rng: statistics for RNG algorithm
    375 * @generate_cnt:	number of RNG generate requests
    376 * @generate_tlen:	total data size of generated data by the RNG
    377 * @seed_cnt:		number of times the RNG was seeded
    378 * @err_cnt:		number of error for RNG requests
    379 */
    380struct crypto_istat_rng {
    381	atomic64_t generate_cnt;
    382	atomic64_t generate_tlen;
    383	atomic64_t seed_cnt;
    384	atomic64_t err_cnt;
    385};
    386#endif /* CONFIG_CRYPTO_STATS */
    387
    388#define cra_cipher	cra_u.cipher
    389#define cra_compress	cra_u.compress
    390
    391/**
     392 * struct crypto_alg - definition of a cryptographic cipher algorithm
    393 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
    394 *	       CRYPTO_ALG_* flags for the flags which go in here. Those are
    395 *	       used for fine-tuning the description of the transformation
    396 *	       algorithm.
    397 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
    398 *		   of the smallest possible unit which can be transformed with
    399 *		   this algorithm. The users must respect this value.
     400 *		   In case of a HASH transformation, it is possible for a smaller
     401 *		   block than @cra_blocksize to be passed to the crypto API for
     402 *		   transformation; for any other transformation type, an error
     403 *		   will be returned upon any attempt to transform chunks smaller
     404 *		   than @cra_blocksize.
    405 * @cra_ctxsize: Size of the operational context of the transformation. This
    406 *		 value informs the kernel crypto API about the memory size
    407 *		 needed to be allocated for the transformation context.
    408 * @cra_alignmask: Alignment mask for the input and output data buffer. The data
    409 *		   buffer containing the input data for the algorithm must be
    410 *		   aligned to this alignment mask. The data buffer for the
    411 *		   output data must be aligned to this alignment mask. Note that
    412 *		   the Crypto API will do the re-alignment in software, but
    413 *		   only under special conditions and there is a performance hit.
    414 *		   The re-alignment happens at these occasions for different
    415 *		   @cra_u types: cipher -- For both input data and output data
    416 *		   buffer; ahash -- For output hash destination buf; shash --
    417 *		   For output hash destination buf.
    418 *		   This is needed on hardware which is flawed by design and
    419 *		   cannot pick data from arbitrary addresses.
    420 * @cra_priority: Priority of this transformation implementation. In case
    421 *		  multiple transformations with same @cra_name are available to
    422 *		  the Crypto API, the kernel will use the one with highest
    423 *		  @cra_priority.
    424 * @cra_name: Generic name (usable by multiple implementations) of the
    425 *	      transformation algorithm. This is the name of the transformation
    426 *	      itself. This field is used by the kernel when looking up the
     427 *	      providers of a particular transformation.
    428 * @cra_driver_name: Unique name of the transformation provider. This is the
    429 *		     name of the provider of the transformation. This can be any
    430 *		     arbitrary value, but in the usual case, this contains the
    431 *		     name of the chip or provider and the name of the
    432 *		     transformation algorithm.
    433 * @cra_type: Type of the cryptographic transformation. This is a pointer to
    434 *	      struct crypto_type, which implements callbacks common for all
    435 *	      transformation types. There are multiple options, such as
    436 *	      &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
    437 *	      This field might be empty. In that case, there are no common
    438 *	      callbacks. This is the case for: cipher, compress, shash.
    439 * @cra_u: Callbacks implementing the transformation. This is a union of
    440 *	   multiple structures. Depending on the type of transformation selected
    441 *	   by @cra_type and @cra_flags above, the associated structure must be
    442 *	   filled with callbacks. This field might be empty. This is the case
    443 *	   for ahash, shash.
    444 * @cra_init: Initialize the cryptographic transformation object. This function
    445 *	      is used to initialize the cryptographic transformation object.
    446 *	      This function is called only once at the instantiation time, right
    447 *	      after the transformation context was allocated. In case the
    448 *	      cryptographic hardware has some special requirements which need to
    449 *	      be handled by software, this function shall check for the precise
    450 *	      requirement of the transformation and put any software fallbacks
    451 *	      in place.
    452 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
    453 *	      counterpart to @cra_init, used to remove various changes set in
    454 *	      @cra_init.
    455 * @cra_u.cipher: Union member which contains a single-block symmetric cipher
    456 *		  definition. See @struct @cipher_alg.
    457 * @cra_u.compress: Union member which contains a (de)compression algorithm.
    458 *		    See @struct @compress_alg.
    459 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE
    460 * @cra_list: internally used
    461 * @cra_users: internally used
    462 * @cra_refcnt: internally used
    463 * @cra_destroy: internally used
    464 *
    465 * @stats: union of all possible crypto_istat_xxx structures
    466 * @stats.aead:		statistics for AEAD algorithm
    467 * @stats.akcipher:	statistics for akcipher algorithm
    468 * @stats.cipher:	statistics for cipher algorithm
    469 * @stats.compress:	statistics for compress algorithm
    470 * @stats.hash:		statistics for hash algorithm
    471 * @stats.rng:		statistics for rng algorithm
    472 * @stats.kpp:		statistics for KPP algorithm
    473 *
    474 * The struct crypto_alg describes a generic Crypto API algorithm and is common
    475 * for all of the transformations. Any variable not documented here shall not
    476 * be used by a cipher implementation as it is internal to the Crypto API.
    477 */
    478struct crypto_alg {
    479	struct list_head cra_list;
    480	struct list_head cra_users;
    481
    482	u32 cra_flags;
    483	unsigned int cra_blocksize;
    484	unsigned int cra_ctxsize;
    485	unsigned int cra_alignmask;
    486
    487	int cra_priority;
    488	refcount_t cra_refcnt;
    489
    490	char cra_name[CRYPTO_MAX_ALG_NAME];
    491	char cra_driver_name[CRYPTO_MAX_ALG_NAME];
    492
    493	const struct crypto_type *cra_type;
    494
    495	union {
    496		struct cipher_alg cipher;
    497		struct compress_alg compress;
    498	} cra_u;
    499
    500	int (*cra_init)(struct crypto_tfm *tfm);
    501	void (*cra_exit)(struct crypto_tfm *tfm);
    502	void (*cra_destroy)(struct crypto_alg *alg);
    503	
    504	struct module *cra_module;
    505
    506#ifdef CONFIG_CRYPTO_STATS
    507	union {
    508		struct crypto_istat_aead aead;
    509		struct crypto_istat_akcipher akcipher;
    510		struct crypto_istat_cipher cipher;
    511		struct crypto_istat_compress compress;
    512		struct crypto_istat_hash hash;
    513		struct crypto_istat_rng rng;
    514		struct crypto_istat_kpp kpp;
    515	} stats;
    516#endif /* CONFIG_CRYPTO_STATS */
    517
    518} CRYPTO_MINALIGN_ATTR;
    519
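/*
 * Illustrative sketch only (hypothetical "foo" single-block cipher, not part
 * of this header): a driver fills in struct crypto_alg, provides the
 * single-block cipher callbacks through @cra_u.cipher, and then registers
 * the algorithm with crypto_register_alg() from its module init:
 *
 *	static struct crypto_alg foo_alg = {
 *		.cra_name		= "foo",
 *		.cra_driver_name	= "foo-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct foo_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u.cipher		= {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= foo_setkey,
 *			.cia_encrypt		= foo_encrypt,
 *			.cia_decrypt		= foo_decrypt,
 *		},
 *	};
 *
 * Module init would then call crypto_register_alg(&foo_alg) and module exit
 * crypto_unregister_alg(&foo_alg).
 */
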
    520#ifdef CONFIG_CRYPTO_STATS
    521void crypto_stats_init(struct crypto_alg *alg);
    522void crypto_stats_get(struct crypto_alg *alg);
    523void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
    524void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
    525void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
    526void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
    527void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
    528void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
    529void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
    530void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
    531void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
    532void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
    533void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
    534void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
    535void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
    536void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
    537void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
    538void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
    539void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
    540#else
    541static inline void crypto_stats_init(struct crypto_alg *alg)
    542{}
    543static inline void crypto_stats_get(struct crypto_alg *alg)
    544{}
    545static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
    546{}
    547static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
    548{}
    549static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
    550{}
    551static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
    552{}
    553static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
    554{}
    555static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
    556{}
    557static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
    558{}
    559static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
    560{}
    561static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
    562{}
    563static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
    564{}
    565static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
    566{}
    567static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
    568{}
    569static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
    570{}
    571static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
    572{}
    573static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
    574{}
    575static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
    576{}
    577static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
    578{}
    579#endif
    580/*
    581 * A helper struct for waiting for completion of async crypto ops
    582 */
    583struct crypto_wait {
    584	struct completion completion;
    585	int err;
    586};
    587
    588/*
    589 * Macro for declaring a crypto op async wait object on stack
    590 */
    591#define DECLARE_CRYPTO_WAIT(_wait) \
    592	struct crypto_wait _wait = { \
    593		COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }
    594
    595/*
     596 * Async ops completion helper functions
    597 */
    598void crypto_req_done(struct crypto_async_request *req, int err);
    599
    600static inline int crypto_wait_req(int err, struct crypto_wait *wait)
    601{
    602	switch (err) {
    603	case -EINPROGRESS:
    604	case -EBUSY:
    605		wait_for_completion(&wait->completion);
    606		reinit_completion(&wait->completion);
    607		err = wait->err;
    608		break;
    609	}
    610
    611	return err;
    612}
    613
    614static inline void crypto_init_wait(struct crypto_wait *wait)
    615{
    616	init_completion(&wait->completion);
    617}
    618
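/*
 * Illustrative sketch only (hypothetical caller with a previously prepared
 * skcipher request "req", not part of this header): a synchronous user
 * drives an asynchronous operation by installing crypto_req_done() as the
 * completion callback and folding the return code through crypto_wait_req():
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
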
    619/*
    620 * Algorithm registration interface.
    621 */
    622int crypto_register_alg(struct crypto_alg *alg);
    623void crypto_unregister_alg(struct crypto_alg *alg);
    624int crypto_register_algs(struct crypto_alg *algs, int count);
    625void crypto_unregister_algs(struct crypto_alg *algs, int count);
    626
    627/*
    628 * Algorithm query interface.
    629 */
    630int crypto_has_alg(const char *name, u32 type, u32 mask);
    631
    632/*
    633 * Transforms: user-instantiated objects which encapsulate algorithms
    634 * and core processing logic.  Managed via crypto_alloc_*() and
    635 * crypto_free_*(), as well as the various helpers below.
    636 */
    637
    638struct crypto_tfm {
    639
    640	u32 crt_flags;
    641
    642	int node;
    643	
    644	void (*exit)(struct crypto_tfm *tfm);
    645	
    646	struct crypto_alg *__crt_alg;
    647
    648	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
    649};
    650
    651struct crypto_comp {
    652	struct crypto_tfm base;
    653};
    654
    655/* 
    656 * Transform user interface.
    657 */
    658 
    659struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
    660void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);
    661
    662static inline void crypto_free_tfm(struct crypto_tfm *tfm)
    663{
    664	return crypto_destroy_tfm(tfm, tfm);
    665}
    666
    667int alg_test(const char *driver, const char *alg, u32 type, u32 mask);
    668
    669/*
    670 * Transform helpers which query the underlying algorithm.
    671 */
    672static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
    673{
    674	return tfm->__crt_alg->cra_name;
    675}
    676
    677static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
    678{
    679	return tfm->__crt_alg->cra_driver_name;
    680}
    681
    682static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
    683{
    684	return tfm->__crt_alg->cra_priority;
    685}
    686
    687static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
    688{
    689	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
    690}
    691
    692static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
    693{
    694	return tfm->__crt_alg->cra_blocksize;
    695}
    696
    697static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
    698{
    699	return tfm->__crt_alg->cra_alignmask;
    700}
    701
    702static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
    703{
    704	return tfm->crt_flags;
    705}
    706
    707static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
    708{
    709	tfm->crt_flags |= flags;
    710}
    711
    712static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
    713{
    714	tfm->crt_flags &= ~flags;
    715}
    716
    717static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
    718{
    719	return tfm->__crt_ctx;
    720}
    721
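/*
 * Illustrative sketch only (hypothetical driver, not part of this header):
 * a ->setkey() implementation typically stores the key in the per-transform
 * context returned by crypto_tfm_ctx():
 *
 *	struct foo_ctx {
 *		u8 key[32];
 *		unsigned int keylen;
 *	};
 *
 *	static int foo_setkey(struct crypto_tfm *tfm, const u8 *key,
 *			      unsigned int keylen)
 *	{
 *		struct foo_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		if (keylen > sizeof(ctx->key))
 *			return -EINVAL;
 *		memcpy(ctx->key, key, keylen);
 *		ctx->keylen = keylen;
 *		return 0;
 *	}
 */
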
    722static inline unsigned int crypto_tfm_ctx_alignment(void)
    723{
    724	struct crypto_tfm *tfm;
    725	return __alignof__(tfm->__crt_ctx);
    726}
    727
    728static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
    729{
    730	return (struct crypto_comp *)tfm;
    731}
    732
    733static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
    734						    u32 type, u32 mask)
    735{
    736	type &= ~CRYPTO_ALG_TYPE_MASK;
    737	type |= CRYPTO_ALG_TYPE_COMPRESS;
    738	mask |= CRYPTO_ALG_TYPE_MASK;
    739
    740	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
    741}
    742
    743static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
    744{
    745	return &tfm->base;
    746}
    747
    748static inline void crypto_free_comp(struct crypto_comp *tfm)
    749{
    750	crypto_free_tfm(crypto_comp_tfm(tfm));
    751}
    752
    753static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
    754{
    755	type &= ~CRYPTO_ALG_TYPE_MASK;
    756	type |= CRYPTO_ALG_TYPE_COMPRESS;
    757	mask |= CRYPTO_ALG_TYPE_MASK;
    758
    759	return crypto_has_alg(alg_name, type, mask);
    760}
    761
    762static inline const char *crypto_comp_name(struct crypto_comp *tfm)
    763{
    764	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
    765}
    766
    767int crypto_comp_compress(struct crypto_comp *tfm,
    768			 const u8 *src, unsigned int slen,
    769			 u8 *dst, unsigned int *dlen);
    770
    771int crypto_comp_decompress(struct crypto_comp *tfm,
    772			   const u8 *src, unsigned int slen,
    773			   u8 *dst, unsigned int *dlen);
    774
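/*
 * Illustrative sketch only (hypothetical caller and buffers, not part of
 * this header): compressing a buffer with the crypto_comp interface, using
 * "deflate" as an example algorithm name:
 *
 *	struct crypto_comp *tfm;
 *	unsigned int dlen = dst_size;
 *	int err;
 *
 *	tfm = crypto_alloc_comp("deflate", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	crypto_free_comp(tfm);
 */
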
    775#endif	/* _LINUX_CRYPTO_H */
    776