cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

caamalg_desc.c (56457B)


      1// SPDX-License-Identifier: GPL-2.0+
      2/*
      3 * Shared descriptors for aead, skcipher algorithms
      4 *
      5 * Copyright 2016-2019 NXP
      6 */
      7
      8#include "compat.h"
      9#include "desc_constr.h"
     10#include "caamalg_desc.h"
     11
     12/*
     13 * For aead functions, read the payload from req->src and write the
     14 * payload to req->dst.
     15 */
     16static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
     17{
     18	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
     19	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
     20			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
     21}
     22
     23/* Set DK bit in class 1 operation if shared */
     24static inline void append_dec_op1(u32 *desc, u32 type)
     25{
     26	u32 *jump_cmd, *uncond_jump_cmd;
     27
     28	/* DK bit is valid only for AES */
     29	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
     30		append_operation(desc, type | OP_ALG_AS_INITFINAL |
     31				 OP_ALG_DECRYPT);
     32		return;
     33	}
     34
     35	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
     36	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT);
     37	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
     38	set_jump_tgt_here(desc, jump_cmd);
     39	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT |
     40			 OP_ALG_AAI_DK);
     41	set_jump_tgt_here(desc, uncond_jump_cmd);
     42}
     43
     44/**
     45 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
     46 *                               (non-protocol) with no (null) encryption.
     47 * @desc: pointer to buffer used for descriptor construction
     48 * @adata: pointer to authentication transform definitions.
     49 *         A split key is required for SEC Era < 6; the size of the split key
     50 *         is specified in this case. Valid algorithm values - one of
     51 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
     52 *         with OP_ALG_AAI_HMAC_PRECOMP.
     53 * @icvsize: integrity check value (ICV) size (truncated or full)
     54 * @era: SEC Era
     55 */
     56void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
     57				 unsigned int icvsize, int era)
     58{
     59	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
     60
     61	init_sh_desc(desc, HDR_SHARE_SERIAL);
     62
     63	/* Skip if already shared */
     64	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
     65				   JUMP_COND_SHRD);
     66	if (era < 6) {
     67		if (adata->key_inline)
     68			append_key_as_imm(desc, adata->key_virt,
     69					  adata->keylen_pad, adata->keylen,
     70					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
     71					  KEY_ENC);
     72		else
     73			append_key(desc, adata->key_dma, adata->keylen,
     74				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
     75	} else {
     76		append_proto_dkp(desc, adata);
     77	}
     78	set_jump_tgt_here(desc, key_jump_cmd);
     79
     80	/* assoclen + cryptlen = seqinlen */
     81	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
     82
     83	/* Prepare to read and write cryptlen + assoclen bytes */
     84	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
     85	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
     86
     87	/*
     88	 * MOVE_LEN opcode is not available in all SEC HW revisions,
     89	 * thus need to do some magic, i.e. self-patch the descriptor
     90	 * buffer.
     91	 */
     92	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
     93				    MOVE_DEST_MATH3 |
     94				    (0x6 << MOVE_LEN_SHIFT));
     95	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
     96				     MOVE_DEST_DESCBUF |
     97				     MOVE_WAITCOMP |
     98				     (0x8 << MOVE_LEN_SHIFT));
     99
    100	/* Class 2 operation */
    101	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
    102			 OP_ALG_ENCRYPT);
    103
    104	/* Read and write cryptlen bytes */
    105	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
    106
    107	set_move_tgt_here(desc, read_move_cmd);
    108	set_move_tgt_here(desc, write_move_cmd);
    109	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
    110	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
    111		    MOVE_AUX_LS);
    112
    113	/* Write ICV */
    114	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
    115			 LDST_SRCDST_BYTE_CONTEXT);
    116
    117	print_hex_dump_debug("aead null enc shdesc@" __stringify(__LINE__)": ",
    118			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
    119			     1);
    120}
    121EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
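
/*
 * Illustrative caller sketch (an assumption about the jr/qi glue code, not
 * part of this file): the descriptor is built into a per-tfm buffer and then
 * synced for the device, roughly along these lines:
 *
 *	cnstr_shdsc_aead_null_encap(ctx->sh_desc_enc, &ctx->adata,
 *				    ctx->authsize, ctrlpriv->era);
 *	dma_sync_single_for_device(dev, ctx->sh_desc_enc_dma,
 *				   desc_bytes(ctx->sh_desc_enc),
 *				   DMA_TO_DEVICE);
 *
 * ctx, ctrlpriv, dev and the _dma handle are hypothetical names; only the
 * constructor signature comes from this file.
 */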
    122
    123/**
    124 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
    125 *                               (non-protocol) with no (null) decryption.
    126 * @desc: pointer to buffer used for descriptor construction
    127 * @adata: pointer to authentication transform definitions.
    128 *         A split key is required for SEC Era < 6; the size of the split key
    129 *         is specified in this case. Valid algorithm values - one of
    130 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
    131 *         with OP_ALG_AAI_HMAC_PRECOMP.
    132 * @icvsize: integrity check value (ICV) size (truncated or full)
    133 * @era: SEC Era
    134 */
    135void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
    136				 unsigned int icvsize, int era)
    137{
    138	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
    139
    140	init_sh_desc(desc, HDR_SHARE_SERIAL);
    141
    142	/* Skip if already shared */
    143	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    144				   JUMP_COND_SHRD);
    145	if (era < 6) {
    146		if (adata->key_inline)
    147			append_key_as_imm(desc, adata->key_virt,
    148					  adata->keylen_pad, adata->keylen,
    149					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
    150					  KEY_ENC);
    151		else
    152			append_key(desc, adata->key_dma, adata->keylen,
    153				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
    154	} else {
    155		append_proto_dkp(desc, adata);
    156	}
    157	set_jump_tgt_here(desc, key_jump_cmd);
    158
    159	/* Class 2 operation */
    160	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
    161			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
    162
    163	/* assoclen + cryptlen = seqoutlen */
    164	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
    165
    166	/* Prepare to read and write cryptlen + assoclen bytes */
    167	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
    168	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
    169
    170	/*
    171	 * MOVE_LEN opcode is not available in all SEC HW revisions,
    172	 * thus need to do some magic, i.e. self-patch the descriptor
    173	 * buffer.
    174	 */
    175	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
    176				    MOVE_DEST_MATH2 |
    177				    (0x6 << MOVE_LEN_SHIFT));
    178	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
    179				     MOVE_DEST_DESCBUF |
    180				     MOVE_WAITCOMP |
    181				     (0x8 << MOVE_LEN_SHIFT));
    182
    183	/* Read and write cryptlen bytes */
    184	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
    185
    186	/*
    187	 * Insert a NOP here, since we need at least 4 instructions between
    188	 * code patching the descriptor buffer and the location being patched.
    189	 */
    190	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
    191	set_jump_tgt_here(desc, jump_cmd);
    192
    193	set_move_tgt_here(desc, read_move_cmd);
    194	set_move_tgt_here(desc, write_move_cmd);
    195	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
    196	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
    197		    MOVE_AUX_LS);
    198	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
    199
    200	/* Load ICV */
    201	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
    202			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
    203
    204	print_hex_dump_debug("aead null dec shdesc@" __stringify(__LINE__)": ",
    205			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
    206			     1);
    207}
    208EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
    209
    210static void init_sh_desc_key_aead(u32 * const desc,
    211				  struct alginfo * const cdata,
    212				  struct alginfo * const adata,
    213				  const bool is_rfc3686, u32 *nonce, int era)
    214{
    215	u32 *key_jump_cmd;
    216	unsigned int enckeylen = cdata->keylen;
    217
    218	/* Note: Context registers are saved. */
    219	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
    220
    221	/* Skip if already shared */
    222	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    223				   JUMP_COND_SHRD);
    224
    225	/*
    226	 * RFC3686 specific:
    227	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
    228	 *	| enckeylen = encryption key size + nonce size
    229	 */
    230	if (is_rfc3686)
    231		enckeylen -= CTR_RFC3686_NONCE_SIZE;
    232
    233	if (era < 6) {
    234		if (adata->key_inline)
    235			append_key_as_imm(desc, adata->key_virt,
    236					  adata->keylen_pad, adata->keylen,
    237					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
    238					  KEY_ENC);
    239		else
    240			append_key(desc, adata->key_dma, adata->keylen,
    241				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
    242	} else {
    243		append_proto_dkp(desc, adata);
    244	}
    245
    246	if (cdata->key_inline)
    247		append_key_as_imm(desc, cdata->key_virt, enckeylen,
    248				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
    249	else
    250		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
    251			   KEY_DEST_CLASS_REG);
    252
    253	/* Load Counter into CONTEXT1 reg */
    254	if (is_rfc3686) {
    255		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
    256				   LDST_CLASS_IND_CCB |
    257				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
    258		append_move(desc,
    259			    MOVE_SRC_OUTFIFO |
    260			    MOVE_DEST_CLASS1CTX |
    261			    (16 << MOVE_OFFSET_SHIFT) |
    262			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
    263	}
    264
    265	set_jump_tgt_here(desc, key_jump_cmd);
    266}
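
/*
 * Resulting CONTEXT1 layout for the RFC3686 case, as a sketch. This assumes
 * the caller passes ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE, which is not
 * stated in this file; only the nonce placement at offset 16 is.
 *
 *	CONTEXT1[16..19]  nonce    (moved in above)
 *	CONTEXT1[20..27]  IV       (loaded per request at ctx1_iv_off)
 *	CONTEXT1[28..31]  counter  (be32 1, loaded by the encap/decap code)
 *
 * i.e. the standard RFC3686 counter block nonce || IV || 0x00000001.
 */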
    267
    268/**
    269 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
    270 *                          (non-protocol).
    271 * @desc: pointer to buffer used for descriptor construction
    272 * @cdata: pointer to block cipher transform definitions
    273 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
    274 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
    275 * @adata: pointer to authentication transform definitions.
    276 *         A split key is required for SEC Era < 6; the size of the split key
    277 *         is specified in this case. Valid algorithm values - one of
    278 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
    279 *         with OP_ALG_AAI_HMAC_PRECOMP.
    280 * @ivsize: initialization vector size
    281 * @icvsize: integrity check value (ICV) size (truncated or full)
    282 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
    283 * @nonce: pointer to rfc3686 nonce
    284 * @ctx1_iv_off: IV offset in CONTEXT1 register
    285 * @is_qi: true when called from caam/qi
    286 * @era: SEC Era
    287 */
    288void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
    289			    struct alginfo *adata, unsigned int ivsize,
    290			    unsigned int icvsize, const bool is_rfc3686,
    291			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
    292			    int era)
    293{
    294	/* Note: Context registers are saved. */
    295	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
    296
    297	/* Class 2 operation */
    298	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
    299			 OP_ALG_ENCRYPT);
    300
    301	if (is_qi) {
    302		u32 *wait_load_cmd;
    303
    304		/* REG3 = assoclen */
    305		append_seq_load(desc, 4, LDST_CLASS_DECO |
    306				LDST_SRCDST_WORD_DECO_MATH3 |
    307				(4 << LDST_OFFSET_SHIFT));
    308
    309		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    310					    JUMP_COND_CALM | JUMP_COND_NCP |
    311					    JUMP_COND_NOP | JUMP_COND_NIP |
    312					    JUMP_COND_NIFP);
    313		set_jump_tgt_here(desc, wait_load_cmd);
    314
    315		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
    316				LDST_SRCDST_BYTE_CONTEXT |
    317				(ctx1_iv_off << LDST_OFFSET_SHIFT));
    318	}
    319
    320	/* Read and write assoclen bytes */
    321	if (is_qi || era < 3) {
    322		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
    323		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
    324	} else {
    325		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
    326		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
    327	}
    328
    329	/* Skip assoc data */
    330	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
    331
    332	/* read assoc before reading payload */
    333	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
    334				      FIFOLDST_VLF);
    335
    336	/* Load Counter into CONTEXT1 reg */
    337	if (is_rfc3686)
    338		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
    339				     LDST_SRCDST_BYTE_CONTEXT |
    340				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
    341				      LDST_OFFSET_SHIFT));
    342
    343	/* Class 1 operation */
    344	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
    345			 OP_ALG_ENCRYPT);
    346
    347	/* Read and write cryptlen bytes */
    348	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
    349	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
    350	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
    351
    352	/* Write ICV */
    353	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
    354			 LDST_SRCDST_BYTE_CONTEXT);
    355
    356	print_hex_dump_debug("aead enc shdesc@" __stringify(__LINE__)": ",
    357			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
    358			     1);
    359}
    360EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
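
/*
 * Example parameterization (a sketch of what a caller might pass, based on
 * the generic crypto API rather than anything stated in this file). For
 * authenc(hmac(sha256),cbc(aes)) on the job-ring interface:
 *
 *	cdata->algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
 *	adata->algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP;
 *	cnstr_shdsc_aead_encap(desc, cdata, adata, AES_BLOCK_SIZE, authsize,
 *			       false, NULL, 0, false, era);
 *
 * For the rfc3686(ctr(aes)) wrapping, is_rfc3686 is true, nonce points just
 * past the cipher key and ctx1_iv_off is 16 + CTR_RFC3686_NONCE_SIZE.
 */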
    361
    362/**
    363 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
    364 *                          (non-protocol).
    365 * @desc: pointer to buffer used for descriptor construction
    366 * @cdata: pointer to block cipher transform definitions
    367 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
    368 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
    369 * @adata: pointer to authentication transform definitions.
    370 *         A split key is required for SEC Era < 6; the size of the split key
    371 *         is specified in this case. Valid algorithm values - one of
    372 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
    373 *         with OP_ALG_AAI_HMAC_PRECOMP.
    374 * @ivsize: initialization vector size
    375 * @icvsize: integrity check value (ICV) size (truncated or full)
    376 * @geniv: whether to generate Encrypted Chain IV
    377 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
    378 * @nonce: pointer to rfc3686 nonce
    379 * @ctx1_iv_off: IV offset in CONTEXT1 register
    380 * @is_qi: true when called from caam/qi
    381 * @era: SEC Era
    382 */
    383void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
    384			    struct alginfo *adata, unsigned int ivsize,
    385			    unsigned int icvsize, const bool geniv,
    386			    const bool is_rfc3686, u32 *nonce,
    387			    const u32 ctx1_iv_off, const bool is_qi, int era)
    388{
    389	/* Note: Context registers are saved. */
    390	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
    391
    392	/* Class 2 operation */
    393	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
    394			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
    395
    396	if (is_qi) {
    397		u32 *wait_load_cmd;
    398
    399		/* REG3 = assoclen */
    400		append_seq_load(desc, 4, LDST_CLASS_DECO |
    401				LDST_SRCDST_WORD_DECO_MATH3 |
    402				(4 << LDST_OFFSET_SHIFT));
    403
    404		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    405					    JUMP_COND_CALM | JUMP_COND_NCP |
    406					    JUMP_COND_NOP | JUMP_COND_NIP |
    407					    JUMP_COND_NIFP);
    408		set_jump_tgt_here(desc, wait_load_cmd);
    409
    410		if (!geniv)
    411			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
    412					LDST_SRCDST_BYTE_CONTEXT |
    413					(ctx1_iv_off << LDST_OFFSET_SHIFT));
    414	}
    415
    416	/* Read and write assoclen bytes */
    417	if (is_qi || era < 3) {
    418		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
    419		if (geniv)
    420			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
    421						ivsize);
    422		else
    423			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
    424					CAAM_CMD_SZ);
    425	} else {
    426		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
    427		if (geniv)
    428			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
    429						ivsize);
    430		else
    431			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
    432					CAAM_CMD_SZ);
    433	}
    434
    435	/* Skip assoc data */
    436	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
    437
    438	/* read assoc before reading payload */
    439	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
    440			     KEY_VLF);
    441
    442	if (geniv) {
    443		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
    444				LDST_SRCDST_BYTE_CONTEXT |
    445				(ctx1_iv_off << LDST_OFFSET_SHIFT));
    446		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
    447			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
    448	}
    449
    450	/* Load Counter into CONTEXT1 reg */
    451	if (is_rfc3686)
    452		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
    453				     LDST_SRCDST_BYTE_CONTEXT |
    454				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
    455				      LDST_OFFSET_SHIFT));
    456
    457	/* Choose operation */
    458	if (ctx1_iv_off)
    459		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
    460				 OP_ALG_DECRYPT);
    461	else
    462		append_dec_op1(desc, cdata->algtype);
    463
    464	/* Read and write cryptlen bytes */
    465	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
    466	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
    467	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
    468
    469	/* Load ICV */
    470	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
    471			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
    472
    473	print_hex_dump_debug("aead dec shdesc@" __stringify(__LINE__)": ",
    474			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
    475			     1);
    476}
    477EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
    478
    479/**
    480 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
    481 *                             (non-protocol) with HW-generated initialization
    482 *                             vector.
    483 * @desc: pointer to buffer used for descriptor construction
    484 * @cdata: pointer to block cipher transform definitions
    485 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
    486 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
    487 * @adata: pointer to authentication transform definitions.
    488 *         A split key is required for SEC Era < 6; the size of the split key
    489 *         is specified in this case. Valid algorithm values - one of
    490 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
    491 *         with OP_ALG_AAI_HMAC_PRECOMP.
    492 * @ivsize: initialization vector size
    493 * @icvsize: integrity check value (ICV) size (truncated or full)
    494 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
    495 * @nonce: pointer to rfc3686 nonce
    496 * @ctx1_iv_off: IV offset in CONTEXT1 register
    497 * @is_qi: true when called from caam/qi
    498 * @era: SEC Era
    499 */
    500void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
    501			       struct alginfo *adata, unsigned int ivsize,
    502			       unsigned int icvsize, const bool is_rfc3686,
    503			       u32 *nonce, const u32 ctx1_iv_off,
    504			       const bool is_qi, int era)
    505{
    506	u32 geniv, moveiv;
    507	u32 *wait_cmd;
    508
    509	/* Note: Context registers are saved. */
    510	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
    511
    512	if (is_qi) {
    513		u32 *wait_load_cmd;
    514
    515		/* REG3 = assoclen */
    516		append_seq_load(desc, 4, LDST_CLASS_DECO |
    517				LDST_SRCDST_WORD_DECO_MATH3 |
    518				(4 << LDST_OFFSET_SHIFT));
    519
    520		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    521					    JUMP_COND_CALM | JUMP_COND_NCP |
    522					    JUMP_COND_NOP | JUMP_COND_NIP |
    523					    JUMP_COND_NIFP);
    524		set_jump_tgt_here(desc, wait_load_cmd);
    525	}
    526
    527	if (is_rfc3686) {
    528		if (is_qi)
    529			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
    530					LDST_SRCDST_BYTE_CONTEXT |
    531					(ctx1_iv_off << LDST_OFFSET_SHIFT));
    532
    533		goto copy_iv;
    534	}
    535
    536	/* Generate IV */
    537	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
    538		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
    539		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
    540	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
    541			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
    542	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
    543	append_move(desc, MOVE_WAITCOMP |
    544		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
    545		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
    546		    (ivsize << MOVE_LEN_SHIFT));
    547	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
    548
    549copy_iv:
    550	/* Copy IV from class 1 context to the output FIFO */
    551	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
    552		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
    553		    (ivsize << MOVE_LEN_SHIFT));
    554
    555	/* Return to the encrypt flow: class 2 (authentication) operation */
    556	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
    557			 OP_ALG_ENCRYPT);
    558
    559	/* Read and write assoclen bytes */
    560	if (is_qi || era < 3) {
    561		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
    562		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
    563	} else {
    564		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
    565		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
    566	}
    567
    568	/* Skip assoc data */
    569	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
    570
    571	/* read assoc before reading payload */
    572	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
    573			     KEY_VLF);
    574
    575	/* Copy iv from outfifo to class 2 fifo */
    576	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
    577		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
    578	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
    579			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
    580	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
    581			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
    582
    583	/* Load Counter into CONTEXT1 reg */
    584	if (is_rfc3686)
    585		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
    586				     LDST_SRCDST_BYTE_CONTEXT |
    587				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
    588				      LDST_OFFSET_SHIFT));
    589
    590	/* Class 1 operation */
    591	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
    592			 OP_ALG_ENCRYPT);
    593
    594	/* Will write ivsize + cryptlen */
    595	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
    596
    597	/* No need to reload the IV */
    598	append_seq_fifo_load(desc, ivsize,
    599			     FIFOLD_CLASS_SKIP);
    600
    601	/* Will read cryptlen */
    602	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
    603
    604	/*
    605	 * Wait for IV transfer (ofifo -> class2) to finish before starting
    606	 * ciphertext transfer (ofifo -> external memory).
    607	 */
    608	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
    609	set_jump_tgt_here(desc, wait_cmd);
    610
    611	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
    612			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
    613	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
    614
    615	/* Write ICV */
    616	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
    617			 LDST_SRCDST_BYTE_CONTEXT);
    618
    619	print_hex_dump_debug("aead givenc shdesc@" __stringify(__LINE__)": ",
    620			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
    621			     1);
    622}
    623EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
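
/*
 * Flow summary for the generated-IV path, restating the inline comments
 * above: a random IV of @ivsize bytes is produced through the info FIFO,
 * parked in CONTEXT1 at @ctx1_iv_off, copied out through the OFIFO so that
 * class 2 authenticates it and it lands in the output ahead of the
 * ciphertext, and the descriptor waits for the OFIFO -> class 2 transfer
 * before streaming the payload. For rfc3686 and QI the IV is loaded instead
 * of generated, hence the jump to copy_iv.
 */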
    624
    625/**
    626 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
    627 * @desc: pointer to buffer used for descriptor construction
    628 * @cdata: pointer to block cipher transform definitions
    629 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
    630 * @ivsize: initialization vector size
    631 * @icvsize: integrity check value (ICV) size (truncated or full)
    632 * @is_qi: true when called from caam/qi
    633 */
    634void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
    635			   unsigned int ivsize, unsigned int icvsize,
    636			   const bool is_qi)
    637{
    638	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
    639	    *zero_assoc_jump_cmd2;
    640
    641	init_sh_desc(desc, HDR_SHARE_SERIAL);
    642
    643	/* skip key loading if it is already loaded due to sharing */
    644	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    645				   JUMP_COND_SHRD);
    646	if (cdata->key_inline)
    647		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
    648				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
    649	else
    650		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
    651			   KEY_DEST_CLASS_REG);
    652	set_jump_tgt_here(desc, key_jump_cmd);
    653
    654	/* class 1 operation */
    655	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
    656			 OP_ALG_ENCRYPT);
    657
    658	if (is_qi) {
    659		u32 *wait_load_cmd;
    660
    661		/* REG3 = assoclen */
    662		append_seq_load(desc, 4, LDST_CLASS_DECO |
    663				LDST_SRCDST_WORD_DECO_MATH3 |
    664				(4 << LDST_OFFSET_SHIFT));
    665
    666		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    667					    JUMP_COND_CALM | JUMP_COND_NCP |
    668					    JUMP_COND_NOP | JUMP_COND_NIP |
    669					    JUMP_COND_NIFP);
    670		set_jump_tgt_here(desc, wait_load_cmd);
    671
    672		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
    673					ivsize);
    674	} else {
    675		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
    676				CAAM_CMD_SZ);
    677	}
    678
    679	/* if assoclen + cryptlen is ZERO, skip to ICV write */
    680	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
    681						 JUMP_COND_MATH_Z);
    682
    683	if (is_qi)
    684		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
    685				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
    686
    687	/* if assoclen is ZERO, skip reading the assoc data */
    688	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
    689	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
    690					   JUMP_COND_MATH_Z);
    691
    692	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
    693
    694	/* skip assoc data */
    695	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
    696
    697	/* cryptlen = seqinlen - assoclen */
    698	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
    699
    700	/* if cryptlen is ZERO jump to zero-payload commands */
    701	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
    702					    JUMP_COND_MATH_Z);
    703
    704	/* read assoc data */
    705	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
    706			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
    707	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
    708
    709	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
    710
    711	/* write encrypted data */
    712	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
    713
    714	/* read payload data */
    715	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
    716			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
    717
    718	/* jump to ICV writing */
    719	if (is_qi)
    720		append_jump(desc, JUMP_TEST_ALL | 4);
    721	else
    722		append_jump(desc, JUMP_TEST_ALL | 2);
    723
    724	/* zero-payload commands */
    725	set_jump_tgt_here(desc, zero_payload_jump_cmd);
    726
    727	/* read assoc data */
    728	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
    729			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
    730	if (is_qi)
    731		/* jump to ICV writing */
    732		append_jump(desc, JUMP_TEST_ALL | 2);
    733
    734	/* There is no input data */
    735	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
    736
    737	if (is_qi)
    738		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
    739				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
    740				     FIFOLD_TYPE_LAST1);
    741
    742	/* write ICV */
    743	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
    744			 LDST_SRCDST_BYTE_CONTEXT);
    745
    746	print_hex_dump_debug("gcm enc shdesc@" __stringify(__LINE__)": ",
    747			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
    748			     1);
    749}
    750EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
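
/*
 * Example parameterization (illustrative assumption, not taken from this
 * file): for gcm(aes) the caller is expected to pass the 12-byte GCM IV size
 * and its authsize as icvsize, e.g.
 *
 *	cdata->algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM;
 *	cnstr_shdsc_gcm_encap(desc, cdata, GCM_AES_IV_SIZE, authsize, false);
 *
 * With is_qi true the IV is instead pulled from the input sequence, which is
 * why the QI branches above add the extra IV FIFO LOAD commands.
 */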
    751
    752/**
    753 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
    754 * @desc: pointer to buffer used for descriptor construction
    755 * @cdata: pointer to block cipher transform definitions
    756 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
    757 * @ivsize: initialization vector size
    758 * @icvsize: integrity check value (ICV) size (truncated or full)
    759 * @is_qi: true when called from caam/qi
    760 */
    761void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
    762			   unsigned int ivsize, unsigned int icvsize,
    763			   const bool is_qi)
    764{
    765	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;
    766
    767	init_sh_desc(desc, HDR_SHARE_SERIAL);
    768
    769	/* skip key loading if it is already loaded due to sharing */
    770	key_jump_cmd = append_jump(desc, JUMP_JSL |
    771				   JUMP_TEST_ALL | JUMP_COND_SHRD);
    772	if (cdata->key_inline)
    773		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
    774				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
    775	else
    776		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
    777			   KEY_DEST_CLASS_REG);
    778	set_jump_tgt_here(desc, key_jump_cmd);
    779
    780	/* class 1 operation */
    781	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
    782			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
    783
    784	if (is_qi) {
    785		u32 *wait_load_cmd;
    786
    787		/* REG3 = assoclen */
    788		append_seq_load(desc, 4, LDST_CLASS_DECO |
    789				LDST_SRCDST_WORD_DECO_MATH3 |
    790				(4 << LDST_OFFSET_SHIFT));
    791
    792		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    793					    JUMP_COND_CALM | JUMP_COND_NCP |
    794					    JUMP_COND_NOP | JUMP_COND_NIP |
    795					    JUMP_COND_NIFP);
    796		set_jump_tgt_here(desc, wait_load_cmd);
    797
    798		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
    799				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
    800	}
    801
    802	/* if assoclen is ZERO, skip reading the assoc data */
    803	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
    804	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
    805						 JUMP_COND_MATH_Z);
    806
    807	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
    808
    809	/* skip assoc data */
    810	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
    811
    812	/* read assoc data */
    813	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
    814			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
    815
    816	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
    817
    818	/* cryptlen = seqoutlen - assoclen */
    819	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
    820
    821	/* jump to zero-payload command if cryptlen is zero */
    822	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
    823					    JUMP_COND_MATH_Z);
    824
    825	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
    826
    827	/* store encrypted data */
    828	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
    829
    830	/* read payload data */
    831	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
    832			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
    833
    834	/* zero-payload command */
    835	set_jump_tgt_here(desc, zero_payload_jump_cmd);
    836
    837	/* read ICV */
    838	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
    839			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
    840
    841	print_hex_dump_debug("gcm dec shdesc@" __stringify(__LINE__)": ",
    842			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
    843			     1);
    844}
    845EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
    846
    847/**
    848 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
    849 *                             (non-protocol).
    850 * @desc: pointer to buffer used for descriptor construction
    851 * @cdata: pointer to block cipher transform definitions
    852 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
    853 * @ivsize: initialization vector size
    854 * @icvsize: integrity check value (ICV) size (truncated or full)
    855 * @is_qi: true when called from caam/qi
    856 *
    857 * Input sequence: AAD | PTXT
    858 * Output sequence: AAD | CTXT | ICV
    859 * AAD length (assoclen), which includes the IV length, is available in Math3.
    860 */
    861void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
    862			       unsigned int ivsize, unsigned int icvsize,
    863			       const bool is_qi)
    864{
    865	u32 *key_jump_cmd, *zero_cryptlen_jump_cmd, *skip_instructions;
    866	init_sh_desc(desc, HDR_SHARE_SERIAL);
    867
    868	/* Skip key loading if it is loaded due to sharing */
    869	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    870				   JUMP_COND_SHRD);
    871	if (cdata->key_inline)
    872		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
    873				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
    874	else
    875		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
    876			   KEY_DEST_CLASS_REG);
    877	set_jump_tgt_here(desc, key_jump_cmd);
    878
    879	/* Class 1 operation */
    880	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
    881			 OP_ALG_ENCRYPT);
    882
    883	if (is_qi) {
    884		u32 *wait_load_cmd;
    885
    886		/* REG3 = assoclen */
    887		append_seq_load(desc, 4, LDST_CLASS_DECO |
    888				LDST_SRCDST_WORD_DECO_MATH3 |
    889				(4 << LDST_OFFSET_SHIFT));
    890
    891		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    892					    JUMP_COND_CALM | JUMP_COND_NCP |
    893					    JUMP_COND_NOP | JUMP_COND_NIP |
    894					    JUMP_COND_NIFP);
    895		set_jump_tgt_here(desc, wait_load_cmd);
    896
    897		/* Read salt and IV */
    898		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
    899					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
    900					FIFOLD_TYPE_IV);
    901		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
    902				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
    903	}
    904
    905	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
    906	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
    907
    908	/* Skip AAD */
    909	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
    910
    911	/* Read cryptlen and set this value into VARSEQOUTLEN */
    912	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
    913
    914	/* If cryptlen is ZERO jump to AAD command */
    915	zero_cryptlen_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
    916					    JUMP_COND_MATH_Z);
    917
    918	/* Read AAD data */
    919	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
    920			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
    921
    922	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
    923	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA);
    924
    925	/* Skip IV */
    926	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
    927	append_math_add(desc, VARSEQINLEN, VARSEQOUTLEN, REG0, CAAM_CMD_SZ);
    928
    929	/* Write encrypted data */
    930	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
    931
    932	/* Read payload data */
    933	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
    934			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
    935
    936	/* Jump instructions to avoid double reading of AAD */
    937	skip_instructions = append_jump(desc, JUMP_TEST_ALL);
    938
    939	/* There is no input data, cryptlen = 0 */
    940	set_jump_tgt_here(desc, zero_cryptlen_jump_cmd);
    941
    942	/* Read AAD */
    943	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
    944			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
    945
    946	set_jump_tgt_here(desc, skip_instructions);
    947
    948	/* Write ICV */
    949	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
    950			 LDST_SRCDST_BYTE_CONTEXT);
    951
    952	print_hex_dump_debug("rfc4106 enc shdesc@" __stringify(__LINE__)": ",
    953			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
    954			     1);
    955}
    956EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
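
/*
 * Key material layout assumed by the QI branch above (and by the rfc4106
 * decap and rfc4543 variants below): cdata->key_virt holds the AES key
 * followed by the 4-byte RFC4106 salt, with cdata->keylen covering only the
 * AES key, so the full 12-byte GCM IV is salt || 8-byte explicit IV:
 *
 *	|<---- cdata->keylen ---->|<- 4 ->|
 *	|         AES key         | salt  |
 *
 * This is an inference from the "Read salt and IV" commands; the setkey side
 * is not shown in this file.
 */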
    957
    958/**
    959 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
    960 *                             (non-protocol).
    961 * @desc: pointer to buffer used for descriptor construction
    962 * @cdata: pointer to block cipher transform definitions
    963 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
    964 * @ivsize: initialization vector size
    965 * @icvsize: integrity check value (ICV) size (truncated or full)
    966 * @is_qi: true when called from caam/qi
    967 */
    968void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
    969			       unsigned int ivsize, unsigned int icvsize,
    970			       const bool is_qi)
    971{
    972	u32 *key_jump_cmd;
    973
    974	init_sh_desc(desc, HDR_SHARE_SERIAL);
    975
    976	/* Skip key loading if it is loaded due to sharing */
    977	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
    978				   JUMP_COND_SHRD);
    979	if (cdata->key_inline)
    980		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
    981				  cdata->keylen, CLASS_1 |
    982				  KEY_DEST_CLASS_REG);
    983	else
    984		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
    985			   KEY_DEST_CLASS_REG);
    986	set_jump_tgt_here(desc, key_jump_cmd);
    987
    988	/* Class 1 operation */
    989	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
    990			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
    991
    992	if (is_qi) {
    993		u32 *wait_load_cmd;
    994
    995		/* REG3 = assoclen */
    996		append_seq_load(desc, 4, LDST_CLASS_DECO |
    997				LDST_SRCDST_WORD_DECO_MATH3 |
    998				(4 << LDST_OFFSET_SHIFT));
    999
   1000		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1001					    JUMP_COND_CALM | JUMP_COND_NCP |
   1002					    JUMP_COND_NOP | JUMP_COND_NIP |
   1003					    JUMP_COND_NIFP);
   1004		set_jump_tgt_here(desc, wait_load_cmd);
   1005
   1006		/* Read salt and IV */
   1007		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
   1008					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
   1009					FIFOLD_TYPE_IV);
   1010		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
   1011				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
   1012	}
   1013
   1014	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
   1015	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
   1016
   1017	/* Read assoc data */
   1018	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
   1019			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
   1020
   1021	/* Skip IV */
   1022	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
   1023
   1024	/* Will read cryptlen bytes */
   1025	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
   1026
   1027	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
   1028	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
   1029
   1030	/* Skip assoc data */
   1031	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
   1032
   1033	/* Will write cryptlen bytes */
   1034	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
   1035
   1036	/* Store payload data */
   1037	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
   1038
   1039	/* Read encrypted data */
   1040	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
   1041			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
   1042
   1043	/* Read ICV */
   1044	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
   1045			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
   1046
   1047	print_hex_dump_debug("rfc4106 dec shdesc@" __stringify(__LINE__)": ",
   1048			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
   1049			     1);
   1050}
   1051EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
   1052
   1053/**
   1054 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
   1055 *                             (non-protocol).
   1056 * @desc: pointer to buffer used for descriptor construction
   1057 * @cdata: pointer to block cipher transform definitions
   1058 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
   1059 * @ivsize: initialization vector size
   1060 * @icvsize: integrity check value (ICV) size (truncated or full)
   1061 * @is_qi: true when called from caam/qi
   1062 */
   1063void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
   1064			       unsigned int ivsize, unsigned int icvsize,
   1065			       const bool is_qi)
   1066{
   1067	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
   1068
   1069	init_sh_desc(desc, HDR_SHARE_SERIAL);
   1070
   1071	/* Skip key loading if it is loaded due to sharing */
   1072	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1073				   JUMP_COND_SHRD);
   1074	if (cdata->key_inline)
   1075		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
   1076				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
   1077	else
   1078		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
   1079			   KEY_DEST_CLASS_REG);
   1080	set_jump_tgt_here(desc, key_jump_cmd);
   1081
   1082	/* Class 1 operation */
   1083	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
   1084			 OP_ALG_ENCRYPT);
   1085
   1086	if (is_qi) {
   1087		/* assoclen is not needed, skip it */
   1088		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
   1089
   1090		/* Read salt and IV */
   1091		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
   1092					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
   1093					FIFOLD_TYPE_IV);
   1094		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
   1095				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
   1096	}
   1097
   1098	/* assoclen + cryptlen = seqinlen */
   1099	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
   1100
   1101	/*
   1102	 * MOVE_LEN opcode is not available in all SEC HW revisions,
   1103	 * thus need to do some magic, i.e. self-patch the descriptor
   1104	 * buffer.
   1105	 */
   1106	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
   1107				    (0x6 << MOVE_LEN_SHIFT));
   1108	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
   1109				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
   1110
   1111	/* Will read assoclen + cryptlen bytes */
   1112	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
   1113
   1114	/* Will write assoclen + cryptlen bytes */
   1115	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
   1116
   1117	/* Read and write assoclen + cryptlen bytes */
   1118	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
   1119
   1120	set_move_tgt_here(desc, read_move_cmd);
   1121	set_move_tgt_here(desc, write_move_cmd);
   1122	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
   1123	/* Move payload data to OFIFO */
   1124	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
   1125
   1126	/* Write ICV */
   1127	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
   1128			 LDST_SRCDST_BYTE_CONTEXT);
   1129
   1130	print_hex_dump_debug("rfc4543 enc shdesc@" __stringify(__LINE__)": ",
   1131			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
   1132			     1);
   1133}
   1134EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
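
/*
 * Note on the data flow above: RFC4543 is GCM used purely as an
 * authenticator (GMAC), so nothing is encrypted. The whole
 * assoclen + cryptlen stream is fed to the engine as AAD (FIFOLD_TYPE_AAD)
 * and, via the INFIFO -> OFIFO move, passed through to the output unchanged;
 * only the appended ICV is produced by the engine.
 */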
   1135
   1136/**
   1137 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
   1138 *                             (non-protocol).
   1139 * @desc: pointer to buffer used for descriptor construction
   1140 * @cdata: pointer to block cipher transform definitions
   1141 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
   1142 * @ivsize: initialization vector size
   1143 * @icvsize: integrity check value (ICV) size (truncated or full)
   1144 * @is_qi: true when called from caam/qi
   1145 */
   1146void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
   1147			       unsigned int ivsize, unsigned int icvsize,
   1148			       const bool is_qi)
   1149{
   1150	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
   1151
   1152	init_sh_desc(desc, HDR_SHARE_SERIAL);
   1153
   1154	/* Skip key loading if it is loaded due to sharing */
   1155	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1156				   JUMP_COND_SHRD);
   1157	if (cdata->key_inline)
   1158		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
   1159				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
   1160	else
   1161		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
   1162			   KEY_DEST_CLASS_REG);
   1163	set_jump_tgt_here(desc, key_jump_cmd);
   1164
   1165	/* Class 1 operation */
   1166	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
   1167			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
   1168
   1169	if (is_qi) {
   1170		/* assoclen is not needed, skip it */
   1171		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
   1172
   1173		/* Read salt and IV */
   1174		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
   1175					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
   1176					FIFOLD_TYPE_IV);
   1177		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
   1178				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
   1179	}
   1180
   1181	/* assoclen + cryptlen = seqoutlen */
   1182	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
   1183
   1184	/*
   1185	 * MOVE_LEN opcode is not available in all SEC HW revisions,
   1186	 * thus need to do some magic, i.e. self-patch the descriptor
   1187	 * buffer.
   1188	 */
   1189	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
   1190				    (0x6 << MOVE_LEN_SHIFT));
   1191	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
   1192				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
   1193
   1194	/* Will read assoclen + cryptlen bytes */
   1195	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
   1196
   1197	/* Will write assoclen + cryptlen bytes */
   1198	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
   1199
   1200	/* Store payload data */
   1201	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
   1202
   1203	/* In-snoop assoclen + cryptlen data */
   1204	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
   1205			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
   1206
   1207	set_move_tgt_here(desc, read_move_cmd);
   1208	set_move_tgt_here(desc, write_move_cmd);
   1209	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
   1210	/* Move payload data to OFIFO */
   1211	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
   1212	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
   1213
   1214	/* Read ICV */
   1215	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
   1216			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
   1217
   1218	print_hex_dump_debug("rfc4543 dec shdesc@" __stringify(__LINE__)": ",
   1219			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
   1220			     1);
   1221}
   1222EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
   1223
   1224/**
   1225 * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
   1226 *                          IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
   1227 *                          descriptor (non-protocol).
   1228 * @desc: pointer to buffer used for descriptor construction
   1229 * @cdata: pointer to block cipher transform definitions
   1230 *         Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ORed with
   1231 *         OP_ALG_AAI_AEAD.
   1232 * @adata: pointer to authentication transform definitions
   1233 *         Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ORed with
   1234 *         OP_ALG_AAI_AEAD.
   1235 * @ivsize: initialization vector size
   1236 * @icvsize: integrity check value (ICV) size (truncated or full)
   1237 * @encap: true if encapsulation, false if decapsulation
   1238 * @is_qi: true when called from caam/qi
   1239 */
   1240void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
   1241			    struct alginfo *adata, unsigned int ivsize,
   1242			    unsigned int icvsize, const bool encap,
   1243			    const bool is_qi)
   1244{
   1245	u32 *key_jump_cmd, *wait_cmd;
   1246	u32 nfifo;
   1247	const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);
   1248
   1249	/* Note: Context registers are saved. */
   1250	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
   1251
   1252	/* skip key loading if it is already loaded due to sharing */
   1253	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1254				   JUMP_COND_SHRD);
   1255
   1256	append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
   1257			  CLASS_1 | KEY_DEST_CLASS_REG);
   1258
   1259	/* For IPsec load the salt from keymat in the context register */
   1260	if (is_ipsec)
   1261		append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
   1262				   LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
   1263				   4 << LDST_OFFSET_SHIFT);
   1264
   1265	set_jump_tgt_here(desc, key_jump_cmd);
   1266
   1267	/* Class 2 and 1 operations: Poly & ChaCha */
   1268	if (encap) {
   1269		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
   1270				 OP_ALG_ENCRYPT);
   1271		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
   1272				 OP_ALG_ENCRYPT);
   1273	} else {
   1274		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
   1275				 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
   1276		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
   1277				 OP_ALG_DECRYPT);
   1278	}
   1279
   1280	if (is_qi) {
   1281		u32 *wait_load_cmd;
   1282		u32 ctx1_iv_off = is_ipsec ? 8 : 4;
   1283
   1284		/* REG3 = assoclen */
   1285		append_seq_load(desc, 4, LDST_CLASS_DECO |
   1286				LDST_SRCDST_WORD_DECO_MATH3 |
   1287				4 << LDST_OFFSET_SHIFT);
   1288
   1289		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1290					    JUMP_COND_CALM | JUMP_COND_NCP |
   1291					    JUMP_COND_NOP | JUMP_COND_NIP |
   1292					    JUMP_COND_NIFP);
   1293		set_jump_tgt_here(desc, wait_load_cmd);
   1294
   1295		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
   1296				LDST_SRCDST_BYTE_CONTEXT |
   1297				ctx1_iv_off << LDST_OFFSET_SHIFT);
   1298	}
   1299
   1300	/*
   1301	 * NFIFO manipulation:
   1302	 * read the associated data from the input and send it to both the
   1303	 * class1 and class2 alignment blocks. From class1, send the data on to
   1304	 * the output FIFO and then to memory, since the AD is not encrypted.
   1305	 */
   1306	nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
   1307		NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
   1308	append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
   1309			    LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);
   1310
   1311	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
   1312	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
   1313	append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
   1314			     FIFOLD_CLASS_CLASS1 | LDST_VLF);
   1315	append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
   1316			MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
   1317	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
   1318
   1319	/* IPsec - copy IV at the output */
   1320	if (is_ipsec)
   1321		append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
   1322				      0x2 << 25);
   1323
   1324	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
   1325			       JUMP_COND_NOP | JUMP_TEST_ALL);
   1326	set_jump_tgt_here(desc, wait_cmd);
   1327
   1328	if (encap) {
   1329		/* Read and write cryptlen bytes */
   1330		append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
   1331		append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
   1332				CAAM_CMD_SZ);
   1333		aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
   1334
   1335		/* Write ICV */
   1336		append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
   1337				 LDST_SRCDST_BYTE_CONTEXT);
   1338	} else {
   1339		/* Read and write cryptlen bytes */
   1340		append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
   1341				CAAM_CMD_SZ);
   1342		append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
   1343				CAAM_CMD_SZ);
   1344		aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
   1345
   1346		/* Load ICV for verification */
   1347		append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
   1348				     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
   1349	}
   1350
   1351	print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
   1352			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
   1353			     1);
   1354}
   1355EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
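
/*
 * Usage note, inferred from the ivsize test above rather than stated
 * explicitly: a CHACHAPOLY_IV_SIZE (12-byte) ivsize selects plain rfc7539,
 * while the 8-byte ESP ivsize selects the rfc7634 path, where the 4-byte
 * salt is read from cdata->key_virt + cdata->keylen. A hypothetical rfc7539
 * caller might therefore do:
 *
 *	cdata->algtype = OP_ALG_ALGSEL_CHACHA20 | OP_ALG_AAI_AEAD;
 *	adata->algtype = OP_ALG_ALGSEL_POLY1305 | OP_ALG_AAI_AEAD;
 *	cnstr_shdsc_chachapoly(desc, cdata, adata, CHACHAPOLY_IV_SIZE,
 *			       POLY1305_DIGEST_SIZE, true, false);
 */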
   1356
   1357/* For skcipher encrypt and decrypt, read from req->src and write to req->dst */
   1358static inline void skcipher_append_src_dst(u32 *desc)
   1359{
   1360	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
   1361	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
   1362	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
   1363			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
   1364	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
   1365}
   1366
   1367/**
   1368 * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
   1369 * @desc: pointer to buffer used for descriptor construction
   1370 * @cdata: pointer to block cipher transform definitions
   1371 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
   1372 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
   1373 *                                - OP_ALG_ALGSEL_CHACHA20
   1374 * @ivsize: initialization vector size
   1375 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
   1376 * @ctx1_iv_off: IV offset in CONTEXT1 register
   1377 */
   1378void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
   1379				unsigned int ivsize, const bool is_rfc3686,
   1380				const u32 ctx1_iv_off)
   1381{
   1382	u32 *key_jump_cmd;
   1383	u32 options = cdata->algtype | OP_ALG_AS_INIT | OP_ALG_ENCRYPT;
   1384	bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
   1385			    OP_ALG_ALGSEL_CHACHA20);
   1386
   1387	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
   1388	/* Skip if already shared */
   1389	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1390				   JUMP_COND_SHRD);
   1391
   1392	/* Load class1 key only */
   1393	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
   1394			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
   1395
   1396	/* Load nonce into CONTEXT1 reg */
   1397	if (is_rfc3686) {
   1398		const u8 *nonce = cdata->key_virt + cdata->keylen;
   1399
   1400		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
   1401				   LDST_CLASS_IND_CCB |
   1402				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
   1403		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
   1404			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
   1405			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
   1406	}
   1407
   1408	set_jump_tgt_here(desc, key_jump_cmd);
   1409
   1410	/* Load IV, if there is one */
   1411	if (ivsize)
   1412		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
   1413				LDST_CLASS_1_CCB | (ctx1_iv_off <<
   1414				LDST_OFFSET_SHIFT));
   1415
   1416	/* Load counter into CONTEXT1 reg */
   1417	if (is_rfc3686)
   1418		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
   1419				     LDST_SRCDST_BYTE_CONTEXT |
   1420				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
   1421				      LDST_OFFSET_SHIFT));
   1422
   1423	/* Load operation */
   1424	if (is_chacha20)
   1425		options |= OP_ALG_AS_FINALIZE;
   1426	append_operation(desc, options);
   1427
   1428	/* Perform operation */
   1429	skcipher_append_src_dst(desc);
   1430
   1431	/* Store IV */
   1432	if (!is_chacha20 && ivsize)
   1433		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
   1434				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
   1435				 LDST_OFFSET_SHIFT));
   1436
   1437	print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
   1438			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
   1439			     1);
   1440}
   1441EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
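
/*
 * Illustrative sketch, not part of this file: how the ctx1_iv_off argument is
 * typically derived for the constructor above. For CBC the IV sits at the
 * start of CONTEXT1 (offset 0); CTR-based modes keep the 16-byte counter
 * block at offset 16, and rfc3686 additionally reserves 4 bytes there for the
 * nonce, which the constructor reads from right after the key
 * (cdata->key_virt + cdata->keylen). AES_BLOCK_SIZE comes from <crypto/aes.h>;
 * buffer and function names are hypothetical.
 */
static void example_build_skcipher_enc_shdescs(u32 *sh_desc_cbc,
					       u32 *sh_desc_rfc3686,
					       struct alginfo *cdata_cbc,
					       struct alginfo *cdata_rfc3686)
{
	/* cbc(aes): 16-byte IV at CONTEXT1 offset 0 */
	cnstr_shdsc_skcipher_encap(sh_desc_cbc, cdata_cbc, AES_BLOCK_SIZE,
				   false, 0);

	/* rfc3686(ctr(aes)): 8-byte IV placed after the nonce, counter appended */
	cnstr_shdsc_skcipher_encap(sh_desc_rfc3686, cdata_rfc3686,
				   CTR_RFC3686_IV_SIZE, true,
				   16 + CTR_RFC3686_NONCE_SIZE);
}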
   1442
   1443/**
   1444 * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
   1445 * @desc: pointer to buffer used for descriptor construction
   1446 * @cdata: pointer to block cipher transform definitions
   1447 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
   1448 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
   1449 *                                - OP_ALG_ALGSEL_CHACHA20
   1450 * @ivsize: initialization vector size
   1451 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
   1452 * @ctx1_iv_off: IV offset in CONTEXT1 register
   1453 */
   1454void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
   1455				unsigned int ivsize, const bool is_rfc3686,
   1456				const u32 ctx1_iv_off)
   1457{
   1458	u32 *key_jump_cmd;
   1459	bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
   1460			    OP_ALG_ALGSEL_CHACHA20);
   1461
   1462	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
   1463	/* Skip if already shared */
   1464	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1465				   JUMP_COND_SHRD);
   1466
   1467	/* Load class1 key only */
   1468	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
   1469			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
   1470
   1471	/* Load nonce into CONTEXT1 reg */
   1472	if (is_rfc3686) {
   1473		const u8 *nonce = cdata->key_virt + cdata->keylen;
   1474
   1475		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
   1476				   LDST_CLASS_IND_CCB |
   1477				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
   1478		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
   1479			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
   1480			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
   1481	}
   1482
   1483	set_jump_tgt_here(desc, key_jump_cmd);
   1484
   1485	/* Load IV, if there is one */
   1486	if (ivsize)
   1487		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
   1488				LDST_CLASS_1_CCB | (ctx1_iv_off <<
   1489				LDST_OFFSET_SHIFT));
   1490
   1491	/* Load counter into CONTEXT1 reg */
   1492	if (is_rfc3686)
   1493		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
   1494				     LDST_SRCDST_BYTE_CONTEXT |
   1495				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
   1496				      LDST_OFFSET_SHIFT));
   1497
    1498		/*
		 * Choose operation: CTR-based modes (nonzero ctx1_iv_off) use a
		 * plain decrypt, otherwise append_dec_op1() also sets the AES
		 * DK (decrypt key) bit when the descriptor is shared.
		 */
   1499	if (ctx1_iv_off)
   1500		append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
   1501				 OP_ALG_DECRYPT);
   1502	else
   1503		append_dec_op1(desc, cdata->algtype);
   1504
   1505	/* Perform operation */
   1506	skcipher_append_src_dst(desc);
   1507
   1508	/* Store IV */
   1509	if (!is_chacha20 && ivsize)
   1510		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
   1511				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
   1512				 LDST_OFFSET_SHIFT));
   1513
   1514	print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
   1515			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
   1516			     1);
   1517}
   1518EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
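
/*
 * Illustrative sketch, not part of this file: the decrypt descriptor is
 * normally built alongside the encrypt one, from the same transform data but
 * into its own buffer (names hypothetical).
 */
static void example_build_cbc_aes_shdescs(u32 *sh_desc_enc, u32 *sh_desc_dec,
					  struct alginfo *cdata)
{
	cnstr_shdsc_skcipher_encap(sh_desc_enc, cdata, AES_BLOCK_SIZE, false, 0);
	cnstr_shdsc_skcipher_decap(sh_desc_dec, cdata, AES_BLOCK_SIZE, false, 0);
}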
   1519
   1520/**
   1521 * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
   1522 * @desc: pointer to buffer used for descriptor construction
   1523 * @cdata: pointer to block cipher transform definitions
   1524 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
   1525 */
   1526void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
   1527{
   1528	/*
   1529	 * Set sector size to a big value, practically disabling
   1530	 * sector size segmentation in xts implementation. We cannot
   1531	 * take full advantage of this HW feature with existing
   1532	 * crypto API / dm-crypt SW architecture.
   1533	 */
   1534	__be64 sector_size = cpu_to_be64(BIT(15));
   1535	u32 *key_jump_cmd;
   1536
   1537	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
   1538	/* Skip if already shared */
   1539	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1540				   JUMP_COND_SHRD);
   1541
   1542	/* Load class1 keys only */
   1543	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
   1544			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
   1545
   1546	/* Load sector size with index 40 bytes (0x28) */
   1547	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
   1548			   LDST_SRCDST_BYTE_CONTEXT |
   1549			   (0x28 << LDST_OFFSET_SHIFT));
   1550
   1551	set_jump_tgt_here(desc, key_jump_cmd);
   1552
   1553	/*
   1554	 * create sequence for loading the sector index / 16B tweak value
   1555	 * Lower 8B of IV - sector index / tweak lower half
   1556	 * Upper 8B of IV - upper half of 16B tweak
   1557	 */
   1558	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
   1559			(0x20 << LDST_OFFSET_SHIFT));
   1560	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
   1561			(0x30 << LDST_OFFSET_SHIFT));
   1562
   1563	/* Load operation */
   1564	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
   1565			 OP_ALG_ENCRYPT);
   1566
   1567	/* Perform operation */
   1568	skcipher_append_src_dst(desc);
   1569
   1570	/* Store lower 8B and upper 8B of IV */
   1571	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
   1572			 (0x20 << LDST_OFFSET_SHIFT));
   1573	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
   1574			 (0x30 << LDST_OFFSET_SHIFT));
   1575
   1576	print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
   1577			     ": ", DUMP_PREFIX_ADDRESS, 16, 4,
   1578			     desc, desc_bytes(desc), 1);
   1579}
   1580EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
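
/*
 * Illustrative sketch, not part of this file: building the xts(aes) encrypt
 * descriptor. For XTS the class1 key is the two AES keys concatenated
 * (key1 || key2), so cdata->keylen is their combined length. Names below are
 * hypothetical.
 */
static void example_build_xts_enc_shdesc(u32 *sh_desc_enc, const void *key,
					 unsigned int keylen)
{
	struct alginfo cdata = {
		.algtype  = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
		.key_virt = key,	/* key1 || key2 */
		.keylen   = keylen,	/* e.g. 32 (xts-aes-128) or 64 bytes */
	};

	cnstr_shdsc_xts_skcipher_encap(sh_desc_enc, &cdata);
}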
   1581
   1582/**
   1583 * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
   1584 * @desc: pointer to buffer used for descriptor construction
   1585 * @cdata: pointer to block cipher transform definitions
   1586 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
   1587 */
   1588void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
   1589{
   1590	/*
   1591	 * Set sector size to a big value, practically disabling
   1592	 * sector size segmentation in xts implementation. We cannot
   1593	 * take full advantage of this HW feature with existing
   1594	 * crypto API / dm-crypt SW architecture.
   1595	 */
   1596	__be64 sector_size = cpu_to_be64(BIT(15));
   1597	u32 *key_jump_cmd;
   1598
   1599	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
   1600	/* Skip if already shared */
   1601	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
   1602				   JUMP_COND_SHRD);
   1603
   1604	/* Load class1 key only */
   1605	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
   1606			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
   1607
   1608	/* Load sector size with index 40 bytes (0x28) */
   1609	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
   1610			   LDST_SRCDST_BYTE_CONTEXT |
   1611			   (0x28 << LDST_OFFSET_SHIFT));
   1612
   1613	set_jump_tgt_here(desc, key_jump_cmd);
   1614
   1615	/*
   1616	 * create sequence for loading the sector index / 16B tweak value
   1617	 * Lower 8B of IV - sector index / tweak lower half
   1618	 * Upper 8B of IV - upper half of 16B tweak
   1619	 */
   1620	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
   1621			(0x20 << LDST_OFFSET_SHIFT));
   1622	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
   1623			(0x30 << LDST_OFFSET_SHIFT));
   1624	/* Load operation */
   1625	append_dec_op1(desc, cdata->algtype);
   1626
   1627	/* Perform operation */
   1628	skcipher_append_src_dst(desc);
   1629
   1630	/* Store lower 8B and upper 8B of IV */
   1631	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
   1632			 (0x20 << LDST_OFFSET_SHIFT));
   1633	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
   1634			 (0x30 << LDST_OFFSET_SHIFT));
   1635
   1636	print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
   1637			     ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
   1638			     desc_bytes(desc), 1);
   1639}
   1640EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
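
/*
 * Illustrative sketch, not part of this file: both XTS descriptors above
 * consume a 16-byte sequence IV, lower 8 bytes first (stored at CONTEXT1
 * offset 0x20) and upper 8 bytes second (offset 0x30). For a plain64-style
 * tweak this is just the little-endian sector number zero-padded to 16 bytes
 * (helper name hypothetical).
 */
static void example_fill_xts_iv(u8 iv[16], u64 sector)
{
	__le64 lo = cpu_to_le64(sector);

	memcpy(iv, &lo, sizeof(lo));	/* lower half: sector index */
	memset(iv + sizeof(lo), 0, 8);	/* upper half of the 16B tweak */
}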
   1641
   1642MODULE_LICENSE("GPL");
   1643MODULE_DESCRIPTION("FSL CAAM descriptor support");
   1644MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");