cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

crc32-vx.c (8434B)


// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto-API module for CRC-32 algorithms implemented with the
 * z/Architecture Vector Extension Facility.
 *
 * Copyright IBM Corp. 2015
 * Author(s): Hendrik Brueckner <brueckner@linux.vnet.ibm.com>
 */
#define KMSG_COMPONENT	"crc32-vx"
#define pr_fmt(fmt)	KMSG_COMPONENT ": " fmt

#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <crypto/internal/hash.h>
#include <asm/fpu/api.h>


#define CRC32_BLOCK_SIZE	1
#define CRC32_DIGEST_SIZE	4

#define VX_MIN_LEN		64
#define VX_ALIGNMENT		16L
#define VX_ALIGN_MASK		(VX_ALIGNMENT - 1)

struct crc_ctx {
	u32 key;
};

struct crc_desc_ctx {
	u32 crc;
};

/* Prototypes for functions in assembly files */
u32 crc32_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32_be_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32c_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);

/*
 * DEFINE_CRC32_VX() - Define a CRC-32 function using the vector extension
 *
 * Creates a function to perform a particular CRC-32 computation. Depending
 * on the message buffer, the hardware-accelerated or software implementation
 * is used. Note that the message buffer is aligned to improve fetch
 * operations of VECTOR LOAD MULTIPLE instructions.
 *
 */
#define DEFINE_CRC32_VX(___fname, ___crc32_vx, ___crc32_sw)		    \
	static u32 __pure ___fname(u32 crc,				    \
				unsigned char const *data, size_t datalen)  \
	{								    \
		struct kernel_fpu vxstate;				    \
		unsigned long prealign, aligned, remaining;		    \
									    \
		if (datalen < VX_MIN_LEN + VX_ALIGN_MASK)		    \
			return ___crc32_sw(crc, data, datalen);		    \
									    \
		if ((unsigned long)data & VX_ALIGN_MASK) {		    \
			prealign = VX_ALIGNMENT -			    \
				  ((unsigned long)data & VX_ALIGN_MASK);    \
			datalen -= prealign;				    \
			crc = ___crc32_sw(crc, data, prealign);		    \
			data = (void *)((unsigned long)data + prealign);    \
		}							    \
									    \
		aligned = datalen & ~VX_ALIGN_MASK;			    \
		remaining = datalen & VX_ALIGN_MASK;			    \
									    \
		kernel_fpu_begin(&vxstate, KERNEL_VXR_LOW);		    \
		crc = ___crc32_vx(crc, data, aligned);			    \
		kernel_fpu_end(&vxstate, KERNEL_VXR_LOW);		    \
									    \
		if (remaining)						    \
			crc = ___crc32_sw(crc, data + aligned, remaining);  \
									    \
		return crc;						    \
	}
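/*
 * Worked example for the split done above: for a 4096-byte buffer whose
 * start address has the low four bits 0x4, prealign is 12 bytes (handled
 * by the software fallback), aligned is 4080 bytes (handled by the vector
 * routine), and the remaining 4 bytes are again computed in software.
 */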
DEFINE_CRC32_VX(crc32_le_vx, crc32_le_vgfm_16, crc32_le)
DEFINE_CRC32_VX(crc32_be_vx, crc32_be_vgfm_16, crc32_be)
DEFINE_CRC32_VX(crc32c_le_vx, crc32c_le_vgfm_16, __crc32c_le)


static int crc32_vx_cra_init_zero(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = 0;
	return 0;
}

static int crc32_vx_cra_init_invert(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = ~0;
	return 0;
}

static int crc32_vx_init(struct shash_desc *desc)
{
	struct crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->crc = mctx->key;
	return 0;
}
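/*
 * The optional 4-byte key is the starting CRC value that crc32_vx_init()
 * loads into the per-request state; when no key has been set, the
 * cra_init callbacks above seed it with 0 (crc32, crc32be) or ~0 (crc32c).
 */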
static int crc32_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			   unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key))
		return -EINVAL;
	mctx->key = le32_to_cpu(*(__le32 *)newkey);
	return 0;
}

static int crc32be_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			     unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key))
		return -EINVAL;
	mctx->key = be32_to_cpu(*(__be32 *)newkey);
	return 0;
}

static int crc32le_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__le32 *)out = cpu_to_le32p(&ctx->crc);
	return 0;
}

static int crc32be_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__be32 *)out = cpu_to_be32p(&ctx->crc);
	return 0;
}

static int crc32c_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
	return 0;
}

static int __crc32le_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__le32 *)out = cpu_to_le32(crc32_le_vx(*crc, data, len));
	return 0;
}

static int __crc32be_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__be32 *)out = cpu_to_be32(crc32_be_vx(*crc, data, len));
	return 0;
}

static int __crc32c_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			     u8 *out)
{
	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32(crc32c_le_vx(*crc, data, len));
	return 0;
}


#define CRC32_VX_FINUP(alg, func)					      \
	static int alg ## _vx_finup(struct shash_desc *desc, const u8 *data,  \
				   unsigned int datalen, u8 *out)	      \
	{								      \
		return __ ## alg ## _vx_finup(shash_desc_ctx(desc),	      \
					      data, datalen, out);	      \
	}

CRC32_VX_FINUP(crc32le, crc32_le_vx)
CRC32_VX_FINUP(crc32be, crc32_be_vx)
CRC32_VX_FINUP(crc32c, crc32c_le_vx)

#define CRC32_VX_DIGEST(alg, func)					      \
	static int alg ## _vx_digest(struct shash_desc *desc, const u8 *data, \
				     unsigned int len, u8 *out)		      \
	{								      \
		return __ ## alg ## _vx_finup(crypto_shash_ctx(desc->tfm),    \
					      data, len, out);		      \
	}

CRC32_VX_DIGEST(crc32le, crc32_le_vx)
CRC32_VX_DIGEST(crc32be, crc32_be_vx)
CRC32_VX_DIGEST(crc32c, crc32c_le_vx)

#define CRC32_VX_UPDATE(alg, func)					      \
	static int alg ## _vx_update(struct shash_desc *desc, const u8 *data, \
				     unsigned int datalen)		      \
	{								      \
		struct crc_desc_ctx *ctx = shash_desc_ctx(desc);	      \
		ctx->crc = func(ctx->crc, data, datalen);		      \
		return 0;						      \
	}

CRC32_VX_UPDATE(crc32le, crc32_le_vx)
CRC32_VX_UPDATE(crc32be, crc32_be_vx)
CRC32_VX_UPDATE(crc32c, crc32c_le_vx)
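/*
 * The macros above generate the _vx_update, _vx_finup and _vx_digest
 * handlers for the crc32le, crc32be and crc32c variants; together with
 * the handwritten _vx_final functions they are wired into the shash_alg
 * descriptors below.
 */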
static struct shash_alg crc32_vx_algs[] = {
	/* CRC-32 LE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32_vx_setkey,
		.update		=	crc32le_vx_update,
		.final		=	crc32le_vx_final,
		.finup		=	crc32le_vx_finup,
		.digest		=	crc32le_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32",
			.cra_driver_name = "crc32-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32 BE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32be_vx_setkey,
		.update		=	crc32be_vx_update,
		.final		=	crc32be_vx_final,
		.finup		=	crc32be_vx_finup,
		.digest		=	crc32be_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32be",
			.cra_driver_name = "crc32be-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32C LE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32_vx_setkey,
		.update		=	crc32c_vx_update,
		.final		=	crc32c_vx_final,
		.finup		=	crc32c_vx_finup,
		.digest		=	crc32c_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32c",
			.cra_driver_name = "crc32c-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_invert,
		},
	},
};


static int __init crc_vx_mod_init(void)
{
	return crypto_register_shashes(crc32_vx_algs,
				       ARRAY_SIZE(crc32_vx_algs));
}

static void __exit crc_vx_mod_exit(void)
{
	crypto_unregister_shashes(crc32_vx_algs, ARRAY_SIZE(crc32_vx_algs));
}

module_cpu_feature_match(VXRS, crc_vx_mod_init);
module_exit(crc_vx_mod_exit);

MODULE_AUTHOR("Hendrik Brueckner <brueckner@linux.vnet.ibm.com>");
MODULE_LICENSE("GPL");

MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32-vx");
MODULE_ALIAS_CRYPTO("crc32c");
MODULE_ALIAS_CRYPTO("crc32c-vx");
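
Usage note: the module is auto-loaded only on machines that report the VXRS (vector) CPU facility, via module_cpu_feature_match() above, and the three algorithms then become reachable through the regular kernel crypto API under their cra_name ("crc32", "crc32be", "crc32c") or cra_driver_name. The following is a minimal sketch of how other in-kernel code might compute a little-endian CRC-32 through the "crc32-vx" driver; the helper name, buffer handling and error paths are illustrative and not part of this file.

#include <crypto/hash.h>
#include <linux/err.h>

/* Illustrative helper (not from this file): one-shot CRC-32 of buf[0..len). */
static int crc32_vx_example(const u8 *buf, unsigned int len, u32 *result)
{
	struct crypto_shash *tfm;
	__le32 out;
	int ret;

	/* Request the vector-accelerated driver explicitly; asking for
	 * "crc32" instead would select the highest-priority provider. */
	tfm = crypto_alloc_shash("crc32-vx", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* digest() = init() + update() + final() in one call. */
		ret = crypto_shash_digest(desc, buf, len, (u8 *)&out);
	}

	if (!ret)
		*result = le32_to_cpu(out);

	crypto_free_shash(tfm);
	return ret;
}

With no key set, this yields a plain CRC-32 with seed 0 and no final inversion, matching the generic "crc32" shash; a 4-byte setkey would change the starting value, and the "crc32c" algorithm instead uses the ~0 seed and final XOR shown in crc32c_vx_final() above.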