cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

crc32-ce-glue.c (5568B)


// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
 *
 * Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>

#define PMULL_MIN_LEN		64L	/* minimum size of buffer
					 * for crc32_pmull_le_16 */
#define SCALE_F			16L	/* size of NEON register */

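/*
 * Bulk CRC routines implemented in assembly (crc32-ce-core.S in the
 * upstream tree): a PMULL (carry-less multiply) path and a path using
 * the ARMv8 CRC32 instructions.
 */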
asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);

asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);

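/*
 * Scalar fallbacks used by the PMULL update paths for unaligned
 * prefixes and short tails; chosen at module init based on the
 * available hwcaps (ARMv8 CRC32 instructions vs. generic crc32).
 */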
static u32 (*fallback_crc32)(u32 init_crc, const u8 buf[], u32 len);
static u32 (*fallback_crc32c)(u32 init_crc, const u8 buf[], u32 len);

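/* Default seeds held in the tfm context: 0 for CRC32, ~0 for CRC32C. */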
static int crc32_cra_init(struct crypto_tfm *tfm)
{
	u32 *key = crypto_tfm_ctx(tfm);

	*key = 0;
	return 0;
}

static int crc32c_cra_init(struct crypto_tfm *tfm)
{
	u32 *key = crypto_tfm_ctx(tfm);

	*key = ~0;
	return 0;
}

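/*
 * The optional "key" is the initial CRC value, passed as a 32-bit
 * little-endian word (see CRYPTO_ALG_OPTIONAL_KEY below).
 */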
static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
			unsigned int keylen)
{
	u32 *mctx = crypto_shash_ctx(hash);

	if (keylen != sizeof(u32))
		return -EINVAL;
	*mctx = le32_to_cpup((__le32 *)key);
	return 0;
}

static int crc32_init(struct shash_desc *desc)
{
	u32 *mctx = crypto_shash_ctx(desc->tfm);
	u32 *crc = shash_desc_ctx(desc);

	*crc = *mctx;
	return 0;
}

static int crc32_update(struct shash_desc *desc, const u8 *data,
			unsigned int length)
{
	u32 *crc = shash_desc_ctx(desc);

	*crc = crc32_armv8_le(*crc, data, length);
	return 0;
}

static int crc32c_update(struct shash_desc *desc, const u8 *data,
			 unsigned int length)
{
	u32 *crc = shash_desc_ctx(desc);

	*crc = crc32c_armv8_le(*crc, data, length);
	return 0;
}

static int crc32_final(struct shash_desc *desc, u8 *out)
{
	u32 *crc = shash_desc_ctx(desc);

	put_unaligned_le32(*crc, out);
	return 0;
}

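/* CRC32C convention: the running value is inverted on output. */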
static int crc32c_final(struct shash_desc *desc, u8 *out)
{
	u32 *crc = shash_desc_ctx(desc);

	put_unaligned_le32(~*crc, out);
	return 0;
}

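/*
 * PMULL-accelerated update: consume an unaligned prefix with the
 * scalar fallback until the buffer is aligned to the 16-byte NEON
 * register size, run the bulk through PMULL inside a
 * kernel_neon_begin()/kernel_neon_end() section, and let the fallback
 * mop up the tail. If SIMD is not usable in the current context, the
 * whole buffer takes the fallback path.
 */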
static int crc32_pmull_update(struct shash_desc *desc, const u8 *data,
			      unsigned int length)
{
	u32 *crc = shash_desc_ctx(desc);
	unsigned int l;

	if (crypto_simd_usable()) {
		if ((u32)data % SCALE_F) {
			l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

			*crc = fallback_crc32(*crc, data, l);

			data += l;
			length -= l;
		}

		if (length >= PMULL_MIN_LEN) {
			l = round_down(length, SCALE_F);

			kernel_neon_begin();
			*crc = crc32_pmull_le(data, l, *crc);
			kernel_neon_end();

			data += l;
			length -= l;
		}
	}

	if (length > 0)
		*crc = fallback_crc32(*crc, data, length);

	return 0;
}

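/* Same staging as crc32_pmull_update(), using the CRC32C routines. */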
static int crc32c_pmull_update(struct shash_desc *desc, const u8 *data,
			       unsigned int length)
{
	u32 *crc = shash_desc_ctx(desc);
	unsigned int l;

	if (crypto_simd_usable()) {
		if ((u32)data % SCALE_F) {
			l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

			*crc = fallback_crc32c(*crc, data, l);

			data += l;
			length -= l;
		}

		if (length >= PMULL_MIN_LEN) {
			l = round_down(length, SCALE_F);

			kernel_neon_begin();
			*crc = crc32c_pmull_le(data, l, *crc);
			kernel_neon_end();

			data += l;
			length -= l;
		}
	}

	if (length > 0)
		*crc = fallback_crc32c(*crc, data, length);

	return 0;
}

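/*
 * Two shash algorithms are registered: "crc32" and "crc32c". The
 * ARMv8-instruction update handlers set here are replaced with the
 * PMULL variants at module init when the CPU supports PMULL.
 */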
static struct shash_alg crc32_pmull_algs[] = { {
	.setkey			= crc32_setkey,
	.init			= crc32_init,
	.update			= crc32_update,
	.final			= crc32_final,
	.descsize		= sizeof(u32),
	.digestsize		= sizeof(u32),

	.base.cra_ctxsize	= sizeof(u32),
	.base.cra_init		= crc32_cra_init,
	.base.cra_name		= "crc32",
	.base.cra_driver_name	= "crc32-arm-ce",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_blocksize	= 1,
	.base.cra_module	= THIS_MODULE,
}, {
	.setkey			= crc32_setkey,
	.init			= crc32_init,
	.update			= crc32c_update,
	.final			= crc32c_final,
	.descsize		= sizeof(u32),
	.digestsize		= sizeof(u32),

	.base.cra_ctxsize	= sizeof(u32),
	.base.cra_init		= crc32c_cra_init,
	.base.cra_name		= "crc32c",
	.base.cra_driver_name	= "crc32c-arm-ce",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_blocksize	= 1,
	.base.cra_module	= THIS_MODULE,
} };

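/*
 * Require at least one of the PMULL or CRC32 hwcaps; with PMULL
 * present, pick the fastest available fallback for the unaligned
 * head and short tail.
 */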
static int __init crc32_pmull_mod_init(void)
{
	if (elf_hwcap2 & HWCAP2_PMULL) {
		crc32_pmull_algs[0].update = crc32_pmull_update;
		crc32_pmull_algs[1].update = crc32c_pmull_update;

		if (elf_hwcap2 & HWCAP2_CRC32) {
			fallback_crc32 = crc32_armv8_le;
			fallback_crc32c = crc32c_armv8_le;
		} else {
			fallback_crc32 = crc32_le;
			fallback_crc32c = __crc32c_le;
		}
	} else if (!(elf_hwcap2 & HWCAP2_CRC32)) {
		return -ENODEV;
	}

	return crypto_register_shashes(crc32_pmull_algs,
				       ARRAY_SIZE(crc32_pmull_algs));
}

static void __exit crc32_pmull_mod_exit(void)
{
	crypto_unregister_shashes(crc32_pmull_algs,
				  ARRAY_SIZE(crc32_pmull_algs));
}

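/*
 * Autoload the module on CPUs that expose either the CRC32 or the
 * PMULL feature.
 */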
static const struct cpu_feature __maybe_unused crc32_cpu_feature[] = {
	{ cpu_feature(CRC32) }, { cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, crc32_cpu_feature);

module_init(crc32_pmull_mod_init);
module_exit(crc32_pmull_mod_exit);

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32c");
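
A minimal usage sketch (not part of this file): once the module is registered, the algorithms are reachable by name through the kernel crypto shash API. The function name below is a placeholder and error handling is abbreviated.

#include <crypto/hash.h>
#include <linux/err.h>

static u32 crc32_via_shash(const u8 *data, unsigned int len)
{
	struct crypto_shash *tfm;
	u32 out = 0;

	/* Resolves to the highest-priority "crc32" provider, e.g.
	 * crc32-arm-ce when this module is loaded. */
	tfm = crypto_alloc_shash("crc32", 0, 0);
	if (IS_ERR(tfm))
		return 0;	/* abbreviated error handling */

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		crypto_shash_digest(desc, data, len, (u8 *)&out);
	}

	crypto_free_shash(tfm);
	return out;	/* little-endian bytes, as written by crc32_final() */
}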