cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

sha256_ssse3_glue.c (10776B)


/*
 * Cryptographic API.
 *
 * Glue code for the SHA256 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha256_generic.c
 *
 * Copyright (C) 2013 Intel Corporation.
 *
 * Author:
 *     Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */


#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/string.h>
#include <asm/simd.h>

asmlinkage void sha256_transform_ssse3(struct sha256_state *state,
				       const u8 *data, int blocks);

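/*
 * The assembler transforms in this file all follow the
 * sha256_block_fn contract from <crypto/sha256_base.h>: consume
 * 'blocks' complete 64-byte blocks from 'data' and update the
 * 256-bit digest state in place.  The glue code below only decides
 * which transform to plug into the common base helpers.
 */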
static int _sha256_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len, sha256_block_fn *sha256_xform)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

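	/*
	 * Using the SIMD registers requires kernel_fpu_begin()/end(),
	 * which is relatively expensive and not permitted in every
	 * context.  Fall back to the generic C implementation when
	 * SIMD is unusable, or when the new data does not complete a
	 * 64-byte block and would only be buffered anyway.
	 */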
	if (!crypto_simd_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/*
	 * Make sure struct sha256_state begins directly with the SHA256
	 * 256-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len, sha256_xform);
	kernel_fpu_end();

	return 0;
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
	      unsigned int len, u8 *out, sha256_block_fn *sha256_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha256_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len, sha256_xform);
	sha256_base_do_finalize(desc, sha256_xform);
	kernel_fpu_end();

	return sha256_base_finish(desc, out);
}

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_ssse3);
}

static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
	      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ssse3_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_ssse3_update,
	.final		=	sha256_ssse3_final,
	.finup		=	sha256_ssse3_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_ssse3_update,
	.final		=	sha256_ssse3_final,
	.finup		=	sha256_ssse3_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
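
/*
 * Example (a sketch, not part of the original file): how a kernel
 * consumer ends up running one of the transforms registered above.
 * Requesting the generic name "sha256" makes the crypto core pick
 * the usable implementation with the highest cra_priority, e.g.
 * "sha256-ssse3" (priority 150) on a machine without AVX, AVX2 or
 * SHA-NI.  The function below is hypothetical and compiled out.
 */
#if 0
#include <crypto/hash.h>

static int example_sha256_digest(const u8 *data, unsigned int len,
				 u8 out[SHA256_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* One-shot init + update + final over the buffer. */
		err = crypto_shash_digest(desc, data, len, out);
	}

	crypto_free_shash(tfm);
	return err;
}
#endif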

static int register_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
	return 0;
}

static void unregister_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
}

asmlinkage void sha256_transform_avx(struct sha256_state *state,
				     const u8 *data, int blocks);

static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_avx);
}

static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_avx);
}

static int sha256_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_avx_update,
	.final		=	sha256_avx_final,
	.finup		=	sha256_avx_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_avx_update,
	.final		=	sha256_avx_final,
	.finup		=	sha256_avx_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

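/*
 * The AVX CPUID feature bit alone is not sufficient: the OS must
 * also have enabled the SSE and YMM register state in XCR0, or
 * executing AVX instructions would fault.  cpu_has_xfeatures()
 * checks exactly that.
 */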
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha256_avx(void)
{
	if (avx_usable())
		return crypto_register_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
	return 0;
}

static void unregister_sha256_avx(void)
{
	if (avx_usable())
		crypto_unregister_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
}

asmlinkage void sha256_transform_rorx(struct sha256_state *state,
				      const u8 *data, int blocks);

static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_rorx);
}

static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_rorx);
}

static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx2_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_avx2_update,
	.final		=	sha256_avx2_final,
	.finup		=	sha256_avx2_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_avx2_update,
	.final		=	sha256_avx2_final,
	.finup		=	sha256_avx2_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

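/*
 * The AVX2 code path is named after the BMI2 rorx rotate instruction
 * it uses, hence the additional X86_FEATURE_BMI2 check next to AVX2.
 */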
static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
		    boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static int register_sha256_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
	return 0;
}

static void unregister_sha256_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
}

#ifdef CONFIG_AS_SHA256_NI
asmlinkage void sha256_ni_transform(struct sha256_state *digest,
				    const u8 *data, int rounds);

static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_ni_transform);
}

static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_ni_transform);
}

static int sha256_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ni_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_ni_update,
	.final		=	sha256_ni_final,
	.finup		=	sha256_ni_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_ni_update,
	.final		=	sha256_ni_final,
	.finup		=	sha256_ni_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

static int register_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
	return 0;
}

static void unregister_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
}

#else
static inline int register_sha256_ni(void) { return 0; }
static inline void unregister_sha256_ni(void) { }
#endif

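/*
 * Register every variant the CPU supports; each register_* helper is
 * a no-op returning 0 when its feature is missing.  On a real
 * registration failure, unwind whatever was registered so far and
 * refuse to load the module.
 */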
static int __init sha256_ssse3_mod_init(void)
{
	if (register_sha256_ssse3())
		goto fail;

	if (register_sha256_avx()) {
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_avx2()) {
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_ni()) {
		unregister_sha256_avx2();
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha256_ssse3_mod_fini(void)
{
	unregister_sha256_ni();
	unregister_sha256_avx2();
	unregister_sha256_avx();
	unregister_sha256_ssse3();
}

module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

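/*
 * These aliases let the module be loaded automatically when one of
 * the algorithm names is requested, e.g. via
 * request_module("crypto-sha256").
 */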
MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-ssse3");
MODULE_ALIAS_CRYPTO("sha256-avx");
MODULE_ALIAS_CRYPTO("sha256-avx2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-ssse3");
MODULE_ALIAS_CRYPTO("sha224-avx");
MODULE_ALIAS_CRYPTO("sha224-avx2");
#ifdef CONFIG_AS_SHA256_NI
MODULE_ALIAS_CRYPTO("sha256-ni");
MODULE_ALIAS_CRYPTO("sha224-ni");
#endif