cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

crypto_ctx.c (5486B)


// SPDX-License-Identifier: GPL-2.0-or-later
/*
 *   Copyright (C) 2019 Samsung Electronics Co., Ltd.
 */

#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/wait.h>
#include <linux/sched.h>

#include "glob.h"
#include "crypto_ctx.h"
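
/*
 * Pool of reusable crypto contexts shared by all callers. The pool may
 * grow up to roughly num_online_cpus() contexts; once that cap is hit,
 * ksmbd_find_crypto_ctx() sleeps on ctx_wait until a context is
 * released back to the idle list.
 */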
struct crypto_ctx_list {
	spinlock_t		ctx_lock;
	int			avail_ctx;
	struct list_head	idle_ctx;
	wait_queue_head_t	ctx_wait;
};

static struct crypto_ctx_list ctx_list;

static inline void free_aead(struct crypto_aead *aead)
{
	if (aead)
		crypto_free_aead(aead);
}

static void free_shash(struct shash_desc *shash)
{
	if (shash) {
		crypto_free_shash(shash->tfm);
		kfree(shash);
	}
}
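
/*
 * Allocate an AEAD transform for the given algorithm id (AES-GCM or
 * AES-CCM). Returns NULL on an unknown id or on allocation failure.
 */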
static struct crypto_aead *alloc_aead(int id)
{
	struct crypto_aead *tfm = NULL;

	switch (id) {
	case CRYPTO_AEAD_AES_GCM:
		tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
		break;
	case CRYPTO_AEAD_AES_CCM:
		tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
		break;
	default:
		pr_err("Unsupported AEAD algorithm (id: %d)\n", id);
		return NULL;
	}

	if (IS_ERR(tfm)) {
		pr_err("Failed to allocate AEAD transform: %ld\n", PTR_ERR(tfm));
		return NULL;
	}

	return tfm;
}
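
/*
 * Allocate a synchronous hash transform together with its descriptor.
 * The per-request descriptor state is carved out of the same kzalloc()
 * by sizing the allocation with crypto_shash_descsize(tfm).
 */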
static struct shash_desc *alloc_shash_desc(int id)
{
	struct crypto_shash *tfm = NULL;
	struct shash_desc *shash;

	switch (id) {
	case CRYPTO_SHASH_HMACMD5:
		tfm = crypto_alloc_shash("hmac(md5)", 0, 0);
		break;
	case CRYPTO_SHASH_HMACSHA256:
		tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
		break;
	case CRYPTO_SHASH_CMACAES:
		tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
		break;
	case CRYPTO_SHASH_SHA256:
		tfm = crypto_alloc_shash("sha256", 0, 0);
		break;
	case CRYPTO_SHASH_SHA512:
		tfm = crypto_alloc_shash("sha512", 0, 0);
		break;
	default:
		return NULL;
	}

	if (IS_ERR(tfm))
		return NULL;

	shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(tfm),
			GFP_KERNEL);
	if (!shash)
		crypto_free_shash(tfm);
	else
		shash->tfm = tfm;
	return shash;
}

static void ctx_free(struct ksmbd_crypto_ctx *ctx)
{
	int i;

	for (i = 0; i < CRYPTO_SHASH_MAX; i++)
		free_shash(ctx->desc[i]);
	for (i = 0; i < CRYPTO_AEAD_MAX; i++)
		free_aead(ctx->ccmaes[i]);
	kfree(ctx);
}
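
/*
 * Acquire a crypto context. Reuse an idle one when available; otherwise
 * grow the pool, unless it already holds more than num_online_cpus()
 * contexts, in which case the caller sleeps until a context is
 * released back to the idle list.
 */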
static struct ksmbd_crypto_ctx *ksmbd_find_crypto_ctx(void)
{
	struct ksmbd_crypto_ctx *ctx;

	while (1) {
		spin_lock(&ctx_list.ctx_lock);
		if (!list_empty(&ctx_list.idle_ctx)) {
			ctx = list_entry(ctx_list.idle_ctx.next,
					 struct ksmbd_crypto_ctx,
					 list);
			list_del(&ctx->list);
			spin_unlock(&ctx_list.ctx_lock);
			return ctx;
		}

		if (ctx_list.avail_ctx > num_online_cpus()) {
			spin_unlock(&ctx_list.ctx_lock);
			wait_event(ctx_list.ctx_wait,
				   !list_empty(&ctx_list.idle_ctx));
			continue;
		}

		ctx_list.avail_ctx++;
		spin_unlock(&ctx_list.ctx_lock);

		ctx = kzalloc(sizeof(struct ksmbd_crypto_ctx), GFP_KERNEL);
		if (!ctx) {
			spin_lock(&ctx_list.ctx_lock);
			ctx_list.avail_ctx--;
			spin_unlock(&ctx_list.ctx_lock);
			wait_event(ctx_list.ctx_wait,
				   !list_empty(&ctx_list.idle_ctx));
			continue;
		}
		break;
	}
	return ctx;
}
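
/*
 * Return a context to the pool. While the pool is within its cap the
 * context goes back on the idle list and one waiter is woken; above
 * the cap it is freed outright.
 */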
void ksmbd_release_crypto_ctx(struct ksmbd_crypto_ctx *ctx)
{
	if (!ctx)
		return;

	spin_lock(&ctx_list.ctx_lock);
	if (ctx_list.avail_ctx <= num_online_cpus()) {
		list_add(&ctx->list, &ctx_list.idle_ctx);
		spin_unlock(&ctx_list.ctx_lock);
		wake_up(&ctx_list.ctx_wait);
		return;
	}

	ctx_list.avail_ctx--;
	spin_unlock(&ctx_list.ctx_lock);
	ctx_free(ctx);
}
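
/*
 * Look up a context whose shash descriptor for @id is ready, allocating
 * the descriptor lazily on first use. Each pooled context caches one
 * descriptor per algorithm id.
 */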
static struct ksmbd_crypto_ctx *____crypto_shash_ctx_find(int id)
{
	struct ksmbd_crypto_ctx *ctx;

	if (id >= CRYPTO_SHASH_MAX)
		return NULL;

	ctx = ksmbd_find_crypto_ctx();
	if (ctx->desc[id])
		return ctx;

	ctx->desc[id] = alloc_shash_desc(id);
	if (ctx->desc[id])
		return ctx;
	ksmbd_release_crypto_ctx(ctx);
	return NULL;
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_hmacmd5(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_HMACMD5);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_hmacsha256(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_HMACSHA256);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_cmacaes(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_CMACAES);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_sha256(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_SHA256);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_sha512(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_SHA512);
}
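
/*
 * Usage sketch (not part of the original file): one-shot hashing with a
 * pooled context. crypto_shash_digest() is the standard shash helper
 * from <crypto/hash.h>; the helper name and parameters below are
 * illustrative only.
 */
static int example_sha256(const u8 *data, unsigned int len, u8 *out)
{
	struct ksmbd_crypto_ctx *ctx;
	int rc;

	/* Take a pooled context whose SHA-256 descriptor is ready. */
	ctx = ksmbd_crypto_ctx_find_sha256();
	if (!ctx)
		return -ENOMEM;
	rc = crypto_shash_digest(ctx->desc[CRYPTO_SHASH_SHA256],
				 data, len, out);
	/* Hand the context back so a waiter can reuse it. */
	ksmbd_release_crypto_ctx(ctx);
	return rc;
}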
/*
 * AEAD analogue of ____crypto_shash_ctx_find(): the transform for @id
 * is allocated lazily on first use and cached in the context.
 */
static struct ksmbd_crypto_ctx *____crypto_aead_ctx_find(int id)
{
	struct ksmbd_crypto_ctx *ctx;

	if (id >= CRYPTO_AEAD_MAX)
		return NULL;

	ctx = ksmbd_find_crypto_ctx();
	if (ctx->ccmaes[id])
		return ctx;

	ctx->ccmaes[id] = alloc_aead(id);
	if (ctx->ccmaes[id])
		return ctx;
	ksmbd_release_crypto_ctx(ctx);
	return NULL;
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_gcm(void)
{
	return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES_GCM);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_ccm(void)
{
	return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES_CCM);
}
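
/*
 * Free every context left on the idle list. This walks the list without
 * taking ctx_lock, so it must only run once no other users remain.
 */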
void ksmbd_crypto_destroy(void)
{
	struct ksmbd_crypto_ctx *ctx;

	while (!list_empty(&ctx_list.idle_ctx)) {
		ctx = list_entry(ctx_list.idle_ctx.next,
				 struct ksmbd_crypto_ctx,
				 list);
		list_del(&ctx->list);
		ctx_free(ctx);
	}
}
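
/*
 * Initialize the pool and pre-allocate a single context so the first
 * caller does not have to take the allocation path.
 */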
int ksmbd_crypto_create(void)
{
	struct ksmbd_crypto_ctx *ctx;

	spin_lock_init(&ctx_list.ctx_lock);
	INIT_LIST_HEAD(&ctx_list.idle_ctx);
	init_waitqueue_head(&ctx_list.ctx_wait);
	ctx_list.avail_ctx = 1;

	ctx = kzalloc(sizeof(struct ksmbd_crypto_ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;
	list_add(&ctx->list, &ctx_list.idle_ctx);
	return 0;
}
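
/*
 * Usage sketch (not part of the original file): keying the cached
 * AES-GCM transform via the standard kernel AEAD API from
 * <crypto/aead.h>. The helper name and key handling are illustrative.
 */
static int example_gcm_setkey(const u8 *key, unsigned int keylen)
{
	struct ksmbd_crypto_ctx *ctx;
	int rc;

	ctx = ksmbd_crypto_ctx_find_gcm();
	if (!ctx)
		return -ENOMEM;
	rc = crypto_aead_setkey(ctx->ccmaes[CRYPTO_AEAD_AES_GCM], key, keylen);
	/* ... aead_request setup and crypto_aead_encrypt() would follow ... */
	ksmbd_release_crypto_ctx(ctx);
	return rc;
}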