cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

nx-aes-ctr.c (3872B)


// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES CTR routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2011-2012 International Business Machines Inc.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */

#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <asm/vio.h>

#include "nx_csbcpb.h"
#include "nx.h"


static int ctr_aes_nx_set_key(struct crypto_skcipher *tfm,
			      const u8               *in_key,
			      unsigned int            key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;

	nx_ctx_init(nx_ctx, HCOP_FC_AES);

	switch (key_len) {
	case AES_KEYSIZE_128:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
		break;
	case AES_KEYSIZE_192:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
		break;
	case AES_KEYSIZE_256:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
		break;
	default:
		return -EINVAL;
	}

	csbcpb->cpb.hdr.mode = NX_MODE_AES_CTR;
	memcpy(csbcpb->cpb.aes_ctr.key, in_key, key_len);

	return 0;
}

static int ctr3686_aes_nx_set_key(struct crypto_skcipher *tfm,
				  const u8               *in_key,
				  unsigned int            key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm);

	if (key_len < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(nx_ctx->priv.ctr.nonce,
	       in_key + key_len - CTR_RFC3686_NONCE_SIZE,
	       CTR_RFC3686_NONCE_SIZE);

	key_len -= CTR_RFC3686_NONCE_SIZE;

	return ctr_aes_nx_set_key(tfm, in_key, key_len);
}

static int ctr_aes_nx_crypt(struct skcipher_request *req, u8 *iv)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	unsigned long irq_flags;
	unsigned int processed = 0, to_process;
	int rc;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

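	/*
	 * A request may exceed what the coprocessor accepts in a single
	 * operation; nx_build_sg_lists() trims to_process to the supported
	 * limit, so keep issuing operations until the whole request is done.
	 */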
	do {
		to_process = req->cryptlen - processed;

		rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src,
				       &to_process, processed,
				       csbcpb->cpb.aes_ctr.iv);
		if (rc)
			goto out;

		if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
			rc = -EINVAL;
			goto out;
		}

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			goto out;

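		/*
		 * Propagate the updated counter back into iv so the next
		 * chunk of this request continues where this one left off.
		 */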
		memcpy(iv, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
			     &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < req->cryptlen);
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

static int ctr3686_aes_nx_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm);
	u8 iv[16];

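	/*
	 * Build the RFC 3686 counter block: 4-byte nonce (captured at
	 * setkey time), 8-byte per-request IV, then a 32-bit big-endian
	 * block counter starting at 1.
	 */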
	memcpy(iv, nx_ctx->priv.ctr.nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);
	iv[12] = iv[13] = iv[14] = 0;
	iv[15] = 1;

	return ctr_aes_nx_crypt(req, iv);
}

struct skcipher_alg nx_ctr3686_aes_alg = {
	.base.cra_name		= "rfc3686(ctr(aes))",
	.base.cra_driver_name	= "rfc3686-ctr-aes-nx",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct nx_crypto_ctx),
	.base.cra_module	= THIS_MODULE,
	.init			= nx_crypto_ctx_aes_ctr_init,
	.exit			= nx_crypto_ctx_skcipher_exit,
	.min_keysize		= AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize			= CTR_RFC3686_IV_SIZE,
	.setkey			= ctr3686_aes_nx_set_key,
	.encrypt		= ctr3686_aes_nx_crypt,
	.decrypt		= ctr3686_aes_nx_crypt,
	.chunksize		= AES_BLOCK_SIZE,
};
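
For reference, below is a minimal sketch (not part of this file) of how the "rfc3686(ctr(aes))" transform registered above is consumed through the kernel's generic skcipher API. The function and parameter names are hypothetical, error handling is abbreviated, and buf must not live on the stack because it is mapped through a scatterlist.

/* Hypothetical caller-side sketch for "rfc3686(ctr(aes))". */
#include <crypto/skcipher.h>
#include <crypto/ctr.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int example_rfc3686_encrypt(u8 *buf, unsigned int len,
				   const u8 *key, unsigned int key_len,
				   u8 iv[CTR_RFC3686_IV_SIZE])
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int rc;

	tfm = crypto_alloc_skcipher("rfc3686(ctr(aes))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* key layout: AES key followed by the 4-byte RFC 3686 nonce */
	rc = crypto_skcipher_setkey(tfm, key, key_len);
	if (rc)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		rc = -ENOMEM;
		goto out_free_tfm;
	}

	/* encrypt in place: src and dst point at the same buffer */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* wait for completion whether the request runs sync or async */
	rc = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return rc;
}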