cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

ctr.c (9789B)


// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

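/*
 * rfc3686 wraps a child "ctr" skcipher; the 4-byte nonce split off the
 * key in setkey is stored here and prepended to each request's IV to
 * form the full counter block.
 */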
struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

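/*
 * Final partial block: encrypt the counter into an alignment-adjusted
 * stack buffer and XOR only the remaining nbytes into the destination,
 * still bumping the counter so walk->iv stays consistent.
 */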
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

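/*
 * Out-of-place walk: the keystream can be generated straight into the
 * destination and then XORed with the source, saving a temporary
 * buffer. Returns the number of trailing bytes (less than one block)
 * left for the caller.
 */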
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

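/*
 * In-place walk: source and destination alias, so the keystream goes
 * through an alignment-adjusted stack buffer before being XORed back
 * into the data.
 */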
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

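/*
 * Top-level encrypt/decrypt handler (identical in CTR mode): walk the
 * request in block-size chunks, dispatching to the in-place or
 * out-of-place helper, then handle any trailing partial block.
 */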
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

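/*
 * Template constructor for "ctr(...)": the underlying cipher's block
 * size must be a nonzero multiple of four so crypto_inc() can step the
 * counter, and the instance is advertised as a stream cipher (block
 * size 1) whose chunksize equals the cipher's block size.
 */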
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

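/*
 * Per RFC 3686, the last four bytes of the key material are the
 * per-association nonce, not cipher key: peel them off into the
 * context and hand the remainder to the child CTR cipher.
 */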
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

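/*
 * Build the RFC 3686 counter block (32-bit nonce || 64-bit IV ||
 * 32-bit block counter, counting from 1) in the request context and
 * run the child CTR cipher on a subrequest embedded there.
 */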
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

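/*
 * Template constructor for "rfc3686(...)": grab the underlying CTR
 * skcipher, verify it is a stream cipher with a 16-byte counter block,
 * and grow the advertised key sizes by the 4-byte nonce that rfc3686
 * expects appended to the key.
 */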
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc3686_free(inst);
	}
	return err;
}

static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
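
For reference, a minimal sketch of driving the "ctr" template above through the in-kernel skcipher API. The module name ctr_demo, the all-zero key, and the buffer contents are illustrative only (not part of this file); error handling is condensed and the request runs synchronously via crypto_wait_req().

/* Hypothetical demo module; not part of crypto/ctr.c. */
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int __init ctr_demo_init(void)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	u8 key[16] = { 0 };	/* demo-only key; never hardcode real keys */
	u8 iv[16] = { 0 };	/* initial counter block */
	u8 *buf;
	int err;

	/* "ctr(aes)" instantiates the template registered by ctr.c */
	tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* heap buffer: scatterlists must not point into a vmapped stack */
	buf = kzalloc(20, GFP_KERNEL);
	if (!buf) {
		err = -ENOMEM;
		goto out;
	}

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out;
	}

	/* 20 bytes: CTR is a stream cipher, so no padding is needed */
	sg_init_one(&sg, buf, 20);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, 20, iv);

	/* encrypt in place; decryption is the same operation in CTR mode */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
out:
	skcipher_request_free(req);
	kfree(buf);
	crypto_free_skcipher(tfm);
	return err;
}
module_init(ctr_demo_init);
MODULE_LICENSE("GPL");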