cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

cbc.c (5309B)


// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CBC: Cipher Block Chaining mode
 *
 * Copyright (c) 2006-2016 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/log2.h>
#include <linux/module.h>

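/*
 * Out-of-place encryption path (src != dst): for each block,
 * C[i] = E_K(P[i] XOR C[i-1]), where the walk->iv buffer carries the
 * previous ciphertext block (initially the IV) and is refreshed after
 * every block. Returns the number of trailing bytes smaller than one
 * block, which the walk machinery carries over to the next step.
 */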
static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_encrypt;

	do {
		crypto_xor(iv, src, bsize);
		fn(tfm, dst, iv);
		memcpy(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

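/*
 * In-place encryption path (src == dst): each plaintext block is XORed
 * with the previous ciphertext block and then encrypted in place; the
 * last ciphertext block is copied back into walk->iv so it chains into
 * the next walk step.
 */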
static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_encrypt;

	do {
		crypto_xor(src, iv, bsize);
		fn(tfm, src, src);
		iv = src;

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

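/*
 * Top-level encrypt handler: walks the request's scatterlists in
 * virtually mapped chunks and dispatches each chunk to the in-place or
 * out-of-place helper above.
 */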
static int crypto_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			err = crypto_cbc_encrypt_inplace(&walk, skcipher);
		else
			err = crypto_cbc_encrypt_segment(&walk, skcipher);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

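/*
 * Out-of-place decryption path (src != dst): for each block,
 * P[i] = D_K(C[i]) XOR C[i-1], with walk->iv supplying C[0]; the last
 * ciphertext block is saved back into walk->iv once the chunk is done.
 */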
static int crypto_cbc_decrypt_segment(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	do {
		fn(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		iv = src;

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

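/*
 * In-place decryption path (src == dst): blocks are processed from the
 * last one backwards, because decrypting a block in place would
 * otherwise overwrite the ciphertext that the following block still
 * needs as its chaining value. The final ciphertext block is stashed in
 * last_iv so it can become the IV for the next walk step.
 */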
static int crypto_cbc_decrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 last_iv[MAX_CIPHER_BLOCKSIZE];
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	/* Start of the last block. */
	src += nbytes - (nbytes & (bsize - 1)) - bsize;
	memcpy(last_iv, src, bsize);

	for (;;) {
		fn(tfm, src, src);
		if ((nbytes -= bsize) < bsize)
			break;
		crypto_xor(src, src - bsize, bsize);
		src -= bsize;
	}

	crypto_xor(src, walk->iv, bsize);
	memcpy(walk->iv, last_iv, bsize);

	return nbytes;
}

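/*
 * Top-level decrypt handler: mirrors crypto_cbc_encrypt(), choosing the
 * in-place or out-of-place helper for each walk chunk.
 */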
static int crypto_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			err = crypto_cbc_decrypt_inplace(&walk, skcipher);
		else
			err = crypto_cbc_decrypt_segment(&walk, skcipher);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

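/*
 * Template instantiation: wraps a single-block cipher (e.g. "aes") into
 * a "cbc(...)" skcipher instance. The underlying cipher must have a
 * power-of-two block size, since the walk code relies on block-aligned
 * chunks.
 */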
static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	err = -EINVAL;
	if (!is_power_of_2(alg->cra_blocksize))
		goto out_free_inst;

	inst->alg.encrypt = crypto_cbc_encrypt;
	inst->alg.decrypt = crypto_cbc_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

static struct crypto_template crypto_cbc_tmpl = {
	.name = "cbc",
	.create = crypto_cbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_cbc_module_init(void)
{
	return crypto_register_template(&crypto_cbc_tmpl);
}

static void __exit crypto_cbc_module_exit(void)
{
	crypto_unregister_template(&crypto_cbc_tmpl);
}

subsys_initcall(crypto_cbc_module_init);
module_exit(crypto_cbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CBC block cipher mode of operation");
MODULE_ALIAS_CRYPTO("cbc");
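
Usage sketch (not part of cbc.c): a minimal example of how a kernel caller
might drive this template through the skcipher API, assuming the "aes"
cipher is available so that "cbc(aes)" resolves to an instance created by
crypto_cbc_create(). The helper name cbc_sketch_encrypt() and its
parameters are illustrative, not kernel API; completion uses the standard
crypto_wait_req() pattern, and error handling is kept brief.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/*
 * Encrypt one buffer in place with cbc(aes). buf must be linear kernel
 * memory (e.g. kmalloc()ed, not a stack buffer) and len a multiple of
 * the AES block size.
 */
static int cbc_sketch_encrypt(const u8 *key, unsigned int keylen,
			      u8 *buf, unsigned int len, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* Resolves to the "cbc" template wrapped around "aes". */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* Same scatterlist for src and dst: exercises the in-place path. */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Wait for completion in case an async implementation is chosen. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}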