// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit
 * keysizes, and for 3DES, in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi.rst
 */

#include <linux/bottom_half.h>
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ss.h"

static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;
	unsigned int todo, len;

	/* only non-empty requests whose length is a multiple of 16 can go to the hardware */
	if (areq->cryptlen == 0 || areq->cryptlen % 16) {
		algt->stat_fb_len++;
		return true;
	}

	if (sg_nents_for_len(areq->src, areq->cryptlen) > 8 ||
	    sg_nents_for_len(areq->dst, areq->cryptlen) > 8) {
		algt->stat_fb_sgnum++;
		return true;
	}

	len = areq->cryptlen;
	sg = areq->src;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}
	len = areq->cryptlen;
	sg = areq->dst;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}

	/* the SS needs the same number of SGs (with the same lengths) for source and destination */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;
	return false;
}

static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	algt->stat_fb++;
#endif
	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

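/*
 * sun8i_ss_setup_ivs - prepare and DMA map the IVs needed by one request.
 *
 * CBC decryption consumes the last ciphertext block of each chunk as the IV
 * of the next one, and the engine may overwrite the source buffers when DMA
 * is bidirectional, so the final ciphertext block is saved in sf->biv and one
 * IV per source SG entry is copied into sf->iv[] before being mapped for the
 * device. Encryption only needs the initial IV, since the hardware chains
 * the blocks by itself.
 */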
static int sun8i_ss_setup_ivs(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct scatterlist *sg = areq->src;
	unsigned int todo, offset;
	unsigned int len = areq->cryptlen;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	int i = 0;
	dma_addr_t a;
	int err;

	rctx->ivlen = ivsize;
	if (rctx->op_dir & SS_DECRYPTION) {
		/* back up the last ciphertext block: it is the output IV */
		offset = areq->cryptlen - ivsize;
		scatterwalk_map_and_copy(sf->biv, areq->src, offset,
					 ivsize, 0);
	}

	/* we need to copy all IVs from the source in case DMA is bidirectional */
	while (sg && len) {
		if (sg_dma_len(sg) == 0) {
			sg = sg_next(sg);
			continue;
		}
		if (i == 0)
			memcpy(sf->iv[0], areq->iv, ivsize);
		a = dma_map_single(ss->dev, sf->iv[i], ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, a)) {
			memzero_explicit(sf->iv[i], ivsize);
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			err = -EFAULT;
			goto dma_iv_error;
		}
		rctx->p_iv[i] = a;
		/*
		 * All the other IVs are needed only for decryption.
		 * Note: SS_ENCRYPTION is 0, so test with == and not with &.
		 */
		if (rctx->op_dir == SS_ENCRYPTION)
			return 0;
		todo = min(len, sg_dma_len(sg));
		len -= todo;
		i++;
		if (i < MAX_SG) {
			offset = sg->length - ivsize;
			scatterwalk_map_and_copy(sf->iv[i], sg, offset, ivsize, 0);
		}
		rctx->niv = i;
		sg = sg_next(sg);
	}

	return 0;
dma_iv_error:
	i--;
	while (i >= 0) {
		dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
		memzero_explicit(sf->iv[i], ivsize);
		i--;
	}
	return err;
}

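/*
 * sun8i_ss_cipher - run one skcipher request on the hardware.
 *
 * The key, the IVs and both SG lists are DMA mapped, the per-flow task
 * descriptors in rctx->t_src[]/t_dst[] are filled (lengths expressed in
 * 32-bit words), then the task is handed to sun8i_ss_run_task(). On the way
 * out everything is unmapped and the IV needed by a chained request is
 * written back to areq->iv.
 */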
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int nsgs = sg_nents_for_len(areq->src, areq->cryptlen);
	int nsgd = sg_nents_for_len(areq->dst, areq->cryptlen);
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);

	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && ivsize > 0) {
		err = sun8i_ss_setup_ivs(areq);
		if (err)
			goto theend_key;
	}
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		/* the hardware task length is expressed in 32-bit words */
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (areq->iv && ivsize > 0) {
		for (i = 0; i < rctx->niv; i++) {
			dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
			memzero_explicit(sf->iv[i], ivsize);
		}

		/* give back the IV that a chained request would need */
		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & SS_DECRYPTION) {
			memcpy(areq->iv, sf->biv, ivsize);
			memzero_explicit(sf->biv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
	}

theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}

static int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);
	int err;

	err = sun8i_ss_cipher(breq);
	/* completion callbacks are expected to run with BHs disabled */
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();

	return 0;
}

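/*
 * sun8i_ss_skdecrypt()/sun8i_ss_skencrypt() are the entry points called by
 * the crypto API. They reset the request context, route requests that the
 * hardware cannot process to the software fallback, and queue everything
 * else on one of the device flows through crypto_engine.
 */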
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	op->ss = algt->ss;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx) +
			 crypto_skcipher_reqsize(op->fallback_tfm);

	memcpy(algt->fbname,
	       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)),
	       CRYPTO_MAX_ALG_NAME);

	op->enginectx.op.do_one_request = sun8i_ss_handle_cipher_request;
	op->enginectx.op.prepare_request = NULL;
	op->enginectx.op.unprepare_request = NULL;

	err = pm_runtime_resume_and_get(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}

void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}

int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
	case 192 / 8:
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}

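/*
 * The setkey handlers keep a DMA-able copy of the key (GFP_DMA) for the
 * engine and forward the same key to the fallback tfm, so the software path
 * always matches the hardware one.
 */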
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		return -EINVAL;
	}

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
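
/*
 * Usage sketch (illustrative only, not part of the driver): once the SS has
 * registered its algorithms, they are reached through the generic skcipher
 * API; "cbc(aes)" resolves to this implementation when it has the highest
 * priority. The key and src_sg/dst_sg scatterlists are placeholders and
 * error handling is trimmed for brevity.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 iv[16] = { 0 };
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	crypto_skcipher_setkey(tfm, key, 16);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, 16, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */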