cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

cryp.c (12423B)


// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) ST-Ericsson SA 2010
 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
 */

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/types.h>

#include "cryp_p.h"
#include "cryp.h"

/*
 * cryp_wait_until_done - wait until the device logic is not busy
 */
void cryp_wait_until_done(struct cryp_device_data *device_data)
{
	while (cryp_is_logic_busy(device_data))
		cpu_relax();
}

/**
 * cryp_check - This routine checks Peripheral and PCell Id
 * @device_data: Pointer to the device data struct for base address.
 */
int cryp_check(struct cryp_device_data *device_data)
{
	int peripheralid2 = 0;

	if (NULL == device_data)
		return -EINVAL;

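	/* Peripheral ID2 must match the DB8500 variant of the CRYP block. */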
	peripheralid2 = readl_relaxed(&device_data->base->periphId2);

	if (peripheralid2 != CRYP_PERIPHERAL_ID2_DB8500)
		return -EPERM;

	/* Check Peripheral and Pcell Id Register for CRYP */
	if ((CRYP_PERIPHERAL_ID0 ==
		readl_relaxed(&device_data->base->periphId0))
	    && (CRYP_PERIPHERAL_ID1 ==
		    readl_relaxed(&device_data->base->periphId1))
	    && (CRYP_PERIPHERAL_ID3 ==
		    readl_relaxed(&device_data->base->periphId3))
	    && (CRYP_PCELL_ID0 ==
		    readl_relaxed(&device_data->base->pcellId0))
	    && (CRYP_PCELL_ID1 ==
		    readl_relaxed(&device_data->base->pcellId1))
	    && (CRYP_PCELL_ID2 ==
		    readl_relaxed(&device_data->base->pcellId2))
	    && (CRYP_PCELL_ID3 ==
		    readl_relaxed(&device_data->base->pcellId3))) {
		return 0;
	}

	return -EPERM;
}

/**
 * cryp_activity - This routine enables/disables the cryptography function.
 * @device_data: Pointer to the device data struct for base address.
 * @cryp_crypen: Enable/Disable functionality
 */
void cryp_activity(struct cryp_device_data *device_data,
		   enum cryp_crypen cryp_crypen)
{
	CRYP_PUT_BITS(&device_data->base->cr,
		      cryp_crypen,
		      CRYP_CR_CRYPEN_POS,
		      CRYP_CR_CRYPEN_MASK);
}

/**
 * cryp_flush_inoutfifo - Resets both the input and the output FIFOs
 * @device_data: Pointer to the device data struct for base address.
 */
void cryp_flush_inoutfifo(struct cryp_device_data *device_data)
{
	/*
	 * We always need to disable the hardware before trying to flush the
	 * FIFO. This is something that isn't written in the design
	 * specification, but we have been informed by the hardware designers
	 * that this must be done.
	 */
	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
	cryp_wait_until_done(device_data);

	CRYP_SET_BITS(&device_data->base->cr, CRYP_CR_FFLUSH_MASK);
	/*
	 * CRYP_SR_INFIFO_READY_MASK is the expected value of the status
	 * register when starting a new calculation; it means the input FIFO
	 * is empty and not full.
	 */
	while (readl_relaxed(&device_data->base->sr) !=
	       CRYP_SR_INFIFO_READY_MASK)
		cpu_relax();
}

/**
 * cryp_set_configuration - This routine sets the control register of the CRYP IP
 * @device_data: Pointer to the device data struct for base address.
 * @cryp_config: Pointer to the configuration parameter
 * @control_register: The control register to be written later on.
 */
int cryp_set_configuration(struct cryp_device_data *device_data,
			   struct cryp_config *cryp_config,
			   u32 *control_register)
{
	u32 cr_for_kse;

	if (NULL == device_data || NULL == cryp_config)
		return -EINVAL;

	*control_register |= (cryp_config->keysize << CRYP_CR_KEYSIZE_POS);

	/* Prepare key for decryption in AES_ECB and AES_CBC mode. */
	if ((CRYP_ALGORITHM_DECRYPT == cryp_config->algodir) &&
	    ((CRYP_ALGO_AES_ECB == cryp_config->algomode) ||
	     (CRYP_ALGO_AES_CBC == cryp_config->algomode))) {
		cr_for_kse = *control_register;
		/*
		 * This seems a bit odd, but it is indeed needed to set this to
		 * encrypt even though it is a decryption that we are doing. It
		 * is also mentioned in the design spec that you need to do
		 * this. After the key preparation for decryption is done you
		 * should set algodir back to decryption, which is done outside
		 * this if statement.
		 *
		 * According to the design specification we should set mode ECB
		 * during key preparation even though we might be running CBC
		 * when entering this function.
		 *
		 * Writing to KSE_ENABLED will drop CRYPEN when key preparation
		 * is done. Therefore we need to set CRYPEN again outside this
		 * if statement when running decryption.
		 */
		cr_for_kse |= ((CRYP_ALGORITHM_ENCRYPT << CRYP_CR_ALGODIR_POS) |
			       (CRYP_ALGO_AES_ECB << CRYP_CR_ALGOMODE_POS) |
			       (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS) |
			       (KSE_ENABLED << CRYP_CR_KSE_POS));

		writel_relaxed(cr_for_kse, &device_data->base->cr);
		cryp_wait_until_done(device_data);
	}

	*control_register |=
		((cryp_config->algomode << CRYP_CR_ALGOMODE_POS) |
		 (cryp_config->algodir << CRYP_CR_ALGODIR_POS));

	return 0;
}

/**
 * cryp_configure_protection - set the protection bits in the CRYP logic.
 * @device_data: Pointer to the device data struct for base address.
 * @p_protect_config:	Pointer to the protection mode and
 *			secure mode configuration
 */
int cryp_configure_protection(struct cryp_device_data *device_data,
			      struct cryp_protection_config *p_protect_config)
{
	if (NULL == p_protect_config)
		return -EINVAL;

	CRYP_WRITE_BIT(&device_data->base->cr,
		       (u32) p_protect_config->secure_access,
		       CRYP_CR_SECURE_MASK);
	CRYP_PUT_BITS(&device_data->base->cr,
		      p_protect_config->privilege_access,
		      CRYP_CR_PRLG_POS,
		      CRYP_CR_PRLG_MASK);

	return 0;
}

/**
 * cryp_is_logic_busy - returns the busy status of the CRYP logic
 * @device_data: Pointer to the device data struct for base address.
 */
int cryp_is_logic_busy(struct cryp_device_data *device_data)
{
	return CRYP_TEST_BITS(&device_data->base->sr,
			      CRYP_SR_BUSY_MASK);
}

/**
 * cryp_configure_for_dma - configures the CRYP IP for DMA operation
 * @device_data: Pointer to the device data struct for base address.
 * @dma_req: Specifies the DMA request type value.
 */
void cryp_configure_for_dma(struct cryp_device_data *device_data,
			    enum cryp_dma_req_type dma_req)
{
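	/* Set the requested DMA request line(s) in the DMA control register. */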
	CRYP_SET_BITS(&device_data->base->dmacr,
		      (u32) dma_req);
}

/**
 * cryp_configure_key_values - configures the key values for CRYP operations
 * @device_data: Pointer to the device data struct for base address.
 * @key_reg_index: Key value index register
 * @key_value: The key value struct
 */
int cryp_configure_key_values(struct cryp_device_data *device_data,
			      enum cryp_key_reg_index key_reg_index,
			      struct cryp_key_value key_value)
{
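	/* Wait for any ongoing operation to finish before updating the key registers. */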
	while (cryp_is_logic_busy(device_data))
		cpu_relax();

	switch (key_reg_index) {
	case CRYP_KEY_REG_1:
		writel_relaxed(key_value.key_value_left,
				&device_data->base->key_1_l);
		writel_relaxed(key_value.key_value_right,
				&device_data->base->key_1_r);
		break;
	case CRYP_KEY_REG_2:
		writel_relaxed(key_value.key_value_left,
				&device_data->base->key_2_l);
		writel_relaxed(key_value.key_value_right,
				&device_data->base->key_2_r);
		break;
	case CRYP_KEY_REG_3:
		writel_relaxed(key_value.key_value_left,
				&device_data->base->key_3_l);
		writel_relaxed(key_value.key_value_right,
				&device_data->base->key_3_r);
		break;
	case CRYP_KEY_REG_4:
		writel_relaxed(key_value.key_value_left,
				&device_data->base->key_4_l);
		writel_relaxed(key_value.key_value_right,
				&device_data->base->key_4_r);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

/**
 * cryp_configure_init_vector - configures the initialization vector register
 * @device_data: Pointer to the device data struct for base address.
 * @init_vector_index: Specifies the index of the init vector.
 * @init_vector_value: Specifies the value for the init vector.
 */
int cryp_configure_init_vector(struct cryp_device_data *device_data,
			       enum cryp_init_vector_index
			       init_vector_index,
			       struct cryp_init_vector_value
			       init_vector_value)
{
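	/* Wait until the CRYP logic is idle before writing the IV registers. */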
	while (cryp_is_logic_busy(device_data))
		cpu_relax();

	switch (init_vector_index) {
	case CRYP_INIT_VECTOR_INDEX_0:
		writel_relaxed(init_vector_value.init_value_left,
		       &device_data->base->init_vect_0_l);
		writel_relaxed(init_vector_value.init_value_right,
		       &device_data->base->init_vect_0_r);
		break;
	case CRYP_INIT_VECTOR_INDEX_1:
		writel_relaxed(init_vector_value.init_value_left,
		       &device_data->base->init_vect_1_l);
		writel_relaxed(init_vector_value.init_value_right,
		       &device_data->base->init_vect_1_r);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

/**
 * cryp_save_device_context -	Store hardware registers and
 *				other device context parameter
 * @device_data: Pointer to the device data struct for base address.
 * @ctx: Crypto device context
 * @cryp_mode: Mode: Polling, Interrupt or DMA
 */
void cryp_save_device_context(struct cryp_device_data *device_data,
			      struct cryp_device_context *ctx,
			      int cryp_mode)
{
	enum cryp_algo_mode algomode;
	struct cryp_register __iomem *src_reg = device_data->base;
	struct cryp_config *config =
		(struct cryp_config *)device_data->current_ctx;

	/*
	 * Always start by disabling the hardware and waiting for it to finish
	 * its ongoing calculations before trying to reprogram it.
	 */
	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
	cryp_wait_until_done(device_data);

	if (cryp_mode == CRYP_MODE_DMA)
		cryp_configure_for_dma(device_data, CRYP_DMA_DISABLE_BOTH);

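	/* If the input FIFO is not empty, save the pending input data word. */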
	if (CRYP_TEST_BITS(&src_reg->sr, CRYP_SR_IFEM_MASK) == 0)
		ctx->din = readl_relaxed(&src_reg->din);

	ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK;

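	/*
	 * Save only the key registers in use for the configured key size;
	 * each case falls through so larger keys also save the lower key
	 * registers.
	 */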
	switch (config->keysize) {
	case CRYP_KEY_SIZE_256:
		ctx->key_4_l = readl_relaxed(&src_reg->key_4_l);
		ctx->key_4_r = readl_relaxed(&src_reg->key_4_r);
		fallthrough;

	case CRYP_KEY_SIZE_192:
		ctx->key_3_l = readl_relaxed(&src_reg->key_3_l);
		ctx->key_3_r = readl_relaxed(&src_reg->key_3_r);
		fallthrough;

	case CRYP_KEY_SIZE_128:
		ctx->key_2_l = readl_relaxed(&src_reg->key_2_l);
		ctx->key_2_r = readl_relaxed(&src_reg->key_2_r);
		fallthrough;

	default:
		ctx->key_1_l = readl_relaxed(&src_reg->key_1_l);
		ctx->key_1_r = readl_relaxed(&src_reg->key_1_r);
	}

	/* Save IV for CBC mode for both AES and DES. */
	algomode = ((ctx->cr & CRYP_CR_ALGOMODE_MASK) >> CRYP_CR_ALGOMODE_POS);
	if (algomode == CRYP_ALGO_TDES_CBC ||
	    algomode == CRYP_ALGO_DES_CBC ||
	    algomode == CRYP_ALGO_AES_CBC) {
		ctx->init_vect_0_l = readl_relaxed(&src_reg->init_vect_0_l);
		ctx->init_vect_0_r = readl_relaxed(&src_reg->init_vect_0_r);
		ctx->init_vect_1_l = readl_relaxed(&src_reg->init_vect_1_l);
		ctx->init_vect_1_r = readl_relaxed(&src_reg->init_vect_1_r);
	}
}

/**
 * cryp_restore_device_context -	Restore hardware registers and
 *					other device context parameter
 * @device_data: Pointer to the device data struct for base address.
 * @ctx: Crypto device context
 */
void cryp_restore_device_context(struct cryp_device_data *device_data,
				 struct cryp_device_context *ctx)
{
	struct cryp_register __iomem *reg = device_data->base;
	struct cryp_config *config =
		(struct cryp_config *)device_data->current_ctx;

	/*
	 * Fall through for all items in switch statement. DES is captured in
	 * the default.
	 */
	switch (config->keysize) {
	case CRYP_KEY_SIZE_256:
		writel_relaxed(ctx->key_4_l, &reg->key_4_l);
		writel_relaxed(ctx->key_4_r, &reg->key_4_r);
		fallthrough;

	case CRYP_KEY_SIZE_192:
		writel_relaxed(ctx->key_3_l, &reg->key_3_l);
		writel_relaxed(ctx->key_3_r, &reg->key_3_r);
		fallthrough;

	case CRYP_KEY_SIZE_128:
		writel_relaxed(ctx->key_2_l, &reg->key_2_l);
		writel_relaxed(ctx->key_2_r, &reg->key_2_r);
		fallthrough;

	default:
		writel_relaxed(ctx->key_1_l, &reg->key_1_l);
		writel_relaxed(ctx->key_1_r, &reg->key_1_r);
	}

	/* Restore IV for CBC mode for AES and DES. */
	if (config->algomode == CRYP_ALGO_TDES_CBC ||
	    config->algomode == CRYP_ALGO_DES_CBC ||
	    config->algomode == CRYP_ALGO_AES_CBC) {
		writel_relaxed(ctx->init_vect_0_l, &reg->init_vect_0_l);
		writel_relaxed(ctx->init_vect_0_r, &reg->init_vect_0_r);
		writel_relaxed(ctx->init_vect_1_l, &reg->init_vect_1_l);
		writel_relaxed(ctx->init_vect_1_r, &reg->init_vect_1_r);
	}
}