cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux
Log | Files | Refs | README | LICENSE | sfeed.txt

regs.h (11840B)


      1/* SPDX-License-Identifier: GPL-2.0 */
      2/*
      3 * Driver for the Synopsys DesignWare AHB DMA Controller
      4 *
      5 * Copyright (C) 2005-2007 Atmel Corporation
      6 * Copyright (C) 2010-2011 ST Microelectronics
      7 * Copyright (C) 2016 Intel Corporation
      8 */
      9
     10#include <linux/bitops.h>
     11#include <linux/interrupt.h>
     12#include <linux/dmaengine.h>
     13
     14#include <linux/io-64-nonatomic-hi-lo.h>
     15
     16#include "internal.h"
     17
#define DW_DMA_MAX_NR_REQUESTS	16

/*
 * Flow controller (transfer type) encodings.  The value selects which
 * party terminates the transfer: D = DMAC, P = peripheral, SP/DP =
 * source/destination peripheral.  The first four values match the
 * DWC_CTLL_FC_* field encodings below.
 */
enum dw_dma_fc {
	DW_DMA_FC_D_M2M,	/* DMAC flow ctrl, mem-to-mem */
	DW_DMA_FC_D_M2P,	/* DMAC flow ctrl, mem-to-periph */
	DW_DMA_FC_D_P2M,	/* DMAC flow ctrl, periph-to-mem */
	DW_DMA_FC_D_P2P,	/* DMAC flow ctrl, periph-to-periph */
	DW_DMA_FC_P_P2M,	/* periph flow ctrl, periph-to-mem */
	DW_DMA_FC_SP_P2P,	/* src periph flow ctrl, periph-to-periph */
	DW_DMA_FC_P_M2P,	/* periph flow ctrl, mem-to-periph */
	DW_DMA_FC_DP_P2P,	/* dst periph flow ctrl, periph-to-periph */
};
     31
/*
 * Redefine this macro to handle differences between 32- and 64-bit
 * addressing, big vs. little endian, etc.
 *
 * As defined here it expands to a 32-bit register plus 32 bits of
 * padding, so each register occupies a 64-bit slot in the register-map
 * structs below.
 */
#define DW_REG(name)		u32 name; u32 __pad_##name
     37
/* Hardware register definitions. */

/* Per-channel register bank; layout mirrors the hardware, do not reorder. */
struct dw_dma_chan_regs {
	DW_REG(SAR);		/* Source Address Register */
	DW_REG(DAR);		/* Destination Address Register */
	DW_REG(LLP);		/* Linked List Pointer */
	u32	CTL_LO;		/* Control Register Low */
	u32	CTL_HI;		/* Control Register High */
	DW_REG(SSTAT);		/* Source Status Register */
	DW_REG(DSTAT);		/* Destination Status Register */
	DW_REG(SSTATAR);	/* Source Status Address Register */
	DW_REG(DSTATAR);	/* Destination Status Address Register */
	u32	CFG_LO;		/* Configuration Register Low */
	u32	CFG_HI;		/* Configuration Register High */
	DW_REG(SGR);		/* Source Gather Register */
	DW_REG(DSR);		/* Destination Scatter Register */
};
     54
/* One bank of interrupt registers, one slot per interrupt source. */
struct dw_dma_irq_regs {
	DW_REG(XFER);		/* transfer complete */
	DW_REG(BLOCK);		/* block complete */
	DW_REG(SRC_TRAN);	/* source transaction complete */
	DW_REG(DST_TRAN);	/* destination transaction complete */
	DW_REG(ERROR);		/* transfer error */
};
     62
/*
 * Global register map of the controller.  Per-channel banks come first,
 * followed by the shared interrupt, software-handshake and configuration
 * registers.  DW_REG() keeps every 32-bit register on a 64-bit stride.
 */
struct dw_dma_regs {
	/* per-channel registers */
	struct dw_dma_chan_regs	CHAN[DW_DMA_MAX_NR_CHANNELS];

	/* irq handling */
	struct dw_dma_irq_regs	RAW;		/* r */
	struct dw_dma_irq_regs	STATUS;		/* r (raw & mask) */
	struct dw_dma_irq_regs	MASK;		/* rw (set = irq enabled) */
	struct dw_dma_irq_regs	CLEAR;		/* w (ack, affects "raw") */

	DW_REG(STATUS_INT);			/* r */

	/* software handshaking */
	DW_REG(REQ_SRC);
	DW_REG(REQ_DST);
	DW_REG(SGL_REQ_SRC);
	DW_REG(SGL_REQ_DST);
	DW_REG(LAST_SRC);
	DW_REG(LAST_DST);

	/* miscellaneous */
	DW_REG(CFG);
	DW_REG(CH_EN);
	DW_REG(ID);
	DW_REG(TEST);

	/* iDMA 32-bit support */
	DW_REG(CLASS_PRIORITY0);
	DW_REG(CLASS_PRIORITY1);

	/* optional encoded params, 0x3c8..0x3f7 */
	u32	__reserved;

	/* per-channel configuration registers */
	u32	DWC_PARAMS[DW_DMA_MAX_NR_CHANNELS];
	u32	MULTI_BLK_TYPE;
	u32	MAX_BLK_SIZE;

	/* top-level parameters */
	u32	DW_PARAMS;

	/* component ID */
	u32	COMP_TYPE;
	u32	COMP_VERSION;

	/* iDMA 32-bit support */
	DW_REG(FIFO_PARTITION0);
	DW_REG(FIFO_PARTITION1);

	DW_REG(SAI_ERR);
	DW_REG(GLOBAL_CFG);
};
    115
/* Bitfields in DW_PARAMS — bit positions of the encoded hw parameters */
#define DW_PARAMS_NR_CHAN	8		/* number of channels */
#define DW_PARAMS_NR_MASTER	11		/* number of AHB masters */
#define DW_PARAMS_DATA_WIDTH(n)	(15 + 2 * (n))	/* data width of master n */
#define DW_PARAMS_DATA_WIDTH1	15		/* master 1 data width */
#define DW_PARAMS_DATA_WIDTH2	17		/* master 2 data width */
#define DW_PARAMS_DATA_WIDTH3	19		/* master 3 data width */
#define DW_PARAMS_DATA_WIDTH4	21		/* master 4 data width */
#define DW_PARAMS_EN		28		/* encoded parameters */

/* Bitfields in DWC_PARAMS — per-channel encoded parameters */
#define DWC_PARAMS_MBLK_EN	11		/* multi block transfer */
#define DWC_PARAMS_HC_LLP	13		/* set LLP register to zero */
#define DWC_PARAMS_MSIZE	16		/* max group transaction size */
    130
/*
 * bursts size: hardware encoding of the burst transaction length
 * (1, 4, 8, ... 256 data items), used via DWC_CTLL_*_MSIZE().
 */
enum dw_dma_msize {
	DW_DMA_MSIZE_1,
	DW_DMA_MSIZE_4,
	DW_DMA_MSIZE_8,
	DW_DMA_MSIZE_16,
	DW_DMA_MSIZE_32,
	DW_DMA_MSIZE_64,
	DW_DMA_MSIZE_128,
	DW_DMA_MSIZE_256,
};
    142
/* Bitfields in LLP */
#define DWC_LLP_LMS(x)		((x) & 3)	/* list master select */
#define DWC_LLP_LOC(x)		((x) & ~3)	/* next lli */

/* Bitfields in CTL_LO */
#define DWC_CTLL_INT_EN		(1 << 0)	/* irqs enabled? */
#define DWC_CTLL_DST_WIDTH(n)	((n)<<1)	/* bytes per element */
#define DWC_CTLL_SRC_WIDTH(n)	((n)<<4)
#define DWC_CTLL_DST_INC	(0<<7)		/* DAR update/not */
#define DWC_CTLL_DST_DEC	(1<<7)
#define DWC_CTLL_DST_FIX	(2<<7)
#define DWC_CTLL_SRC_INC	(0<<9)		/* SAR update/not */
#define DWC_CTLL_SRC_DEC	(1<<9)
#define DWC_CTLL_SRC_FIX	(2<<9)
#define DWC_CTLL_DST_MSIZE(n)	((n)<<11)	/* burst, #elements */
#define DWC_CTLL_SRC_MSIZE(n)	((n)<<14)
#define DWC_CTLL_S_GATH_EN	(1 << 17)	/* src gather, !FIX */
#define DWC_CTLL_D_SCAT_EN	(1 << 18)	/* dst scatter, !FIX */
#define DWC_CTLL_FC(n)		((n) << 20)	/* flow ctrl, see enum dw_dma_fc */
#define DWC_CTLL_FC_M2M		(0 << 20)	/* mem-to-mem */
#define DWC_CTLL_FC_M2P		(1 << 20)	/* mem-to-periph */
#define DWC_CTLL_FC_P2M		(2 << 20)	/* periph-to-mem */
#define DWC_CTLL_FC_P2P		(3 << 20)	/* periph-to-periph */
/* plus 4 transfer types for peripheral-as-flow-controller */
#define DWC_CTLL_DMS(n)		((n)<<23)	/* dst master select */
#define DWC_CTLL_SMS(n)		((n)<<25)	/* src master select */
#define DWC_CTLL_LLP_D_EN	(1 << 27)	/* dest block chain */
#define DWC_CTLL_LLP_S_EN	(1 << 28)	/* src block chain */
    171
/* Bitfields in CTL_HI */
#define DWC_CTLH_BLOCK_TS_MASK	GENMASK(11, 0)	/* block transfer size */
#define DWC_CTLH_BLOCK_TS(x)	((x) & DWC_CTLH_BLOCK_TS_MASK)
#define DWC_CTLH_DONE		(1 << 12)	/* block transfer done */
    176
    177/* Bitfields in CFG_LO */
    178#define DWC_CFGL_CH_PRIOR_MASK	(0x7 << 5)	/* priority mask */
    179#define DWC_CFGL_CH_PRIOR(x)	((x) << 5)	/* priority */
    180#define DWC_CFGL_CH_SUSP	(1 << 8)	/* pause xfer */
    181#define DWC_CFGL_FIFO_EMPTY	(1 << 9)	/* pause xfer */
    182#define DWC_CFGL_HS_DST		(1 << 10)	/* handshake w/dst */
    183#define DWC_CFGL_HS_SRC		(1 << 11)	/* handshake w/src */
    184#define DWC_CFGL_LOCK_CH_XFER	(0 << 12)	/* scope of LOCK_CH */
    185#define DWC_CFGL_LOCK_CH_BLOCK	(1 << 12)
    186#define DWC_CFGL_LOCK_CH_XACT	(2 << 12)
    187#define DWC_CFGL_LOCK_BUS_XFER	(0 << 14)	/* scope of LOCK_BUS */
    188#define DWC_CFGL_LOCK_BUS_BLOCK	(1 << 14)
    189#define DWC_CFGL_LOCK_BUS_XACT	(2 << 14)
    190#define DWC_CFGL_LOCK_CH	(1 << 15)	/* channel lockout */
    191#define DWC_CFGL_LOCK_BUS	(1 << 16)	/* busmaster lockout */
    192#define DWC_CFGL_HS_DST_POL	(1 << 18)	/* dst handshake active low */
    193#define DWC_CFGL_HS_SRC_POL	(1 << 19)	/* src handshake active low */
    194#define DWC_CFGL_MAX_BURST(x)	((x) << 20)
    195#define DWC_CFGL_RELOAD_SAR	(1 << 30)
    196#define DWC_CFGL_RELOAD_DAR	(1 << 31)
    197
/* Bitfields in CFG_HI */
#define DWC_CFGH_FCMODE		(1 << 0)	/* flow control mode */
#define DWC_CFGH_FIFO_MODE	(1 << 1)
#define DWC_CFGH_PROTCTL(x)	((x) << 2)	/* AHB HPROT[3:1] value */
#define DWC_CFGH_PROTCTL_DATA	(0 << 2)	/* data access - always set */
#define DWC_CFGH_PROTCTL_PRIV	(1 << 2)	/* privileged -> AHB HPROT[1] */
#define DWC_CFGH_PROTCTL_BUFFER	(2 << 2)	/* bufferable -> AHB HPROT[2] */
#define DWC_CFGH_PROTCTL_CACHE	(4 << 2)	/* cacheable  -> AHB HPROT[3] */
#define DWC_CFGH_DS_UPD_EN	(1 << 5)	/* dst status update enable */
#define DWC_CFGH_SS_UPD_EN	(1 << 6)	/* src status update enable */
#define DWC_CFGH_SRC_PER(x)	((x) << 7)	/* src peripheral (handshake) id */
#define DWC_CFGH_DST_PER(x)	((x) << 11)	/* dst peripheral (handshake) id */
    210
/* Bitfields in SGR (source gather) */
#define DWC_SGR_SGI(x)		((x) << 0)	/* gather interval */
#define DWC_SGR_SGC(x)		((x) << 20)	/* gather count */

/* Bitfields in DSR (destination scatter) */
#define DWC_DSR_DSI(x)		((x) << 0)	/* scatter interval */
#define DWC_DSR_DSC(x)		((x) << 20)	/* scatter count */

/* Bitfields in CFG */
#define DW_CFG_DMA_EN		(1 << 0)	/* global controller enable */
    221
/* iDMA 32-bit support */

/*
 * bursts size: iDMA 32-bit encoding of the burst transaction length
 * (1, 2, 4, ... 32 data items).
 */
enum idma32_msize {
	IDMA32_MSIZE_1,
	IDMA32_MSIZE_2,
	IDMA32_MSIZE_4,
	IDMA32_MSIZE_8,
	IDMA32_MSIZE_16,
	IDMA32_MSIZE_32,
};
    233
/* Bitfields in CTL_HI (iDMA 32-bit: wider block size, DONE moved to 17) */
#define IDMA32C_CTLH_BLOCK_TS_MASK	GENMASK(16, 0)
#define IDMA32C_CTLH_BLOCK_TS(x)	((x) & IDMA32C_CTLH_BLOCK_TS_MASK)
#define IDMA32C_CTLH_DONE		(1 << 17)

/* Bitfields in CFG_LO */
#define IDMA32C_CFGL_DST_BURST_ALIGN	(1 << 0)	/* dst burst align */
#define IDMA32C_CFGL_SRC_BURST_ALIGN	(1 << 1)	/* src burst align */
#define IDMA32C_CFGL_CH_DRAIN		(1 << 10)	/* drain FIFO */
#define IDMA32C_CFGL_DST_OPT_BL		(1 << 20)	/* optimize dst burst length */
#define IDMA32C_CFGL_SRC_OPT_BL		(1 << 21)	/* optimize src burst length */

/* Bitfields in CFG_HI */
#define IDMA32C_CFGH_SRC_PER(x)		((x) << 0)
#define IDMA32C_CFGH_DST_PER(x)		((x) << 4)
#define IDMA32C_CFGH_RD_ISSUE_THD(x)	((x) << 8)
#define IDMA32C_CFGH_RW_ISSUE_THD(x)	((x) << 18)
#define IDMA32C_CFGH_SRC_PER_EXT(x)	((x) << 28)	/* src peripheral extension */
#define IDMA32C_CFGH_DST_PER_EXT(x)	((x) << 30)	/* dst peripheral extension */

/* Bitfields in FIFO_PARTITION */
#define IDMA32C_FP_PSIZE_CH0(x)		((x) << 0)
#define IDMA32C_FP_PSIZE_CH1(x)		((x) << 13)
#define IDMA32C_FP_UPDATE		(1 << 26)
    258
/* Bit numbers for the dw_dma_chan::flags bitmask. */
enum dw_dmac_flags {
	DW_DMA_IS_CYCLIC = 0,		/* channel runs a cyclic transfer */
	DW_DMA_IS_SOFT_LLP = 1,		/* LLP chaining emulated in software */
	DW_DMA_IS_PAUSED = 2,		/* transfer suspended */
	DW_DMA_IS_INITIALIZED = 3,	/* channel hardware initialized */
};
    265
/* Driver state for one DMA channel; wraps the dmaengine struct dma_chan. */
struct dw_dma_chan {
	struct dma_chan			chan;
	void __iomem			*ch_regs;	/* this channel's register bank */
	u8				mask;		/* channel bit, see channel_set_bit() */
	u8				priority;
	enum dma_transfer_direction	direction;

	/* software emulation of the LLP transfers */
	struct list_head	*tx_node_active;

	spinlock_t		lock;

	/* these other elements are all protected by lock */
	unsigned long		flags;		/* enum dw_dmac_flags bits */
	struct list_head	active_list;
	struct list_head	queue;

	unsigned int		descs_allocated;

	/* hardware configuration */
	unsigned int		block_size;
	bool			nollp;		/* no hw LLP chaining support */
	u32			max_burst;

	/* custom slave configuration */
	struct dw_dma_slave	dws;

	/* configuration passed via .device_config */
	struct dma_slave_config dma_sconfig;
};
    296
/* Return the MMIO base of this channel's register bank, typed. */
static inline struct dw_dma_chan_regs __iomem *
__dwc_regs(struct dw_dma_chan *dwc)
{
	return dwc->ch_regs;
}

/* Access a named per-channel register, e.g. channel_readl(dwc, CTL_LO). */
#define channel_readl(dwc, name) \
	readl(&(__dwc_regs(dwc)->name))
#define channel_writel(dwc, name, val) \
	writel((val), &(__dwc_regs(dwc)->name))
    307
    308static inline struct dw_dma_chan *to_dw_dma_chan(struct dma_chan *chan)
    309{
    310	return container_of(chan, struct dw_dma_chan, chan);
    311}
    312
/*
 * Driver state for the whole controller, including the per-variant
 * channel/device operation callbacks (DW vs. iDMA 32-bit).
 */
struct dw_dma {
	struct dma_device	dma;
	char			name[20];
	void __iomem		*regs;		/* global register map base */
	struct dma_pool		*desc_pool;
	struct tasklet_struct	tasklet;

	/* channels */
	struct dw_dma_chan	*chan;
	u8			all_chan_mask;	/* one bit per channel */
	u8			in_use;		/* channels currently claimed */

	/* Channel operations */
	void	(*initialize_chan)(struct dw_dma_chan *dwc);
	void	(*suspend_chan)(struct dw_dma_chan *dwc, bool drain);
	void	(*resume_chan)(struct dw_dma_chan *dwc, bool drain);
	u32	(*prepare_ctllo)(struct dw_dma_chan *dwc);
	void	(*encode_maxburst)(struct dw_dma_chan *dwc, u32 *maxburst);
	u32	(*bytes2block)(struct dw_dma_chan *dwc, size_t bytes,
			       unsigned int width, size_t *len);
	size_t	(*block2bytes)(struct dw_dma_chan *dwc, u32 block, u32 width);

	/* Device operations */
	void (*set_device_name)(struct dw_dma *dw, int id);
	void (*disable)(struct dw_dma *dw);
	void (*enable)(struct dw_dma *dw);

	/* platform data */
	struct dw_dma_platform_data	*pdata;
};
    343
/* Return the MMIO base of the controller's global register map, typed. */
static inline struct dw_dma_regs __iomem *__dw_regs(struct dw_dma *dw)
{
	return dw->regs;
}

/* Access a named global register, e.g. dma_readl(dw, CFG). */
#define dma_readl(dw, name) \
	readl(&(__dw_regs(dw)->name))
#define dma_writel(dw, name, val) \
	writel((val), &(__dw_regs(dw)->name))

/* 64-bit access done as two 32-bit halves, high word first (iDMA 32-bit). */
#define idma32_readq(dw, name)				\
	hi_lo_readq(&(__dw_regs(dw)->name))
#define idma32_writeq(dw, name, val)			\
	hi_lo_writeq((val), &(__dw_regs(dw)->name))

/*
 * Channel-enable style registers: the upper byte acts as a write-enable
 * mask for the lower byte, so single channel bits can be set or cleared
 * without a read-modify-write cycle.
 */
#define channel_set_bit(dw, reg, mask) \
	dma_writel(dw, reg, ((mask) << 8) | (mask))
#define channel_clear_bit(dw, reg, mask) \
	dma_writel(dw, reg, ((mask) << 8) | 0)
    363
    364static inline struct dw_dma *to_dw_dma(struct dma_device *ddev)
    365{
    366	return container_of(ddev, struct dw_dma, dma);
    367}
    368
/* LLI == Linked List Item; a.k.a. DMA block descriptor */
/* Hardware-defined layout, little-endian on the bus; do not reorder. */
struct dw_lli {
	/* values that are not changed by hardware */
	__le32		sar;		/* source address */
	__le32		dar;		/* destination address */
	__le32		llp;		/* chain to next lli */
	__le32		ctllo;		/* CTL_LO image */
	/* values that may get written back: */
	__le32		ctlhi;		/* CTL_HI image */
	/* sstat and dstat can snapshot peripheral register state.
	 * silicon config may discard either or both...
	 */
	__le32		sstat;
	__le32		dstat;
};
    384
/* Software descriptor: the hardware lli plus driver bookkeeping. */
struct dw_desc {
	/* FIRST values the hardware uses */
	struct dw_lli			lli;

/* Endian-safe accessors for the hardware lli fields above. */
#define lli_set(d, reg, v)		((d)->lli.reg |= cpu_to_le32(v))
#define lli_clear(d, reg, v)		((d)->lli.reg &= ~cpu_to_le32(v))
#define lli_read(d, reg)		le32_to_cpu((d)->lli.reg)
#define lli_write(d, reg, v)		((d)->lli.reg = cpu_to_le32(v))

	/* THEN values for driver housekeeping */
	struct list_head		desc_node;
	struct list_head		tx_list;
	struct dma_async_tx_descriptor	txd;
	size_t				len;		/* this segment's length */
	size_t				total_len;	/* whole transfer length */
	u32				residue;	/* bytes not yet transferred */
};
    402
/* Map a desc_node list head back to its dw_desc. */
#define to_dw_desc(h)	list_entry(h, struct dw_desc, desc_node)

/* Map an async tx descriptor back to its enclosing dw_desc. */
static inline struct dw_desc *
txd_to_dw_desc(struct dma_async_tx_descriptor *txd)
{
	return container_of(txd, struct dw_desc, txd);
}