cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

stm32-mdma.c (50839B)


// SPDX-License-Identifier: GPL-2.0-only
/*
 *
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 */

#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define STM32_MDMA_GISR0		0x0000 /* MDMA Int Status Reg */

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					| STM32_MDMA_CIFCR_CBTIF \
					| STM32_MDMA_CIFCR_CBRTIF \
					| STM32_MDMA_CIFCR_CCTCIF \
					| STM32_MDMA_CIFCR_CTEIF)

/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)

/* MDMA Channel x control register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_SM		BIT(8)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		FIELD_PREP(STM32_MDMA_CCR_PL_MASK, (n))
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					| STM32_MDMA_CCR_BTIE \
					| STM32_MDMA_CCR_BRTIE \
					| STM32_MDMA_CCR_CTCIE \
					| STM32_MDMA_CCR_TEIE)

/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		FIELD_PREP(STM32_MDMA_CTCR_TRGM_MSK, (n))
#define STM32_MDMA_CTCR_TRGM_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TRGM_MSK, (n))
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		FIELD_PREP(STM32_MDMA_CTCR_PAM_MASK, (n))
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		FIELD_PREP(STM32_MDMA_CTCR_TLEN_MSK, (n))
#define STM32_MDMA_CTCR_TLEN_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TLEN_MSK, (n))
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		FIELD_PREP(STM32_MDMA_CTCR_LEN2_MSK, (n))
#define STM32_MDMA_CTCR_LEN2_GET(n)	FIELD_GET(STM32_MDMA_CTCR_LEN2_MSK, (n))
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_DBURST_MASK, (n))
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_SBURST_MASK, (n))
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_DINCOS_MASK, (n))
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_SINCOS_MASK, (n))
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_DSIZE_MASK, (n))
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_SSIZE_MASK, (n))
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		FIELD_PREP(STM32_MDMA_CTCR_DINC_MASK, (n))
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		FIELD_PREP(STM32_MDMA_CTCR_SINC_MASK, (n))
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					| STM32_MDMA_CTCR_DINC_MASK \
					| STM32_MDMA_CTCR_SINCOS_MASK \
					| STM32_MDMA_CTCR_DINCOS_MASK \
					| STM32_MDMA_CTCR_LEN2_MSK \
					| STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BRC_MK, (n))
#define STM32_MDMA_CBNDTR_BRC_GET(n)	FIELD_GET(STM32_MDMA_CBNDTR_BRC_MK, (n))

#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BNDT_MASK, (n))

/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_DUV_MASK, (n))
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_SUV_MASK, (n))

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(5, 0)
#define STM32_MDMA_CTBR_TSEL(n)		FIELD_PREP(STM32_MDMA_CTBR_TSEL_MASK, (n))

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))

#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		32
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x3

enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};

enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

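/* Encoding of the 2-bit SINC/DINC address increment fields in CTCR */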
enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
	u32 request;
	u32 priority_level;
	u32 transfer_config;
	u32 mask_addr;
	u32 mask_data;
};

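/*
 * In-memory layout of one linked-list node: the fields mirror the channel
 * registers CTCR through CMDR (see stm32_mdma_start_transfer()), with CLAR
 * pointing to the next node and 'dummy' padding the reserved word at
 * offset 0x6C.
 */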
struct stm32_mdma_hwdesc {
	u32 ctcr;
	u32 cbndtr;
	u32 csar;
	u32 cdar;
	u32 cbrur;
	u32 clar;
	u32 ctbr;
	u32 dummy;
	u32 cmar;
	u32 cmdr;
} __aligned(64);

struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;
	dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;
	u32 ccr;
	bool cyclic;
	u32 count;
	struct stm32_mdma_desc_node node[];
};

struct stm32_mdma_chan {
	struct virt_dma_chan vchan;
	struct dma_pool *desc_pool;
	u32 id;
	struct stm32_mdma_desc *desc;
	u32 curr_hwdesc;
	struct dma_slave_config dma_config;
	struct stm32_mdma_chan_config chan_config;
	bool busy;
	u32 mem_burst;
	u32 mem_width;
};

struct stm32_mdma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	int irq;
	u32 nr_channels;
	u32 nr_requests;
	u32 nr_ahb_addr_masks;
	u32 chan_reserved;
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[];
};

static struct stm32_mdma_device *stm32_mdma_get_dev(
	struct stm32_mdma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
			    ddev);
}

static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_mdma_chan, vchan.chan);
}

static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
	return mdma_dev->ddev.dev;
}

static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}

static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
		struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(struct_size(desc, node, count), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < count; i++) {
		desc->node[i].hwdesc =
			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
				       &desc->node[i].hwdesc_phys);
		if (!desc->node[i].hwdesc)
			goto err;
	}

	desc->count = count;

	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
	int i;

	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
}

static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
				enum dma_slave_buswidth width)
{
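	/*
	 * Convert a bus width in bytes (1, 2, 4 or 8) into the 2-bit
	 * {S,D}SIZE register encoding: ffs(width) - 1 maps 1/2/4/8 to
	 * 0/1/2/3.
	 */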
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(width) - 1;
	default:
		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
			width);
		return -EINVAL;
	}
}

static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}

static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 best_burst;

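	/*
	 * Take the largest power of two that divides both tlen and buf_len,
	 * cap it at max_burst * width bytes, then convert the result from
	 * bytes to beats of 'width' bytes.
	 */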
	best_burst = min((u32)1 << __ffs(tlen | buf_len),
			 max_burst * width) / width;

	return (best_burst > 0) ? best_burst : 1;
}

static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}

static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 status;
	int ret;

	/* Disable DMA */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
	}

	chan->busy = false;
}

static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* Check if memory device is on AHB or AXI */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xF0000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}

static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;

	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * For buffer transfer length (TLEN) we have to set
	 * the number of bytes - 1 in CTCR register
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request as in this case this is a HW one
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

		/* Set device burst value */
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst value */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

		/* Set device burst value */
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst value */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}

static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
				   struct stm32_mdma_desc_node *node)
{
	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
}

static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 next = count + 1;

	hwdesc = desc->node[count].hwdesc;
	hwdesc->ctcr = ctcr;
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM |
			STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

	if (is_last) {
		if (is_cyclic)
			hwdesc->clar = desc->node[0].hwdesc_phys;
		else
			hwdesc->clar = 0;
	} else {
		hwdesc->clar = desc->node[next].hwdesc_phys;
	}

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}

static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr;
	int i, ret = 0;

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	if (sg_len > 1)
		ccr |= STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	return 0;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
			 u32 sg_len, enum dma_transfer_direction direction,
			 unsigned long flags, void *context)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_desc *desc;
	int i, ret;

	/*
	 * Once the DMA channel is set up in cyclic mode, it cannot be
	 * assigned again: the channel needs to be aborted or terminated
	 * before another request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	desc = stm32_mdma_alloc_desc(chan, sg_len);
	if (!desc)
		return NULL;

	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
	if (ret < 0)
		goto xfer_setup_err;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the DMA channel is set up in cyclic mode, it cannot be
	 * assigned again: the channel needs to be aborted or terminated
	 * before another request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = buf_addr + i * period_len;
		}

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the DMA channel is set up in cyclic mode, it cannot be
	 * assigned again: the channel needs to be aborted or terminated
	 * before another request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

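	/*
	 * Split the copy into blocks of at most STM32_MDMA_MAX_BLOCK_LEN
	 * bytes; a single hwdesc covers one block, larger copies use the
	 * linked-list path below.
	 */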
	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/* Set source best burst size */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}

static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		chan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}

static void stm32_mdma_issue_pending(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (!vchan_issue_pending(&chan->vchan))
		goto end;

	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);

	if (!chan->desc && !chan->busy)
		stm32_mdma_start_transfer(chan);

end:
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_mdma_pause(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	ret = stm32_mdma_disable_chan(chan);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	if (!ret)
		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

	return ret;
}

static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}

static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->desc) {
		vchan_terminate_vdesc(&chan->desc->vdesc);
		if (chan->busy)
			stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_mdma_synchronize(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	vchan_synchronize(&chan->vchan);
}

static int stm32_mdma_slave_config(struct dma_chan *c,
				   struct dma_slave_config *config)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	memcpy(&chan->dma_config, config, sizeof(*config));

	return 0;
}

static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
				      struct stm32_mdma_desc *desc,
				      u32 curr_hwdesc)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	u32 cbndtr, residue, modulo, burst_size;
	int i;

	residue = 0;
	for (i = curr_hwdesc + 1; i < desc->count; i++) {
		hwdesc = desc->node[i].hwdesc;
		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
	}
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;

	if (!chan->mem_burst)
		return residue;

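	/*
	 * Round the residue up to a multiple of the memory burst size, as
	 * the driver advertises DMA_RESIDUE_GRANULARITY_BURST.
	 */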
	burst_size = chan->mem_burst * chan->mem_width;
	modulo = residue % burst_size;
	if (modulo)
		residue = residue - modulo + burst_size;

	return residue;
}

static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
					    dma_cookie_t cookie,
					    struct dma_tx_state *state)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if ((status == DMA_COMPLETE) || (!state))
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_mdma_desc_residue(chan, chan->desc,
						  chan->curr_hwdesc);
	else if (vdesc)
		residue = stm32_mdma_desc_residue(chan,
						  to_stm32_mdma_desc(vdesc), 0);
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
{
	vchan_cookie_complete(&chan->desc->vdesc);
	chan->desc = NULL;
	chan->busy = false;

	/* Start the next transfer if this driver has a next desc */
	stm32_mdma_start_transfer(chan);
}

static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
{
	struct stm32_mdma_device *dmadev = devid;
	struct stm32_mdma_chan *chan;
	u32 reg, id, ccr, ien, status;

	/* Find out which channel generates the interrupt */
	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
	if (!status) {
		dev_dbg(mdma2dev(dmadev), "spurious it\n");
		return IRQ_NONE;
	}
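	/* GISR0 has one pending bit per channel: service the lowest one */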
	id = __ffs(status);

	chan = &dmadev->chan[id];
	if (!chan) {
		dev_warn(mdma2dev(dmadev), "MDMA channel not initialized\n");
		return IRQ_NONE;
	}

	/* Handle interrupt for the channel */
	spin_lock(&chan->vchan.lock);
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	/* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
	status &= ~STM32_MDMA_CISR_CRQA;
	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
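	/*
	 * The CCR interrupt-enable bits [5:1] mirror the CISR flag bits
	 * [4:0], so a right shift by one aligns enables with status.
	 */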
	ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;

	if (!(status & ien)) {
		spin_unlock(&chan->vchan.lock);
		if (chan->busy)
			dev_warn(chan2dev(chan),
				 "spurious it (status=0x%04x, ien=0x%04x)\n", status, ien);
		else
			dev_dbg(chan2dev(chan),
				"spurious it (status=0x%04x, ien=0x%04x)\n", status, ien);
		return IRQ_NONE;
	}

	reg = STM32_MDMA_CIFCR(id);

	if (status & STM32_MDMA_CISR_TEIF) {
		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
		status &= ~STM32_MDMA_CISR_TEIF;
	}

	if (status & STM32_MDMA_CISR_CTCIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
		status &= ~STM32_MDMA_CISR_CTCIF;
		stm32_mdma_xfer_end(chan);
	}

	if (status & STM32_MDMA_CISR_BRTIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
		status &= ~STM32_MDMA_CISR_BRTIF;
	}

	if (status & STM32_MDMA_CISR_BTIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
		status &= ~STM32_MDMA_CISR_BTIF;
		chan->curr_hwdesc++;
		if (chan->desc && chan->desc->cyclic) {
			if (chan->curr_hwdesc == chan->desc->count)
				chan->curr_hwdesc = 0;
			vchan_cyclic_callback(&chan->desc->vdesc);
		}
	}

	if (status & STM32_MDMA_CISR_TCIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
		status &= ~STM32_MDMA_CISR_TCIF;
	}

	if (status) {
		stm32_mdma_set_bits(dmadev, reg, status);
		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
		if (!(ccr & STM32_MDMA_CCR_EN))
			dev_err(chan2dev(chan), "chan disabled by HW\n");
	}

	spin_unlock(&chan->vchan.lock);

	return IRQ_HANDLED;
}

static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	int ret;

	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
					   c->device->dev,
					   sizeof(struct stm32_mdma_hwdesc),
					  __alignof__(struct stm32_mdma_hwdesc),
					   0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
		return -ENOMEM;
	}

	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
	if (ret < 0)
		return ret;

	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		pm_runtime_put(dmadev->ddev.dev);

	return ret;
}

static void stm32_mdma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_mdma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	pm_runtime_put(dmadev->ddev.dev);
	vchan_free_chan_resources(to_virt_chan(c));
	dmam_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}

static bool stm32_mdma_filter_fn(struct dma_chan *c, void *fn_param)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	/* Check if chan is marked Secure */
	if (dmadev->chan_reserved & BIT(chan->id))
		return false;

	return true;
}

static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{
	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
	dma_cap_mask_t mask = dmadev->ddev.cap_mask;
	struct stm32_mdma_chan *chan;
	struct dma_chan *c;
	struct stm32_mdma_chan_config config;

	if (dma_spec->args_count < 5) {
		dev_err(mdma2dev(dmadev), "Bad number of args\n");
		return NULL;
	}

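	/*
	 * The five DT cells are: request line, priority level, CTCR transfer
	 * configuration, mask address (CMAR) and mask data (CMDR).
	 */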
	config.request = dma_spec->args[0];
	config.priority_level = dma_spec->args[1];
	config.transfer_config = dma_spec->args[2];
	config.mask_addr = dma_spec->args[3];
	config.mask_data = dma_spec->args[4];

	if (config.request >= dmadev->nr_requests) {
		dev_err(mdma2dev(dmadev), "Bad request line\n");
		return NULL;
	}

	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
		return NULL;
	}

	c = __dma_request_channel(&mask, stm32_mdma_filter_fn, &config, ofdma->of_node);
	if (!c) {
		dev_err(mdma2dev(dmadev), "No more channels available\n");
		return NULL;
	}

	chan = to_stm32_mdma_chan(c);
	chan->chan_config = config;

	return c;
}

static const struct of_device_id stm32_mdma_of_match[] = {
	{ .compatible = "st,stm32h7-mdma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);

static int stm32_mdma_probe(struct platform_device *pdev)
{
	struct stm32_mdma_chan *chan;
	struct stm32_mdma_device *dmadev;
	struct dma_device *dd;
	struct device_node *of_node;
	struct resource *res;
	struct reset_control *rst;
	u32 nr_channels, nr_requests;
	int i, count, ret;

	of_node = pdev->dev.of_node;
	if (!of_node)
		return -ENODEV;

	ret = device_property_read_u32(&pdev->dev, "dma-channels",
				       &nr_channels);
	if (ret) {
		nr_channels = STM32_MDMA_MAX_CHANNELS;
		dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n",
			 nr_channels);
	}

	ret = device_property_read_u32(&pdev->dev, "dma-requests",
				       &nr_requests);
	if (ret) {
		nr_requests = STM32_MDMA_MAX_REQUESTS;
		dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n",
			 nr_requests);
	}

	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
	if (count < 0)
		count = 0;

	dmadev = devm_kzalloc(&pdev->dev,
			      struct_size(dmadev, ahb_addr_masks, count),
			      GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dmadev->nr_channels = nr_channels;
	dmadev->nr_requests = nr_requests;
	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
				       dmadev->ahb_addr_masks,
				       count);
	dmadev->nr_ahb_addr_masks = count;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
				     "Missing clock controller\n");

	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
		return ret;
	}

	rst = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_clk;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	dd = &dmadev->ddev;
	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
	dd->device_tx_status = stm32_mdma_tx_status;
	dd->device_issue_pending = stm32_mdma_issue_pending;
	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
	dd->device_config = stm32_mdma_slave_config;
	dd->device_pause = stm32_mdma_pause;
	dd->device_resume = stm32_mdma_resume;
	dd->device_terminate_all = stm32_mdma_terminate_all;
	dd->device_synchronize = stm32_mdma_synchronize;
	dd->descriptor_reuse = true;

	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
		BIT(DMA_MEM_TO_MEM);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->max_burst = STM32_MDMA_MAX_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

	for (i = 0; i < dmadev->nr_channels; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;

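		/*
		 * Channels with the Secure mode (SM) bit already set are
		 * reserved and filtered out in stm32_mdma_filter_fn().
		 */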
		if (stm32_mdma_read(dmadev, STM32_MDMA_CCR(i)) & STM32_MDMA_CCR_SM)
			dmadev->chan_reserved |= BIT(i);

		chan->vchan.desc_free = stm32_mdma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	dmadev->irq = platform_get_irq(pdev, 0);
	if (dmadev->irq < 0) {
		ret = dmadev->irq;
		goto err_clk;
	}

	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
			       0, dev_name(&pdev->dev), dmadev);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		goto err_clk;
	}

	ret = dmaenginem_async_device_register(dd);
	if (ret)
		goto err_clk;

	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 MDMA DMA OF registration failed %d\n", ret);
		goto err_clk;
	}

	platform_set_drvdata(pdev, dmadev);
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_put(&pdev->dev);

	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");

	return 0;

err_clk:
	clk_disable_unprepare(dmadev->clk);

	return ret;
}

#ifdef CONFIG_PM
static int stm32_mdma_runtime_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);

	clk_disable_unprepare(dmadev->clk);

	return 0;
}

static int stm32_mdma_runtime_resume(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(dmadev->clk);
	if (ret) {
		dev_err(dev, "failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

#ifdef CONFIG_PM_SLEEP
static int stm32_mdma_pm_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	u32 ccr, id;
	int ret;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	for (id = 0; id < dmadev->nr_channels; id++) {
		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
		if (ccr & STM32_MDMA_CCR_EN) {
			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
			return -EBUSY;
		}
	}

	pm_runtime_put_sync(dev);

	pm_runtime_force_suspend(dev);

	return 0;
}

static int stm32_mdma_pm_resume(struct device *dev)
{
	return pm_runtime_force_resume(dev);
}
#endif

static const struct dev_pm_ops stm32_mdma_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
			   stm32_mdma_runtime_resume, NULL)
};

static struct platform_driver stm32_mdma_driver = {
	.probe = stm32_mdma_probe,
	.driver = {
		.name = "stm32-mdma",
		.of_match_table = stm32_mdma_of_match,
		.pm = &stm32_mdma_pm_ops,
	},
};

static int __init stm32_mdma_init(void)
{
	return platform_driver_register(&stm32_mdma_driver);
}

subsys_initcall(stm32_mdma_init);

MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
MODULE_LICENSE("GPL v2");