cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

spi-stm32.c (58287B)


      1// SPDX-License-Identifier: GPL-2.0
      2//
      3// STMicroelectronics STM32 SPI Controller driver (master mode only)
      4//
      5// Copyright (C) 2017, STMicroelectronics - All Rights Reserved
      6// Author(s): Amelie Delaunay <amelie.delaunay@st.com> for STMicroelectronics.
      7
      8#include <linux/bitfield.h>
      9#include <linux/debugfs.h>
     10#include <linux/clk.h>
     11#include <linux/delay.h>
     12#include <linux/dmaengine.h>
     13#include <linux/interrupt.h>
     14#include <linux/iopoll.h>
     15#include <linux/module.h>
     16#include <linux/of_platform.h>
     17#include <linux/pinctrl/consumer.h>
     18#include <linux/pm_runtime.h>
     19#include <linux/reset.h>
     20#include <linux/spi/spi.h>
     21
     22#define DRIVER_NAME "spi_stm32"
     23
     24/* STM32F4 SPI registers */
     25#define STM32F4_SPI_CR1			0x00
     26#define STM32F4_SPI_CR2			0x04
     27#define STM32F4_SPI_SR			0x08
     28#define STM32F4_SPI_DR			0x0C
     29#define STM32F4_SPI_I2SCFGR		0x1C
     30
     31/* STM32F4_SPI_CR1 bit fields */
     32#define STM32F4_SPI_CR1_CPHA		BIT(0)
     33#define STM32F4_SPI_CR1_CPOL		BIT(1)
     34#define STM32F4_SPI_CR1_MSTR		BIT(2)
     35#define STM32F4_SPI_CR1_BR_SHIFT	3
     36#define STM32F4_SPI_CR1_BR		GENMASK(5, 3)
     37#define STM32F4_SPI_CR1_SPE		BIT(6)
     38#define STM32F4_SPI_CR1_LSBFRST		BIT(7)
     39#define STM32F4_SPI_CR1_SSI		BIT(8)
     40#define STM32F4_SPI_CR1_SSM		BIT(9)
     41#define STM32F4_SPI_CR1_RXONLY		BIT(10)
     42#define STM32F4_SPI_CR1_DFF		BIT(11)
     43#define STM32F4_SPI_CR1_CRCNEXT		BIT(12)
     44#define STM32F4_SPI_CR1_CRCEN		BIT(13)
     45#define STM32F4_SPI_CR1_BIDIOE		BIT(14)
     46#define STM32F4_SPI_CR1_BIDIMODE	BIT(15)
     47#define STM32F4_SPI_CR1_BR_MIN		0
     48#define STM32F4_SPI_CR1_BR_MAX		(GENMASK(5, 3) >> 3)
     49
     50/* STM32F4_SPI_CR2 bit fields */
     51#define STM32F4_SPI_CR2_RXDMAEN		BIT(0)
     52#define STM32F4_SPI_CR2_TXDMAEN		BIT(1)
     53#define STM32F4_SPI_CR2_SSOE		BIT(2)
     54#define STM32F4_SPI_CR2_FRF		BIT(4)
     55#define STM32F4_SPI_CR2_ERRIE		BIT(5)
     56#define STM32F4_SPI_CR2_RXNEIE		BIT(6)
     57#define STM32F4_SPI_CR2_TXEIE		BIT(7)
     58
     59/* STM32F4_SPI_SR bit fields */
     60#define STM32F4_SPI_SR_RXNE		BIT(0)
     61#define STM32F4_SPI_SR_TXE		BIT(1)
     62#define STM32F4_SPI_SR_CHSIDE		BIT(2)
     63#define STM32F4_SPI_SR_UDR		BIT(3)
     64#define STM32F4_SPI_SR_CRCERR		BIT(4)
     65#define STM32F4_SPI_SR_MODF		BIT(5)
     66#define STM32F4_SPI_SR_OVR		BIT(6)
     67#define STM32F4_SPI_SR_BSY		BIT(7)
     68#define STM32F4_SPI_SR_FRE		BIT(8)
     69
     70/* STM32F4_SPI_I2SCFGR bit fields */
     71#define STM32F4_SPI_I2SCFGR_I2SMOD	BIT(11)
     72
     73/* STM32F4 SPI Baud Rate min/max divisor */
     74#define STM32F4_SPI_BR_DIV_MIN		(2 << STM32F4_SPI_CR1_BR_MIN)
     75#define STM32F4_SPI_BR_DIV_MAX		(2 << STM32F4_SPI_CR1_BR_MAX)
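
/*
 * For illustration: BR is a 3-bit field (STM32F4_SPI_CR1_BR_MAX = 7), so the
 * divisor applied to spi->clk_rate is 2^(BR + 1), giving the range
 * STM32F4_SPI_BR_DIV_MIN = 2 up to STM32F4_SPI_BR_DIV_MAX = 256.
 */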
     76
     77/* STM32H7 SPI registers */
     78#define STM32H7_SPI_CR1			0x00
     79#define STM32H7_SPI_CR2			0x04
     80#define STM32H7_SPI_CFG1		0x08
     81#define STM32H7_SPI_CFG2		0x0C
     82#define STM32H7_SPI_IER			0x10
     83#define STM32H7_SPI_SR			0x14
     84#define STM32H7_SPI_IFCR		0x18
     85#define STM32H7_SPI_TXDR		0x20
     86#define STM32H7_SPI_RXDR		0x30
     87#define STM32H7_SPI_I2SCFGR		0x50
     88
     89/* STM32H7_SPI_CR1 bit fields */
     90#define STM32H7_SPI_CR1_SPE		BIT(0)
     91#define STM32H7_SPI_CR1_MASRX		BIT(8)
     92#define STM32H7_SPI_CR1_CSTART		BIT(9)
     93#define STM32H7_SPI_CR1_CSUSP		BIT(10)
     94#define STM32H7_SPI_CR1_HDDIR		BIT(11)
     95#define STM32H7_SPI_CR1_SSI		BIT(12)
     96
     97/* STM32H7_SPI_CR2 bit fields */
     98#define STM32H7_SPI_CR2_TSIZE		GENMASK(15, 0)
     99#define STM32H7_SPI_TSIZE_MAX		GENMASK(15, 0)
    100
    101/* STM32H7_SPI_CFG1 bit fields */
    102#define STM32H7_SPI_CFG1_DSIZE		GENMASK(4, 0)
    103#define STM32H7_SPI_CFG1_FTHLV		GENMASK(8, 5)
    104#define STM32H7_SPI_CFG1_RXDMAEN	BIT(14)
    105#define STM32H7_SPI_CFG1_TXDMAEN	BIT(15)
    106#define STM32H7_SPI_CFG1_MBR		GENMASK(30, 28)
    107#define STM32H7_SPI_CFG1_MBR_SHIFT	28
    108#define STM32H7_SPI_CFG1_MBR_MIN	0
    109#define STM32H7_SPI_CFG1_MBR_MAX	(GENMASK(30, 28) >> 28)
    110
    111/* STM32H7_SPI_CFG2 bit fields */
    112#define STM32H7_SPI_CFG2_MIDI		GENMASK(7, 4)
    113#define STM32H7_SPI_CFG2_COMM		GENMASK(18, 17)
    114#define STM32H7_SPI_CFG2_SP		GENMASK(21, 19)
    115#define STM32H7_SPI_CFG2_MASTER		BIT(22)
    116#define STM32H7_SPI_CFG2_LSBFRST	BIT(23)
    117#define STM32H7_SPI_CFG2_CPHA		BIT(24)
    118#define STM32H7_SPI_CFG2_CPOL		BIT(25)
    119#define STM32H7_SPI_CFG2_SSM		BIT(26)
    120#define STM32H7_SPI_CFG2_AFCNTR		BIT(31)
    121
    122/* STM32H7_SPI_IER bit fields */
    123#define STM32H7_SPI_IER_RXPIE		BIT(0)
    124#define STM32H7_SPI_IER_TXPIE		BIT(1)
    125#define STM32H7_SPI_IER_DXPIE		BIT(2)
    126#define STM32H7_SPI_IER_EOTIE		BIT(3)
    127#define STM32H7_SPI_IER_TXTFIE		BIT(4)
    128#define STM32H7_SPI_IER_OVRIE		BIT(6)
    129#define STM32H7_SPI_IER_MODFIE		BIT(9)
    130#define STM32H7_SPI_IER_ALL		GENMASK(10, 0)
    131
    132/* STM32H7_SPI_SR bit fields */
    133#define STM32H7_SPI_SR_RXP		BIT(0)
    134#define STM32H7_SPI_SR_TXP		BIT(1)
    135#define STM32H7_SPI_SR_EOT		BIT(3)
    136#define STM32H7_SPI_SR_OVR		BIT(6)
    137#define STM32H7_SPI_SR_MODF		BIT(9)
    138#define STM32H7_SPI_SR_SUSP		BIT(11)
    139#define STM32H7_SPI_SR_RXPLVL		GENMASK(14, 13)
    140#define STM32H7_SPI_SR_RXWNE		BIT(15)
    141
    142/* STM32H7_SPI_IFCR bit fields */
    143#define STM32H7_SPI_IFCR_ALL		GENMASK(11, 3)
    144
    145/* STM32H7_SPI_I2SCFGR bit fields */
    146#define STM32H7_SPI_I2SCFGR_I2SMOD	BIT(0)
    147
    148/* STM32H7 SPI Master Baud Rate min/max divisor */
    149#define STM32H7_SPI_MBR_DIV_MIN		(2 << STM32H7_SPI_CFG1_MBR_MIN)
    150#define STM32H7_SPI_MBR_DIV_MAX		(2 << STM32H7_SPI_CFG1_MBR_MAX)
    151
    152/* STM32H7 SPI Communication mode */
    153#define STM32H7_SPI_FULL_DUPLEX		0
    154#define STM32H7_SPI_SIMPLEX_TX		1
    155#define STM32H7_SPI_SIMPLEX_RX		2
    156#define STM32H7_SPI_HALF_DUPLEX		3
    157
    158/* SPI Communication type */
    159#define SPI_FULL_DUPLEX		0
    160#define SPI_SIMPLEX_TX		1
    161#define SPI_SIMPLEX_RX		2
    162#define SPI_3WIRE_TX		3
    163#define SPI_3WIRE_RX		4
    164
    165#define STM32_SPI_AUTOSUSPEND_DELAY		1	/* 1 ms */
    166
    167/*
    168 * use PIO for small transfers, avoiding DMA setup/teardown overhead for drivers
    169 * without fifo buffers.
    170 */
    171#define SPI_DMA_MIN_BYTES	16
    172
    173/**
    174 * struct stm32_spi_reg - stm32 SPI register & bitfield desc
    175 * @reg:		register offset
    176 * @mask:		bitfield mask
    177 * @shift:		left shift
    178 */
    179struct stm32_spi_reg {
    180	int reg;
    181	int mask;
    182	int shift;
    183};
    184
    185/**
    186 * struct stm32_spi_regspec - stm32 registers definition, compatible dependent data
    187 * @en: enable register and SPI enable bit
    189 * @dma_rx_en: SPI DMA RX enable register and SPI DMA RX enable bit
    190 * @dma_tx_en: SPI DMA TX enable register and SPI DMA TX enable bit
    190 * @cpol: clock polarity register and polarity bit
    191 * @cpha: clock phase register and phase bit
    192 * @lsb_first: LSB transmitted first register and bit
    193 * @br: baud rate register and bitfields
    194 * @rx: SPI RX data register
    195 * @tx: SPI TX data register
    196 */
    197struct stm32_spi_regspec {
    198	const struct stm32_spi_reg en;
    199	const struct stm32_spi_reg dma_rx_en;
    200	const struct stm32_spi_reg dma_tx_en;
    201	const struct stm32_spi_reg cpol;
    202	const struct stm32_spi_reg cpha;
    203	const struct stm32_spi_reg lsb_first;
    204	const struct stm32_spi_reg br;
    205	const struct stm32_spi_reg rx;
    206	const struct stm32_spi_reg tx;
    207};
    208
    209struct stm32_spi;
    210
    211/**
    212 * struct stm32_spi_cfg - stm32 compatible configuration data
    213 * @regs: registers descriptions
    214 * @get_fifo_size: routine to get fifo size
    215 * @get_bpw_mask: routine to get bits per word mask
    216 * @disable: routine to disable controller
    217 * @config: routine to configure controller as SPI Master
    218 * @set_bpw: routine to configure registers for bits per word
    219 * @set_mode: routine to configure registers to desired mode
    220 * @set_data_idleness: optional routine to configure registers to desired idle
    221 * time between frames (if driver has this functionality)
    222 * @set_number_of_data: optional routine to configure registers to desired
    223 * number of data (if driver has this functionality)
    224 * @transfer_one_dma_start: routine to start transfer a single spi_transfer
    225 * using DMA
    226 * @dma_rx_cb: routine to call after DMA RX channel operation is complete
    227 * @dma_tx_cb: routine to call after DMA TX channel operation is complete
    228 * @transfer_one_irq: routine to configure interrupts for driver
    229 * @irq_handler_event: Interrupt handler for SPI controller events
    230 * @irq_handler_thread: thread of interrupt handler for SPI controller
    231 * @baud_rate_div_min: minimum baud rate divisor
    232 * @baud_rate_div_max: maximum baud rate divisor
    233 * @has_fifo: boolean to know if fifo is used for driver
    234 * @flags: compatible specific SPI controller flags used at registration time
    235 */
    236struct stm32_spi_cfg {
    237	const struct stm32_spi_regspec *regs;
    238	int (*get_fifo_size)(struct stm32_spi *spi);
    239	int (*get_bpw_mask)(struct stm32_spi *spi);
    240	void (*disable)(struct stm32_spi *spi);
    241	int (*config)(struct stm32_spi *spi);
    242	void (*set_bpw)(struct stm32_spi *spi);
    243	int (*set_mode)(struct stm32_spi *spi, unsigned int comm_type);
    244	void (*set_data_idleness)(struct stm32_spi *spi, u32 length);
    245	int (*set_number_of_data)(struct stm32_spi *spi, u32 length);
    246	void (*transfer_one_dma_start)(struct stm32_spi *spi);
    247	void (*dma_rx_cb)(void *data);
    248	void (*dma_tx_cb)(void *data);
    249	int (*transfer_one_irq)(struct stm32_spi *spi);
    250	irqreturn_t (*irq_handler_event)(int irq, void *dev_id);
    251	irqreturn_t (*irq_handler_thread)(int irq, void *dev_id);
    252	unsigned int baud_rate_div_min;
    253	unsigned int baud_rate_div_max;
    254	bool has_fifo;
    255	u16 flags;
    256};
    257
    258/**
    259 * struct stm32_spi - private data of the SPI controller
    260 * @dev: driver model representation of the controller
    261 * @master: controller master interface
    262 * @cfg: compatible configuration data
    263 * @base: virtual memory area
    264 * @clk: hw kernel clock feeding the SPI clock generator
    265 * @clk_rate: rate of the hw kernel clock feeding the SPI clock generator
    266 * @lock: prevent I/O concurrent access
    267 * @irq: SPI controller interrupt line
    268 * @fifo_size: size of the embedded fifo in bytes
    269 * @cur_midi: master inter-data idleness in ns
    270 * @cur_speed: speed configured in Hz
    271 * @cur_bpw: number of bits in a single SPI data frame
    272 * @cur_fthlv: fifo threshold level (data frames in a single data packet)
    273 * @cur_comm: SPI communication mode
    274 * @cur_xferlen: current transfer length in bytes
    275 * @cur_usedma: boolean to know if dma is used in current transfer
    276 * @tx_buf: data to be written, or NULL
    277 * @rx_buf: data to be read, or NULL
    278 * @tx_len: number of data to be written in bytes
    279 * @rx_len: number of data to be read in bytes
    280 * @dma_tx: dma channel for TX transfer
    281 * @dma_rx: dma channel for RX transfer
    282 * @phys_addr: SPI registers physical base address
    283 */
    284struct stm32_spi {
    285	struct device *dev;
    286	struct spi_master *master;
    287	const struct stm32_spi_cfg *cfg;
    288	void __iomem *base;
    289	struct clk *clk;
    290	u32 clk_rate;
    291	spinlock_t lock; /* prevent I/O concurrent access */
    292	int irq;
    293	unsigned int fifo_size;
    294
    295	unsigned int cur_midi;
    296	unsigned int cur_speed;
    297	unsigned int cur_bpw;
    298	unsigned int cur_fthlv;
    299	unsigned int cur_comm;
    300	unsigned int cur_xferlen;
    301	bool cur_usedma;
    302
    303	const void *tx_buf;
    304	void *rx_buf;
    305	int tx_len;
    306	int rx_len;
    307	struct dma_chan *dma_tx;
    308	struct dma_chan *dma_rx;
    309	dma_addr_t phys_addr;
    310};
    311
    312static const struct stm32_spi_regspec stm32f4_spi_regspec = {
    313	.en = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE },
    314
    315	.dma_rx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_RXDMAEN },
    316	.dma_tx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN },
    317
    318	.cpol = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPOL },
    319	.cpha = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPHA },
    320	.lsb_first = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_LSBFRST },
    321	.br = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_BR, STM32F4_SPI_CR1_BR_SHIFT },
    322
    323	.rx = { STM32F4_SPI_DR },
    324	.tx = { STM32F4_SPI_DR },
    325};
    326
    327static const struct stm32_spi_regspec stm32h7_spi_regspec = {
    328	/* SPI data transfer is enabled but spi_ker_ck is idle.
    329	 * CFG1 and CFG2 registers are write protected when SPE is enabled.
    330	 */
    331	.en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE },
    332
    333	.dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN },
    334	.dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN },
    335
    336	.cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL },
    337	.cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA },
    338	.lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST },
    339	.br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR,
    340		STM32H7_SPI_CFG1_MBR_SHIFT },
    341
    342	.rx = { STM32H7_SPI_RXDR },
    343	.tx = { STM32H7_SPI_TXDR },
    344};
    345
    346static inline void stm32_spi_set_bits(struct stm32_spi *spi,
    347				      u32 offset, u32 bits)
    348{
    349	writel_relaxed(readl_relaxed(spi->base + offset) | bits,
    350		       spi->base + offset);
    351}
    352
    353static inline void stm32_spi_clr_bits(struct stm32_spi *spi,
    354				      u32 offset, u32 bits)
    355{
    356	writel_relaxed(readl_relaxed(spi->base + offset) & ~bits,
    357		       spi->base + offset);
    358}
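
/*
 * These helpers perform a plain (non-atomic) read-modify-write of a register.
 * For example, stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE)
 * sets SPE while leaving the other CR1 bits untouched; callers serialize
 * register access with spi->lock, as in stm32h7_spi_get_fifo_size() below.
 */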
    359
    360/**
    361 * stm32h7_spi_get_fifo_size - Return fifo size
    362 * @spi: pointer to the spi controller data structure
    363 */
    364static int stm32h7_spi_get_fifo_size(struct stm32_spi *spi)
    365{
    366	unsigned long flags;
    367	u32 count = 0;
    368
    369	spin_lock_irqsave(&spi->lock, flags);
    370
    371	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
    372
    373	while (readl_relaxed(spi->base + STM32H7_SPI_SR) & STM32H7_SPI_SR_TXP)
    374		writeb_relaxed(++count, spi->base + STM32H7_SPI_TXDR);
    375
    376	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
    377
    378	spin_unlock_irqrestore(&spi->lock, flags);
    379
    380	dev_dbg(spi->dev, "%d x 8-bit fifo size\n", count);
    381
    382	return count;
    383}
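
/*
 * The probing loop above relies on the controller being enabled (SPE set)
 * without a transfer being started: each byte written to TXDR accumulates in
 * the TX FIFO and TXP stays set until the FIFO is full. For illustration, if
 * the loop stops after 16 writes, the reported fifo size is 16 bytes.
 */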
    384
    385/**
    386 * stm32f4_spi_get_bpw_mask - Return bits per word mask
    387 * @spi: pointer to the spi controller data structure
    388 */
    389static int stm32f4_spi_get_bpw_mask(struct stm32_spi *spi)
    390{
    391	dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n");
    392	return SPI_BPW_MASK(8) | SPI_BPW_MASK(16);
    393}
    394
    395/**
    396 * stm32h7_spi_get_bpw_mask - Return bits per word mask
    397 * @spi: pointer to the spi controller data structure
    398 */
    399static int stm32h7_spi_get_bpw_mask(struct stm32_spi *spi)
    400{
    401	unsigned long flags;
    402	u32 cfg1, max_bpw;
    403
    404	spin_lock_irqsave(&spi->lock, flags);
    405
    406	/*
    407	 * The most significant bit of the DSIZE bit field is reserved when the
    408	 * maximum data size of the peripheral instance is limited to 16-bit
    409	 */
    410	stm32_spi_set_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_DSIZE);
    411
    412	cfg1 = readl_relaxed(spi->base + STM32H7_SPI_CFG1);
    413	max_bpw = FIELD_GET(STM32H7_SPI_CFG1_DSIZE, cfg1) + 1;
    414
    415	spin_unlock_irqrestore(&spi->lock, flags);
    416
    417	dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw);
    418
    419	return SPI_BPW_RANGE_MASK(4, max_bpw);
    420}
    421
    422/**
    423 * stm32_spi_prepare_mbr - Determine baud rate divisor value
    424 * @spi: pointer to the spi controller data structure
    425 * @speed_hz: requested speed
    426 * @min_div: minimum baud rate divisor
    427 * @max_div: maximum baud rate divisor
    428 *
    429 * Return baud rate divisor value in case of success or -EINVAL
    430 */
    431static int stm32_spi_prepare_mbr(struct stm32_spi *spi, u32 speed_hz,
    432				 u32 min_div, u32 max_div)
    433{
    434	u32 div, mbrdiv;
    435
    436	/* Ensure spi->clk_rate is even */
    437	div = DIV_ROUND_UP(spi->clk_rate & ~0x1, speed_hz);
    438
    439	/*
    440	 * The SPI framework sets xfer->speed_hz to master->max_speed_hz if
    441	 * xfer->speed_hz is greater than master->max_speed_hz, and it returns
    442	 * an error when xfer->speed_hz is lower than master->min_speed_hz, so
    443	 * there is no need to check it here.
    444	 * However, the resulting divisor still has to be range-checked below.
    445	 */
    446	if ((div < min_div) || (div > max_div))
    447		return -EINVAL;
    448
    449	/* Determine the first power of 2 greater than or equal to div */
    450	if (div & (div - 1))
    451		mbrdiv = fls(div);
    452	else
    453		mbrdiv = fls(div) - 1;
    454
    455	spi->cur_speed = spi->clk_rate / (1 << mbrdiv);
    456
    457	return mbrdiv - 1;
    458}
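
/*
 * Worked example with assumed values: spi->clk_rate = 100 MHz and
 * speed_hz = 12 MHz give div = DIV_ROUND_UP(100000000, 12000000) = 9.
 * Since 9 is not a power of two, mbrdiv = fls(9) = 4, the effective speed
 * becomes 100 MHz / 2^4 = 6.25 MHz, and the function returns 3, i.e. the
 * value to program into the MBR/BR bitfield (divisor = 2^(MBR + 1) = 16).
 */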
    459
    460/**
    461 * stm32h7_spi_prepare_fthlv - Determine FIFO threshold level
    462 * @spi: pointer to the spi controller data structure
    463 * @xfer_len: length of the message to be transferred
    464 */
    465static u32 stm32h7_spi_prepare_fthlv(struct stm32_spi *spi, u32 xfer_len)
    466{
    467	u32 packet, bpw;
    468
    469	/* data packet should not exceed 1/2 of fifo space */
    470	packet = clamp(xfer_len, 1U, spi->fifo_size / 2);
    471
    472	/* align packet size with data registers access */
    473	bpw = DIV_ROUND_UP(spi->cur_bpw, 8);
    474	return DIV_ROUND_UP(packet, bpw);
    475}
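
/*
 * Worked example with assumed values: fifo_size = 16 bytes, xfer_len = 40 and
 * cur_bpw = 16 give packet = clamp(40, 1, 8) = 8 bytes and bpw = 2 bytes per
 * frame, so the returned threshold is DIV_ROUND_UP(8, 2) = 4 data frames.
 */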
    476
    477/**
    478 * stm32f4_spi_write_tx - Write bytes to Transmit Data Register
    479 * @spi: pointer to the spi controller data structure
    480 *
    481 * The amount read from tx_buf depends on the remaining bytes, to avoid
    482 * reading beyond the end of tx_buf.
    483 */
    484static void stm32f4_spi_write_tx(struct stm32_spi *spi)
    485{
    486	if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
    487				  STM32F4_SPI_SR_TXE)) {
    488		u32 offs = spi->cur_xferlen - spi->tx_len;
    489
    490		if (spi->cur_bpw == 16) {
    491			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
    492
    493			writew_relaxed(*tx_buf16, spi->base + STM32F4_SPI_DR);
    494			spi->tx_len -= sizeof(u16);
    495		} else {
    496			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
    497
    498			writeb_relaxed(*tx_buf8, spi->base + STM32F4_SPI_DR);
    499			spi->tx_len -= sizeof(u8);
    500		}
    501	}
    502
    503	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
    504}
    505
    506/**
    507 * stm32h7_spi_write_txfifo - Write bytes in Transmit Data Register
    508 * @spi: pointer to the spi controller data structure
    509 *
    510 * The amount read from tx_buf depends on the remaining bytes, to avoid
    511 * reading beyond the end of tx_buf.
    512 */
    513static void stm32h7_spi_write_txfifo(struct stm32_spi *spi)
    514{
    515	while ((spi->tx_len > 0) &&
    516		       (readl_relaxed(spi->base + STM32H7_SPI_SR) &
    517			STM32H7_SPI_SR_TXP)) {
    518		u32 offs = spi->cur_xferlen - spi->tx_len;
    519
    520		if (spi->tx_len >= sizeof(u32)) {
    521			const u32 *tx_buf32 = (const u32 *)(spi->tx_buf + offs);
    522
    523			writel_relaxed(*tx_buf32, spi->base + STM32H7_SPI_TXDR);
    524			spi->tx_len -= sizeof(u32);
    525		} else if (spi->tx_len >= sizeof(u16)) {
    526			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
    527
    528			writew_relaxed(*tx_buf16, spi->base + STM32H7_SPI_TXDR);
    529			spi->tx_len -= sizeof(u16);
    530		} else {
    531			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
    532
    533			writeb_relaxed(*tx_buf8, spi->base + STM32H7_SPI_TXDR);
    534			spi->tx_len -= sizeof(u8);
    535		}
    536	}
    537
    538	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
    539}
    540
    541/**
    542 * stm32f4_spi_read_rx - Read bytes from Receive Data Register
    543 * @spi: pointer to the spi controller data structure
    544 *
    545 * The amount written to rx_buf depends on the remaining bytes, to avoid
    546 * writing beyond the end of rx_buf.
    547 */
    548static void stm32f4_spi_read_rx(struct stm32_spi *spi)
    549{
    550	if ((spi->rx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
    551				  STM32F4_SPI_SR_RXNE)) {
    552		u32 offs = spi->cur_xferlen - spi->rx_len;
    553
    554		if (spi->cur_bpw == 16) {
    555			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);
    556
    557			*rx_buf16 = readw_relaxed(spi->base + STM32F4_SPI_DR);
    558			spi->rx_len -= sizeof(u16);
    559		} else {
    560			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);
    561
    562			*rx_buf8 = readb_relaxed(spi->base + STM32F4_SPI_DR);
    563			spi->rx_len -= sizeof(u8);
    564		}
    565	}
    566
    567	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->rx_len);
    568}
    569
    570/**
    571 * stm32h7_spi_read_rxfifo - Read bytes in Receive Data Register
    572 * @spi: pointer to the spi controller data structure
    573 *
    574 * The amount written to rx_buf depends on the remaining bytes, to avoid
    575 * writing beyond the end of rx_buf.
    576 */
    577static void stm32h7_spi_read_rxfifo(struct stm32_spi *spi)
    578{
    579	u32 sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
    580	u32 rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);
    581
    582	while ((spi->rx_len > 0) &&
    583	       ((sr & STM32H7_SPI_SR_RXP) ||
    584		((sr & STM32H7_SPI_SR_EOT) &&
    585		 ((sr & STM32H7_SPI_SR_RXWNE) || (rxplvl > 0))))) {
    586		u32 offs = spi->cur_xferlen - spi->rx_len;
    587
    588		if ((spi->rx_len >= sizeof(u32)) ||
    589		    (sr & STM32H7_SPI_SR_RXWNE)) {
    590			u32 *rx_buf32 = (u32 *)(spi->rx_buf + offs);
    591
    592			*rx_buf32 = readl_relaxed(spi->base + STM32H7_SPI_RXDR);
    593			spi->rx_len -= sizeof(u32);
    594		} else if ((spi->rx_len >= sizeof(u16)) ||
    595			   (!(sr & STM32H7_SPI_SR_RXWNE) &&
    596			    (rxplvl >= 2 || spi->cur_bpw > 8))) {
    597			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);
    598
    599			*rx_buf16 = readw_relaxed(spi->base + STM32H7_SPI_RXDR);
    600			spi->rx_len -= sizeof(u16);
    601		} else {
    602			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);
    603
    604			*rx_buf8 = readb_relaxed(spi->base + STM32H7_SPI_RXDR);
    605			spi->rx_len -= sizeof(u8);
    606		}
    607
    608		sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
    609		rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);
    610	}
    611
    612	dev_dbg(spi->dev, "%s: %d bytes left (sr=%08x)\n",
    613		__func__, spi->rx_len, sr);
    614}
    615
    616/**
    617 * stm32_spi_enable - Enable SPI controller
    618 * @spi: pointer to the spi controller data structure
    619 */
    620static void stm32_spi_enable(struct stm32_spi *spi)
    621{
    622	dev_dbg(spi->dev, "enable controller\n");
    623
    624	stm32_spi_set_bits(spi, spi->cfg->regs->en.reg,
    625			   spi->cfg->regs->en.mask);
    626}
    627
    628/**
    629 * stm32f4_spi_disable - Disable SPI controller
    630 * @spi: pointer to the spi controller data structure
    631 */
    632static void stm32f4_spi_disable(struct stm32_spi *spi)
    633{
    634	unsigned long flags;
    635	u32 sr;
    636
    637	dev_dbg(spi->dev, "disable controller\n");
    638
    639	spin_lock_irqsave(&spi->lock, flags);
    640
    641	if (!(readl_relaxed(spi->base + STM32F4_SPI_CR1) &
    642	      STM32F4_SPI_CR1_SPE)) {
    643		spin_unlock_irqrestore(&spi->lock, flags);
    644		return;
    645	}
    646
    647	/* Disable interrupts */
    648	stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXEIE |
    649						 STM32F4_SPI_CR2_RXNEIE |
    650						 STM32F4_SPI_CR2_ERRIE);
    651
    652	/* Wait until BSY = 0 */
    653	if (readl_relaxed_poll_timeout_atomic(spi->base + STM32F4_SPI_SR,
    654					      sr, !(sr & STM32F4_SPI_SR_BSY),
    655					      10, 100000) < 0) {
    656		dev_warn(spi->dev, "disabling condition timeout\n");
    657	}
    658
    659	if (spi->cur_usedma && spi->dma_tx)
    660		dmaengine_terminate_all(spi->dma_tx);
    661	if (spi->cur_usedma && spi->dma_rx)
    662		dmaengine_terminate_all(spi->dma_rx);
    663
    664	stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE);
    665
    666	stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN |
    667						 STM32F4_SPI_CR2_RXDMAEN);
    668
    669	/* Sequence to clear OVR flag */
    670	readl_relaxed(spi->base + STM32F4_SPI_DR);
    671	readl_relaxed(spi->base + STM32F4_SPI_SR);
    672
    673	spin_unlock_irqrestore(&spi->lock, flags);
    674}
    675
    676/**
    677 * stm32h7_spi_disable - Disable SPI controller
    678 * @spi: pointer to the spi controller data structure
    679 *
    680 * RX-Fifo is flushed when SPI controller is disabled.
    681 */
    682static void stm32h7_spi_disable(struct stm32_spi *spi)
    683{
    684	unsigned long flags;
    685	u32 cr1;
    686
    687	dev_dbg(spi->dev, "disable controller\n");
    688
    689	spin_lock_irqsave(&spi->lock, flags);
    690
    691	cr1 = readl_relaxed(spi->base + STM32H7_SPI_CR1);
    692
    693	if (!(cr1 & STM32H7_SPI_CR1_SPE)) {
    694		spin_unlock_irqrestore(&spi->lock, flags);
    695		return;
    696	}
    697
    698	if (spi->cur_usedma && spi->dma_tx)
    699		dmaengine_terminate_all(spi->dma_tx);
    700	if (spi->cur_usedma && spi->dma_rx)
    701		dmaengine_terminate_all(spi->dma_rx);
    702
    703	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
    704
    705	stm32_spi_clr_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN |
    706						STM32H7_SPI_CFG1_RXDMAEN);
    707
    708	/* Disable interrupts and clear status flags */
    709	writel_relaxed(0, spi->base + STM32H7_SPI_IER);
    710	writel_relaxed(STM32H7_SPI_IFCR_ALL, spi->base + STM32H7_SPI_IFCR);
    711
    712	spin_unlock_irqrestore(&spi->lock, flags);
    713}
    714
    715/**
    716 * stm32_spi_can_dma - Determine if the transfer is eligible for DMA use
    717 * @master: controller master interface
    718 * @spi_dev: pointer to the spi device
    719 * @transfer: pointer to spi transfer
    720 *
    721 * If the driver has a fifo, use DMA when the transfer is larger than the
    722 * fifo size. Otherwise use DMA for transfers longer than DMA min bytes.
    723 */
    724static bool stm32_spi_can_dma(struct spi_master *master,
    725			      struct spi_device *spi_dev,
    726			      struct spi_transfer *transfer)
    727{
    728	unsigned int dma_size;
    729	struct stm32_spi *spi = spi_master_get_devdata(master);
    730
    731	if (spi->cfg->has_fifo)
    732		dma_size = spi->fifo_size;
    733	else
    734		dma_size = SPI_DMA_MIN_BYTES;
    735
    736	dev_dbg(spi->dev, "%s: %s\n", __func__,
    737		(transfer->len > dma_size) ? "true" : "false");
    738
    739	return (transfer->len > dma_size);
    740}
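
/*
 * For illustration, assuming a probed fifo_size of 16 bytes on a variant with
 * a fifo, DMA is used for transfers larger than 16 bytes; on the fifo-less
 * STM32F4 the SPI_DMA_MIN_BYTES (16) threshold applies instead.
 */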
    741
    742/**
    743 * stm32f4_spi_irq_event - Interrupt handler for SPI controller events
    744 * @irq: interrupt line
    745 * @dev_id: SPI controller master interface
    746 */
    747static irqreturn_t stm32f4_spi_irq_event(int irq, void *dev_id)
    748{
    749	struct spi_master *master = dev_id;
    750	struct stm32_spi *spi = spi_master_get_devdata(master);
    751	u32 sr, mask = 0;
    752	bool end = false;
    753
    754	spin_lock(&spi->lock);
    755
    756	sr = readl_relaxed(spi->base + STM32F4_SPI_SR);
    757	/*
    758	 * The BSY flag is not handled by this interrupt handler: it is normal
    759	 * behavior for it to be set during a transfer, so simply mask it out.
    760	 */
    761	sr &= ~STM32F4_SPI_SR_BSY;
    762
    763	if (!spi->cur_usedma && (spi->cur_comm == SPI_SIMPLEX_TX ||
    764				 spi->cur_comm == SPI_3WIRE_TX)) {
    765		/* OVR flag shouldn't be handled for TX only mode */
    766		sr &= ~(STM32F4_SPI_SR_OVR | STM32F4_SPI_SR_RXNE);
    767		mask |= STM32F4_SPI_SR_TXE;
    768	}
    769
    770	if (!spi->cur_usedma && (spi->cur_comm == SPI_FULL_DUPLEX ||
    771				spi->cur_comm == SPI_SIMPLEX_RX ||
    772				spi->cur_comm == SPI_3WIRE_RX)) {
    773		/* TXE flag is set and is handled when RXNE flag occurs */
    774		sr &= ~STM32F4_SPI_SR_TXE;
    775		mask |= STM32F4_SPI_SR_RXNE | STM32F4_SPI_SR_OVR;
    776	}
    777
    778	if (!(sr & mask)) {
    779		dev_dbg(spi->dev, "spurious IT (sr=0x%08x)\n", sr);
    780		spin_unlock(&spi->lock);
    781		return IRQ_NONE;
    782	}
    783
    784	if (sr & STM32F4_SPI_SR_OVR) {
    785		dev_warn(spi->dev, "Overrun: received value discarded\n");
    786
    787		/* Sequence to clear OVR flag */
    788		readl_relaxed(spi->base + STM32F4_SPI_DR);
    789		readl_relaxed(spi->base + STM32F4_SPI_SR);
    790
    791		/*
    792		 * If overrun is detected, it means that something went wrong,
    793		 * so stop the current transfer. Otherwise the transfer would keep
    794		 * waiting for a new RXNE that never comes, since DR was already read.
    795		 */
    796		end = true;
    797		goto end_irq;
    798	}
    799
    800	if (sr & STM32F4_SPI_SR_TXE) {
    801		if (spi->tx_buf)
    802			stm32f4_spi_write_tx(spi);
    803		if (spi->tx_len == 0)
    804			end = true;
    805	}
    806
    807	if (sr & STM32F4_SPI_SR_RXNE) {
    808		stm32f4_spi_read_rx(spi);
    809		if (spi->rx_len == 0)
    810			end = true;
    811		else if (spi->tx_buf)/* Load data for discontinuous mode */
    812			stm32f4_spi_write_tx(spi);
    813	}
    814
    815end_irq:
    816	if (end) {
    817		/* Immediately disable interrupts so that no new ones are generated */
    818		stm32_spi_clr_bits(spi, STM32F4_SPI_CR2,
    819					STM32F4_SPI_CR2_TXEIE |
    820					STM32F4_SPI_CR2_RXNEIE |
    821					STM32F4_SPI_CR2_ERRIE);
    822		spin_unlock(&spi->lock);
    823		return IRQ_WAKE_THREAD;
    824	}
    825
    826	spin_unlock(&spi->lock);
    827	return IRQ_HANDLED;
    828}
    829
    830/**
    831 * stm32f4_spi_irq_thread - Thread of interrupt handler for SPI controller
    832 * @irq: interrupt line
    833 * @dev_id: SPI controller master interface
    834 */
    835static irqreturn_t stm32f4_spi_irq_thread(int irq, void *dev_id)
    836{
    837	struct spi_master *master = dev_id;
    838	struct stm32_spi *spi = spi_master_get_devdata(master);
    839
    840	spi_finalize_current_transfer(master);
    841	stm32f4_spi_disable(spi);
    842
    843	return IRQ_HANDLED;
    844}
    845
    846/**
    847 * stm32h7_spi_irq_thread - Thread of interrupt handler for SPI controller
    848 * @irq: interrupt line
    849 * @dev_id: SPI controller master interface
    850 */
    851static irqreturn_t stm32h7_spi_irq_thread(int irq, void *dev_id)
    852{
    853	struct spi_master *master = dev_id;
    854	struct stm32_spi *spi = spi_master_get_devdata(master);
    855	u32 sr, ier, mask;
    856	unsigned long flags;
    857	bool end = false;
    858
    859	spin_lock_irqsave(&spi->lock, flags);
    860
    861	sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
    862	ier = readl_relaxed(spi->base + STM32H7_SPI_IER);
    863
    864	mask = ier;
    865	/*
    866	 * EOTIE enables the irq for EOT, SUSP and TXC events. Add SUSP to the
    867	 * mask so it can be acknowledged later. TXC is cleared automatically.
    868	 */
    869
    870	mask |= STM32H7_SPI_SR_SUSP;
    871	/*
    872	 * DXPIE is set in Full-Duplex: a single interrupt is raised only when
    873	 * both TXP and RXP are set, so TXP and RXP must also be checked here.
    874	 */
    875	if ((spi->cur_comm == SPI_FULL_DUPLEX) && !spi->cur_usedma)
    876		mask |= STM32H7_SPI_SR_TXP | STM32H7_SPI_SR_RXP;
    877
    878	if (!(sr & mask)) {
    879		dev_warn(spi->dev, "spurious IT (sr=0x%08x, ier=0x%08x)\n",
    880			 sr, ier);
    881		spin_unlock_irqrestore(&spi->lock, flags);
    882		return IRQ_NONE;
    883	}
    884
    885	if (sr & STM32H7_SPI_SR_SUSP) {
    886		static DEFINE_RATELIMIT_STATE(rs,
    887					      DEFAULT_RATELIMIT_INTERVAL * 10,
    888					      1);
    889		if (__ratelimit(&rs))
    890			dev_dbg_ratelimited(spi->dev, "Communication suspended\n");
    891		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
    892			stm32h7_spi_read_rxfifo(spi);
    893		/*
    894		 * If communication is suspended while using DMA, it means
    895		 * that something went wrong, so stop the current transfer
    896		 */
    897		if (spi->cur_usedma)
    898			end = true;
    899	}
    900
    901	if (sr & STM32H7_SPI_SR_MODF) {
    902		dev_warn(spi->dev, "Mode fault: transfer aborted\n");
    903		end = true;
    904	}
    905
    906	if (sr & STM32H7_SPI_SR_OVR) {
    907		dev_err(spi->dev, "Overrun: RX data lost\n");
    908		end = true;
    909	}
    910
    911	if (sr & STM32H7_SPI_SR_EOT) {
    912		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
    913			stm32h7_spi_read_rxfifo(spi);
    914		if (!spi->cur_usedma ||
    915		    (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX))
    916			end = true;
    917	}
    918
    919	if (sr & STM32H7_SPI_SR_TXP)
    920		if (!spi->cur_usedma && (spi->tx_buf && (spi->tx_len > 0)))
    921			stm32h7_spi_write_txfifo(spi);
    922
    923	if (sr & STM32H7_SPI_SR_RXP)
    924		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
    925			stm32h7_spi_read_rxfifo(spi);
    926
    927	writel_relaxed(sr & mask, spi->base + STM32H7_SPI_IFCR);
    928
    929	spin_unlock_irqrestore(&spi->lock, flags);
    930
    931	if (end) {
    932		stm32h7_spi_disable(spi);
    933		spi_finalize_current_transfer(master);
    934	}
    935
    936	return IRQ_HANDLED;
    937}
    938
    939/**
    940 * stm32_spi_prepare_msg - set up the controller to transfer a single message
    941 * @master: controller master interface
    942 * @msg: pointer to spi message
    943 */
    944static int stm32_spi_prepare_msg(struct spi_master *master,
    945				 struct spi_message *msg)
    946{
    947	struct stm32_spi *spi = spi_master_get_devdata(master);
    948	struct spi_device *spi_dev = msg->spi;
    949	struct device_node *np = spi_dev->dev.of_node;
    950	unsigned long flags;
    951	u32 clrb = 0, setb = 0;
    952
    953	/* SPI slave device may need time between data frames */
    954	spi->cur_midi = 0;
    955	if (np && !of_property_read_u32(np, "st,spi-midi-ns", &spi->cur_midi))
    956		dev_dbg(spi->dev, "%dns inter-data idleness\n", spi->cur_midi);
    957
    958	if (spi_dev->mode & SPI_CPOL)
    959		setb |= spi->cfg->regs->cpol.mask;
    960	else
    961		clrb |= spi->cfg->regs->cpol.mask;
    962
    963	if (spi_dev->mode & SPI_CPHA)
    964		setb |= spi->cfg->regs->cpha.mask;
    965	else
    966		clrb |= spi->cfg->regs->cpha.mask;
    967
    968	if (spi_dev->mode & SPI_LSB_FIRST)
    969		setb |= spi->cfg->regs->lsb_first.mask;
    970	else
    971		clrb |= spi->cfg->regs->lsb_first.mask;
    972
    973	dev_dbg(spi->dev, "cpol=%d cpha=%d lsb_first=%d cs_high=%d\n",
    974		!!(spi_dev->mode & SPI_CPOL),
    975		!!(spi_dev->mode & SPI_CPHA),
    976		!!(spi_dev->mode & SPI_LSB_FIRST),
    977		!!(spi_dev->mode & SPI_CS_HIGH));
    978
    979	/* On STM32H7, messages should not exceed the maximum size set
    980	 * afterwards via the set_number_of_data function. To ensure
    981	 * that, split large messages into several smaller ones.
    982	 */
    983	if (spi->cfg->set_number_of_data) {
    984		int ret;
    985
    986		ret = spi_split_transfers_maxsize(master, msg,
    987						  STM32H7_SPI_TSIZE_MAX,
    988						  GFP_KERNEL | GFP_DMA);
    989		if (ret)
    990			return ret;
    991	}
    992
    993	spin_lock_irqsave(&spi->lock, flags);
    994
    995	/* CPOL, CPHA and LSB FIRST bits share a common register */
    996	if (clrb || setb)
    997		writel_relaxed(
    998			(readl_relaxed(spi->base + spi->cfg->regs->cpol.reg) &
    999			 ~clrb) | setb,
   1000			spi->base + spi->cfg->regs->cpol.reg);
   1001
   1002	spin_unlock_irqrestore(&spi->lock, flags);
   1003
   1004	return 0;
   1005}
   1006
   1007/**
   1008 * stm32f4_spi_dma_tx_cb - dma callback
   1009 * @data: pointer to the spi controller data structure
   1010 *
   1011 * DMA callback is called when the transfer is complete for DMA TX channel.
   1012 */
   1013static void stm32f4_spi_dma_tx_cb(void *data)
   1014{
   1015	struct stm32_spi *spi = data;
   1016
   1017	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
   1018		spi_finalize_current_transfer(spi->master);
   1019		stm32f4_spi_disable(spi);
   1020	}
   1021}
   1022
   1023/**
   1024 * stm32_spi_dma_rx_cb - dma callback
   1025 * @data: pointer to the spi controller data structure
   1026 *
   1027 * DMA callback is called when the transfer is complete for DMA RX channel.
   1028 */
   1029static void stm32_spi_dma_rx_cb(void *data)
   1030{
   1031	struct stm32_spi *spi = data;
   1032
   1033	spi_finalize_current_transfer(spi->master);
   1034	spi->cfg->disable(spi);
   1035}
   1036
   1037/**
   1038 * stm32_spi_dma_config - configure dma slave channel depending on current
   1039 *			  transfer bits_per_word.
   1040 * @spi: pointer to the spi controller data structure
   1041 * @dma_conf: pointer to the dma_slave_config structure
   1042 * @dir: direction of the dma transfer
   1043 */
   1044static void stm32_spi_dma_config(struct stm32_spi *spi,
   1045				 struct dma_slave_config *dma_conf,
   1046				 enum dma_transfer_direction dir)
   1047{
   1048	enum dma_slave_buswidth buswidth;
   1049	u32 maxburst;
   1050
   1051	if (spi->cur_bpw <= 8)
   1052		buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
   1053	else if (spi->cur_bpw <= 16)
   1054		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
   1055	else
   1056		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
   1057
   1058	if (spi->cfg->has_fifo) {
   1059		/* Valid for DMA Half or Full Fifo threshold */
   1060		if (spi->cur_fthlv == 2)
   1061			maxburst = 1;
   1062		else
   1063			maxburst = spi->cur_fthlv;
   1064	} else {
   1065		maxburst = 1;
   1066	}
   1067
   1068	memset(dma_conf, 0, sizeof(struct dma_slave_config));
   1069	dma_conf->direction = dir;
   1070	if (dma_conf->direction == DMA_DEV_TO_MEM) { /* RX */
   1071		dma_conf->src_addr = spi->phys_addr + spi->cfg->regs->rx.reg;
   1072		dma_conf->src_addr_width = buswidth;
   1073		dma_conf->src_maxburst = maxburst;
   1074
   1075		dev_dbg(spi->dev, "Rx DMA config buswidth=%d, maxburst=%d\n",
   1076			buswidth, maxburst);
   1077	} else if (dma_conf->direction == DMA_MEM_TO_DEV) { /* TX */
   1078		dma_conf->dst_addr = spi->phys_addr + spi->cfg->regs->tx.reg;
   1079		dma_conf->dst_addr_width = buswidth;
   1080		dma_conf->dst_maxburst = maxburst;
   1081
   1082		dev_dbg(spi->dev, "Tx DMA config buswidth=%d, maxburst=%d\n",
   1083			buswidth, maxburst);
   1084	}
   1085}
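
/*
 * For illustration: cur_bpw = 16 selects a 2-byte bus width, and with a fifo
 * and cur_fthlv = 4 the maxburst is 4 data frames, while cur_fthlv = 2 (or no
 * fifo at all) falls back to single accesses (maxburst = 1).
 */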
   1086
   1087/**
   1088 * stm32f4_spi_transfer_one_irq - transfer a single spi_transfer using
   1089 *				  interrupts
   1090 * @spi: pointer to the spi controller data structure
   1091 *
   1092 * It must return 0 if the transfer is finished or 1 if the transfer is still
   1093 * in progress.
   1094 */
   1095static int stm32f4_spi_transfer_one_irq(struct stm32_spi *spi)
   1096{
   1097	unsigned long flags;
   1098	u32 cr2 = 0;
   1099
   1100	/* Enable the interrupts relative to the current communication mode */
   1101	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
   1102		cr2 |= STM32F4_SPI_CR2_TXEIE;
   1103	} else if (spi->cur_comm == SPI_FULL_DUPLEX ||
   1104				spi->cur_comm == SPI_SIMPLEX_RX ||
   1105				spi->cur_comm == SPI_3WIRE_RX) {
   1106		/* In transmit-only mode, the OVR flag is set in the SR register
   1107		 * since the received data are never read. Therefore set OVR
   1108		 * interrupt only when rx buffer is available.
   1109		 */
   1110		cr2 |= STM32F4_SPI_CR2_RXNEIE | STM32F4_SPI_CR2_ERRIE;
   1111	} else {
   1112		return -EINVAL;
   1113	}
   1114
   1115	spin_lock_irqsave(&spi->lock, flags);
   1116
   1117	stm32_spi_set_bits(spi, STM32F4_SPI_CR2, cr2);
   1118
   1119	stm32_spi_enable(spi);
   1120
   1121	/* The data transfer starts as soon as the data register is loaded */
   1122	if (spi->tx_buf)
   1123		stm32f4_spi_write_tx(spi);
   1124
   1125	spin_unlock_irqrestore(&spi->lock, flags);
   1126
   1127	return 1;
   1128}
   1129
   1130/**
   1131 * stm32h7_spi_transfer_one_irq - transfer a single spi_transfer using
   1132 *				  interrupts
   1133 * @spi: pointer to the spi controller data structure
   1134 *
   1135 * It must return 0 if the transfer is finished or 1 if the transfer is still
   1136 * in progress.
   1137 */
   1138static int stm32h7_spi_transfer_one_irq(struct stm32_spi *spi)
   1139{
   1140	unsigned long flags;
   1141	u32 ier = 0;
   1142
   1143	/* Enable the interrupts relative to the current communication mode */
   1144	if (spi->tx_buf && spi->rx_buf)	/* Full Duplex */
   1145		ier |= STM32H7_SPI_IER_DXPIE;
   1146	else if (spi->tx_buf)		/* Half-Duplex TX dir or Simplex TX */
   1147		ier |= STM32H7_SPI_IER_TXPIE;
   1148	else if (spi->rx_buf)		/* Half-Duplex RX dir or Simplex RX */
   1149		ier |= STM32H7_SPI_IER_RXPIE;
   1150
   1151	/* Enable the interrupts relative to the end of transfer */
   1152	ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE |
   1153	       STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;
   1154
   1155	spin_lock_irqsave(&spi->lock, flags);
   1156
   1157	stm32_spi_enable(spi);
   1158
   1159	/* Be sure to have data in fifo before starting data transfer */
   1160	if (spi->tx_buf)
   1161		stm32h7_spi_write_txfifo(spi);
   1162
   1163	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
   1164
   1165	writel_relaxed(ier, spi->base + STM32H7_SPI_IER);
   1166
   1167	spin_unlock_irqrestore(&spi->lock, flags);
   1168
   1169	return 1;
   1170}
   1171
   1172/**
   1173 * stm32f4_spi_transfer_one_dma_start - Set SPI driver registers to start
   1174 *					transfer using DMA
   1175 * @spi: pointer to the spi controller data structure
   1176 */
   1177static void stm32f4_spi_transfer_one_dma_start(struct stm32_spi *spi)
   1178{
   1179	/* In DMA mode end of transfer is handled by DMA TX or RX callback. */
   1180	if (spi->cur_comm == SPI_SIMPLEX_RX || spi->cur_comm == SPI_3WIRE_RX ||
   1181	    spi->cur_comm == SPI_FULL_DUPLEX) {
   1182		/*
   1183		 * In transmit-only mode, the OVR flag is set in the SR register
   1184		 * since the received data are never read. Therefore set OVR
   1185		 * interrupt only when rx buffer is available.
   1186		 */
   1187		stm32_spi_set_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_ERRIE);
   1188	}
   1189
   1190	stm32_spi_enable(spi);
   1191}
   1192
   1193/**
   1194 * stm32h7_spi_transfer_one_dma_start - Set SPI driver registers to start
   1195 *					transfer using DMA
   1196 * @spi: pointer to the spi controller data structure
   1197 */
   1198static void stm32h7_spi_transfer_one_dma_start(struct stm32_spi *spi)
   1199{
   1200	uint32_t ier = STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;
   1201
   1202	/* Enable the interrupts */
   1203	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX)
   1204		ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE;
   1205
   1206	stm32_spi_set_bits(spi, STM32H7_SPI_IER, ier);
   1207
   1208	stm32_spi_enable(spi);
   1209
   1210	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
   1211}
   1212
   1213/**
   1214 * stm32_spi_transfer_one_dma - transfer a single spi_transfer using DMA
   1215 * @spi: pointer to the spi controller data structure
   1216 * @xfer: pointer to the spi_transfer structure
   1217 *
   1218 * It must return 0 if the transfer is finished or 1 if the transfer is still
   1219 * in progress.
   1220 */
   1221static int stm32_spi_transfer_one_dma(struct stm32_spi *spi,
   1222				      struct spi_transfer *xfer)
   1223{
   1224	struct dma_slave_config tx_dma_conf, rx_dma_conf;
   1225	struct dma_async_tx_descriptor *tx_dma_desc, *rx_dma_desc;
   1226	unsigned long flags;
   1227
   1228	spin_lock_irqsave(&spi->lock, flags);
   1229
   1230	rx_dma_desc = NULL;
   1231	if (spi->rx_buf && spi->dma_rx) {
   1232		stm32_spi_dma_config(spi, &rx_dma_conf, DMA_DEV_TO_MEM);
   1233		dmaengine_slave_config(spi->dma_rx, &rx_dma_conf);
   1234
   1235		/* Enable Rx DMA request */
   1236		stm32_spi_set_bits(spi, spi->cfg->regs->dma_rx_en.reg,
   1237				   spi->cfg->regs->dma_rx_en.mask);
   1238
   1239		rx_dma_desc = dmaengine_prep_slave_sg(
   1240					spi->dma_rx, xfer->rx_sg.sgl,
   1241					xfer->rx_sg.nents,
   1242					rx_dma_conf.direction,
   1243					DMA_PREP_INTERRUPT);
   1244	}
   1245
   1246	tx_dma_desc = NULL;
   1247	if (spi->tx_buf && spi->dma_tx) {
   1248		stm32_spi_dma_config(spi, &tx_dma_conf, DMA_MEM_TO_DEV);
   1249		dmaengine_slave_config(spi->dma_tx, &tx_dma_conf);
   1250
   1251		tx_dma_desc = dmaengine_prep_slave_sg(
   1252					spi->dma_tx, xfer->tx_sg.sgl,
   1253					xfer->tx_sg.nents,
   1254					tx_dma_conf.direction,
   1255					DMA_PREP_INTERRUPT);
   1256	}
   1257
   1258	if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) ||
   1259	    (spi->rx_buf && spi->dma_rx && !rx_dma_desc))
   1260		goto dma_desc_error;
   1261
   1262	if (spi->cur_comm == SPI_FULL_DUPLEX && (!tx_dma_desc || !rx_dma_desc))
   1263		goto dma_desc_error;
   1264
   1265	if (rx_dma_desc) {
   1266		rx_dma_desc->callback = spi->cfg->dma_rx_cb;
   1267		rx_dma_desc->callback_param = spi;
   1268
   1269		if (dma_submit_error(dmaengine_submit(rx_dma_desc))) {
   1270			dev_err(spi->dev, "Rx DMA submit failed\n");
   1271			goto dma_desc_error;
   1272		}
   1273		/* Enable Rx DMA channel */
   1274		dma_async_issue_pending(spi->dma_rx);
   1275	}
   1276
   1277	if (tx_dma_desc) {
   1278		if (spi->cur_comm == SPI_SIMPLEX_TX ||
   1279		    spi->cur_comm == SPI_3WIRE_TX) {
   1280			tx_dma_desc->callback = spi->cfg->dma_tx_cb;
   1281			tx_dma_desc->callback_param = spi;
   1282		}
   1283
   1284		if (dma_submit_error(dmaengine_submit(tx_dma_desc))) {
   1285			dev_err(spi->dev, "Tx DMA submit failed\n");
   1286			goto dma_submit_error;
   1287		}
   1288		/* Enable Tx DMA channel */
   1289		dma_async_issue_pending(spi->dma_tx);
   1290
   1291		/* Enable Tx DMA request */
   1292		stm32_spi_set_bits(spi, spi->cfg->regs->dma_tx_en.reg,
   1293				   spi->cfg->regs->dma_tx_en.mask);
   1294	}
   1295
   1296	spi->cfg->transfer_one_dma_start(spi);
   1297
   1298	spin_unlock_irqrestore(&spi->lock, flags);
   1299
   1300	return 1;
   1301
   1302dma_submit_error:
   1303	if (spi->dma_rx)
   1304		dmaengine_terminate_all(spi->dma_rx);
   1305
   1306dma_desc_error:
   1307	stm32_spi_clr_bits(spi, spi->cfg->regs->dma_rx_en.reg,
   1308			   spi->cfg->regs->dma_rx_en.mask);
   1309
   1310	spin_unlock_irqrestore(&spi->lock, flags);
   1311
   1312	dev_info(spi->dev, "DMA issue: fall back to irq transfer\n");
   1313
   1314	spi->cur_usedma = false;
   1315	return spi->cfg->transfer_one_irq(spi);
   1316}
   1317
   1318/**
   1319 * stm32f4_spi_set_bpw - Configure bits per word
   1320 * @spi: pointer to the spi controller data structure
   1321 */
   1322static void stm32f4_spi_set_bpw(struct stm32_spi *spi)
   1323{
   1324	if (spi->cur_bpw == 16)
   1325		stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF);
   1326	else
   1327		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF);
   1328}
   1329
   1330/**
   1331 * stm32h7_spi_set_bpw - configure bits per word
   1332 * @spi: pointer to the spi controller data structure
   1333 */
   1334static void stm32h7_spi_set_bpw(struct stm32_spi *spi)
   1335{
   1336	u32 bpw, fthlv;
   1337	u32 cfg1_clrb = 0, cfg1_setb = 0;
   1338
   1339	bpw = spi->cur_bpw - 1;
   1340
   1341	cfg1_clrb |= STM32H7_SPI_CFG1_DSIZE;
   1342	cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_DSIZE, bpw);
   1343
   1344	spi->cur_fthlv = stm32h7_spi_prepare_fthlv(spi, spi->cur_xferlen);
   1345	fthlv = spi->cur_fthlv - 1;
   1346
   1347	cfg1_clrb |= STM32H7_SPI_CFG1_FTHLV;
   1348	cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_FTHLV, fthlv);
   1349
   1350	writel_relaxed(
   1351		(readl_relaxed(spi->base + STM32H7_SPI_CFG1) &
   1352		 ~cfg1_clrb) | cfg1_setb,
   1353		spi->base + STM32H7_SPI_CFG1);
   1354}
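
/*
 * For illustration: cur_bpw = 16 programs DSIZE = 15, and with the values
 * assumed earlier (fifo_size = 16, cur_xferlen = 40) cur_fthlv = 4 programs
 * FTHLV = 3, since both bitfields hold "value minus one".
 */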
   1355
   1356/**
   1357 * stm32_spi_set_mbr - Configure baud rate divisor in master mode
   1358 * @spi: pointer to the spi controller data structure
   1359 * @mbrdiv: baud rate divisor value
   1360 */
   1361static void stm32_spi_set_mbr(struct stm32_spi *spi, u32 mbrdiv)
   1362{
   1363	u32 clrb = 0, setb = 0;
   1364
   1365	clrb |= spi->cfg->regs->br.mask;
   1366	setb |= (mbrdiv << spi->cfg->regs->br.shift) & spi->cfg->regs->br.mask;
   1367
   1368	writel_relaxed((readl_relaxed(spi->base + spi->cfg->regs->br.reg) &
   1369			~clrb) | setb,
   1370		       spi->base + spi->cfg->regs->br.reg);
   1371}
   1372
   1373/**
   1374 * stm32_spi_communication_type - return transfer communication type
   1375 * @spi_dev: pointer to the spi device
   1376 * @transfer: pointer to spi transfer
   1377 */
   1378static unsigned int stm32_spi_communication_type(struct spi_device *spi_dev,
   1379						 struct spi_transfer *transfer)
   1380{
   1381	unsigned int type = SPI_FULL_DUPLEX;
   1382
   1383	if (spi_dev->mode & SPI_3WIRE) { /* MISO/MOSI signals shared */
   1384		/*
   1385		 * SPI_3WIRE with both xfer->tx_buf != NULL and xfer->rx_buf != NULL
   1386		 * is forbidden and rejected by the SPI subsystem, so the direction
   1387		 * of the transfer can be determined from whichever buffer is
   1388		 * valid.
   1389		 */
   1390		if (!transfer->tx_buf)
   1391			type = SPI_3WIRE_RX;
   1392		else
   1393			type = SPI_3WIRE_TX;
   1394	} else {
   1395		if (!transfer->tx_buf)
   1396			type = SPI_SIMPLEX_RX;
   1397		else if (!transfer->rx_buf)
   1398			type = SPI_SIMPLEX_TX;
   1399	}
   1400
   1401	return type;
   1402}
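
/*
 * Summary of the mapping above: both buffers set gives SPI_FULL_DUPLEX,
 * a missing tx_buf gives SPI_SIMPLEX_RX (or SPI_3WIRE_RX in 3-wire mode),
 * and a missing rx_buf gives SPI_SIMPLEX_TX (or SPI_3WIRE_TX).
 */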
   1403
   1404/**
   1405 * stm32f4_spi_set_mode - configure communication mode
   1406 * @spi: pointer to the spi controller data structure
   1407 * @comm_type: type of communication to configure
   1408 */
   1409static int stm32f4_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
   1410{
   1411	if (comm_type == SPI_3WIRE_TX || comm_type == SPI_SIMPLEX_TX) {
   1412		stm32_spi_set_bits(spi, STM32F4_SPI_CR1,
   1413					STM32F4_SPI_CR1_BIDIMODE |
   1414					STM32F4_SPI_CR1_BIDIOE);
   1415	} else if (comm_type == SPI_FULL_DUPLEX ||
   1416				comm_type == SPI_SIMPLEX_RX) {
   1417		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1,
   1418					STM32F4_SPI_CR1_BIDIMODE |
   1419					STM32F4_SPI_CR1_BIDIOE);
   1420	} else if (comm_type == SPI_3WIRE_RX) {
   1421		stm32_spi_set_bits(spi, STM32F4_SPI_CR1,
   1422					STM32F4_SPI_CR1_BIDIMODE);
   1423		stm32_spi_clr_bits(spi, STM32F4_SPI_CR1,
   1424					STM32F4_SPI_CR1_BIDIOE);
   1425	} else {
   1426		return -EINVAL;
   1427	}
   1428
   1429	return 0;
   1430}
   1431
   1432/**
   1433 * stm32h7_spi_set_mode - configure communication mode
   1434 * @spi: pointer to the spi controller data structure
   1435 * @comm_type: type of communication to configure
   1436 */
   1437static int stm32h7_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
   1438{
   1439	u32 mode;
   1440	u32 cfg2_clrb = 0, cfg2_setb = 0;
   1441
   1442	if (comm_type == SPI_3WIRE_RX) {
   1443		mode = STM32H7_SPI_HALF_DUPLEX;
   1444		stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
   1445	} else if (comm_type == SPI_3WIRE_TX) {
   1446		mode = STM32H7_SPI_HALF_DUPLEX;
   1447		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
   1448	} else if (comm_type == SPI_SIMPLEX_RX) {
   1449		mode = STM32H7_SPI_SIMPLEX_RX;
   1450	} else if (comm_type == SPI_SIMPLEX_TX) {
   1451		mode = STM32H7_SPI_SIMPLEX_TX;
   1452	} else {
   1453		mode = STM32H7_SPI_FULL_DUPLEX;
   1454	}
   1455
   1456	cfg2_clrb |= STM32H7_SPI_CFG2_COMM;
   1457	cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_COMM, mode);
   1458
   1459	writel_relaxed(
   1460		(readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
   1461		 ~cfg2_clrb) | cfg2_setb,
   1462		spi->base + STM32H7_SPI_CFG2);
   1463
   1464	return 0;
   1465}
   1466
   1467/**
   1468 * stm32h7_spi_data_idleness - configure minimum time delay inserted between two
   1469 *			       consecutive data frames in master mode
   1470 * @spi: pointer to the spi controller data structure
   1471 * @len: transfer len
   1472 */
   1473static void stm32h7_spi_data_idleness(struct stm32_spi *spi, u32 len)
   1474{
   1475	u32 cfg2_clrb = 0, cfg2_setb = 0;
   1476
   1477	cfg2_clrb |= STM32H7_SPI_CFG2_MIDI;
   1478	if ((len > 1) && (spi->cur_midi > 0)) {
   1479		u32 sck_period_ns = DIV_ROUND_UP(NSEC_PER_SEC, spi->cur_speed);
   1480		u32 midi = min_t(u32,
   1481				 DIV_ROUND_UP(spi->cur_midi, sck_period_ns),
   1482				 FIELD_GET(STM32H7_SPI_CFG2_MIDI,
   1483				 STM32H7_SPI_CFG2_MIDI));
   1484
   1485
   1486		dev_dbg(spi->dev, "period=%dns, midi=%d(=%dns)\n",
   1487			sck_period_ns, midi, midi * sck_period_ns);
   1488		cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_MIDI, midi);
   1489	}
   1490
   1491	writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
   1492			~cfg2_clrb) | cfg2_setb,
   1493		       spi->base + STM32H7_SPI_CFG2);
   1494}
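
/*
 * Worked example with assumed values: cur_speed = 10 MHz gives an SCK period
 * of 100 ns, so a device requesting st,spi-midi-ns = 250 gets
 * midi = DIV_ROUND_UP(250, 100) = 3 SCK periods of idle time between frames,
 * capped at 15 (the maximum the 4-bit MIDI field can hold).
 */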
   1495
   1496/**
   1497 * stm32h7_spi_number_of_data - configure the number of data words for the current transfer
   1498 * @spi: pointer to the spi controller data structure
   1499 * @nb_words: transfer length (in words)
   1500 */
   1501static int stm32h7_spi_number_of_data(struct stm32_spi *spi, u32 nb_words)
   1502{
   1503	if (nb_words <= STM32H7_SPI_TSIZE_MAX) {
   1504		writel_relaxed(FIELD_PREP(STM32H7_SPI_CR2_TSIZE, nb_words),
   1505			       spi->base + STM32H7_SPI_CR2);
   1506	} else {
   1507		return -EMSGSIZE;
   1508	}
   1509
   1510	return 0;
   1511}
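
/*
 * TSIZE is a 16-bit field, so a single transfer is limited to
 * STM32H7_SPI_TSIZE_MAX = 65535 data frames; larger messages are split
 * beforehand in stm32_spi_prepare_msg() via spi_split_transfers_maxsize().
 */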
   1512
   1513/**
   1514 * stm32_spi_transfer_one_setup - common setup to transfer a single
   1515 *				  spi_transfer either using DMA or
   1516 *				  interrupts.
   1517 * @spi: pointer to the spi controller data structure
   1518 * @spi_dev: pointer to the spi device
   1519 * @transfer: pointer to spi transfer
   1520 */
   1521static int stm32_spi_transfer_one_setup(struct stm32_spi *spi,
   1522					struct spi_device *spi_dev,
   1523					struct spi_transfer *transfer)
   1524{
   1525	unsigned long flags;
   1526	unsigned int comm_type;
   1527	int nb_words, ret = 0;
   1528	int mbr;
   1529
   1530	spin_lock_irqsave(&spi->lock, flags);
   1531
   1532	spi->cur_xferlen = transfer->len;
   1533
   1534	spi->cur_bpw = transfer->bits_per_word;
   1535	spi->cfg->set_bpw(spi);
   1536
   1537	/* Update spi->cur_speed with real clock speed */
   1538	mbr = stm32_spi_prepare_mbr(spi, transfer->speed_hz,
   1539				    spi->cfg->baud_rate_div_min,
   1540				    spi->cfg->baud_rate_div_max);
   1541	if (mbr < 0) {
   1542		ret = mbr;
   1543		goto out;
   1544	}
   1545
   1546	transfer->speed_hz = spi->cur_speed;
   1547	stm32_spi_set_mbr(spi, mbr);
   1548
   1549	comm_type = stm32_spi_communication_type(spi_dev, transfer);
   1550	ret = spi->cfg->set_mode(spi, comm_type);
   1551	if (ret < 0)
   1552		goto out;
   1553
   1554	spi->cur_comm = comm_type;
   1555
   1556	if (spi->cfg->set_data_idleness)
   1557		spi->cfg->set_data_idleness(spi, transfer->len);
   1558
   1559	if (spi->cur_bpw <= 8)
   1560		nb_words = transfer->len;
   1561	else if (spi->cur_bpw <= 16)
   1562		nb_words = DIV_ROUND_UP(transfer->len * 8, 16);
   1563	else
   1564		nb_words = DIV_ROUND_UP(transfer->len * 8, 32);
   1565
   1566	if (spi->cfg->set_number_of_data) {
   1567		ret = spi->cfg->set_number_of_data(spi, nb_words);
   1568		if (ret < 0)
   1569			goto out;
   1570	}
   1571
   1572	dev_dbg(spi->dev, "transfer communication mode set to %d\n",
   1573		spi->cur_comm);
   1574	dev_dbg(spi->dev,
   1575		"data frame of %d-bit, data packet of %d data frames\n",
   1576		spi->cur_bpw, spi->cur_fthlv);
   1577	dev_dbg(spi->dev, "speed set to %dHz\n", spi->cur_speed);
   1578	dev_dbg(spi->dev, "transfer of %d bytes (%d data frames)\n",
   1579		spi->cur_xferlen, nb_words);
   1580	dev_dbg(spi->dev, "dma %s\n",
   1581		(spi->cur_usedma) ? "enabled" : "disabled");
   1582
   1583out:
   1584	spin_unlock_irqrestore(&spi->lock, flags);
   1585
   1586	return ret;
   1587}
   1588
   1589/**
   1590 * stm32_spi_transfer_one - transfer a single spi_transfer
   1591 * @master: controller master interface
   1592 * @spi_dev: pointer to the spi device
   1593 * @transfer: pointer to spi transfer
   1594 *
   1595 * It must return 0 if the transfer is finished or 1 if the transfer is still
   1596 * in progress.
   1597 */
   1598static int stm32_spi_transfer_one(struct spi_master *master,
   1599				  struct spi_device *spi_dev,
   1600				  struct spi_transfer *transfer)
   1601{
   1602	struct stm32_spi *spi = spi_master_get_devdata(master);
   1603	int ret;
   1604
   1605	spi->tx_buf = transfer->tx_buf;
   1606	spi->rx_buf = transfer->rx_buf;
   1607	spi->tx_len = spi->tx_buf ? transfer->len : 0;
   1608	spi->rx_len = spi->rx_buf ? transfer->len : 0;
   1609
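	/*
	 * Use DMA only when the controller registered a can_dma() hook and
	 * that hook accepts this particular transfer; otherwise fall back to
	 * the interrupt-driven path.
	 */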
   1610	spi->cur_usedma = (master->can_dma &&
   1611			   master->can_dma(master, spi_dev, transfer));
   1612
   1613	ret = stm32_spi_transfer_one_setup(spi, spi_dev, transfer);
   1614	if (ret) {
   1615		dev_err(spi->dev, "SPI transfer setup failed\n");
   1616		return ret;
   1617	}
   1618
   1619	if (spi->cur_usedma)
   1620		return stm32_spi_transfer_one_dma(spi, transfer);
   1621	else
   1622		return spi->cfg->transfer_one_irq(spi);
   1623}
   1624
   1625/**
    1626 * stm32_spi_unprepare_msg - disable the hardware once the message has been handled
   1627 * @master: controller master interface
   1628 * @msg: pointer to the spi message
   1629 */
   1630static int stm32_spi_unprepare_msg(struct spi_master *master,
   1631				   struct spi_message *msg)
   1632{
   1633	struct stm32_spi *spi = spi_master_get_devdata(master);
   1634
   1635	spi->cfg->disable(spi);
   1636
   1637	return 0;
   1638}
   1639
   1640/**
   1641 * stm32f4_spi_config - Configure SPI controller as SPI master
   1642 * @spi: pointer to the spi controller data structure
   1643 */
   1644static int stm32f4_spi_config(struct stm32_spi *spi)
   1645{
   1646	unsigned long flags;
   1647
   1648	spin_lock_irqsave(&spi->lock, flags);
   1649
   1650	/* Ensure I2SMOD bit is kept cleared */
   1651	stm32_spi_clr_bits(spi, STM32F4_SPI_I2SCFGR,
   1652			   STM32F4_SPI_I2SCFGR_I2SMOD);
   1653
   1654	/*
   1655	 * - SS input value high
   1656	 * - transmitter half duplex direction
   1657	 * - Set the master mode (default Motorola mode)
   1658	 * - Consider 1 master/n slaves configuration and
   1659	 *   SS input value is determined by the SSI bit
   1660	 */
   1661	stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SSI |
   1662						 STM32F4_SPI_CR1_BIDIOE |
   1663						 STM32F4_SPI_CR1_MSTR |
   1664						 STM32F4_SPI_CR1_SSM);
   1665
   1666	spin_unlock_irqrestore(&spi->lock, flags);
   1667
   1668	return 0;
   1669}
   1670
   1671/**
   1672 * stm32h7_spi_config - Configure SPI controller as SPI master
   1673 * @spi: pointer to the spi controller data structure
   1674 */
   1675static int stm32h7_spi_config(struct stm32_spi *spi)
   1676{
   1677	unsigned long flags;
   1678
   1679	spin_lock_irqsave(&spi->lock, flags);
   1680
   1681	/* Ensure I2SMOD bit is kept cleared */
   1682	stm32_spi_clr_bits(spi, STM32H7_SPI_I2SCFGR,
   1683			   STM32H7_SPI_I2SCFGR_I2SMOD);
   1684
   1685	/*
   1686	 * - SS input value high
   1687	 * - transmitter half duplex direction
   1688	 * - automatic communication suspend when RX-Fifo is full
   1689	 */
   1690	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SSI |
   1691						 STM32H7_SPI_CR1_HDDIR |
   1692						 STM32H7_SPI_CR1_MASRX);
   1693
   1694	/*
   1695	 * - Set the master mode (default Motorola mode)
   1696	 * - Consider 1 master/n slaves configuration and
   1697	 *   SS input value is determined by the SSI bit
   1698	 * - keep control of all associated GPIOs
   1699	 */
   1700	stm32_spi_set_bits(spi, STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_MASTER |
   1701						  STM32H7_SPI_CFG2_SSM |
   1702						  STM32H7_SPI_CFG2_AFCNTR);
   1703
   1704	spin_unlock_irqrestore(&spi->lock, flags);
   1705
   1706	return 0;
   1707}
   1708
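/*
 * Per-variant hook tables: the F4 controller has no FIFO and always needs a
 * TX buffer (SPI_MASTER_MUST_TX), while the H7 controller adds a FIFO plus
 * inter-data idleness and TSIZE (number of data frames) handling.
 */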
   1709static const struct stm32_spi_cfg stm32f4_spi_cfg = {
   1710	.regs = &stm32f4_spi_regspec,
   1711	.get_bpw_mask = stm32f4_spi_get_bpw_mask,
   1712	.disable = stm32f4_spi_disable,
   1713	.config = stm32f4_spi_config,
   1714	.set_bpw = stm32f4_spi_set_bpw,
   1715	.set_mode = stm32f4_spi_set_mode,
   1716	.transfer_one_dma_start = stm32f4_spi_transfer_one_dma_start,
   1717	.dma_tx_cb = stm32f4_spi_dma_tx_cb,
   1718	.dma_rx_cb = stm32_spi_dma_rx_cb,
   1719	.transfer_one_irq = stm32f4_spi_transfer_one_irq,
   1720	.irq_handler_event = stm32f4_spi_irq_event,
   1721	.irq_handler_thread = stm32f4_spi_irq_thread,
   1722	.baud_rate_div_min = STM32F4_SPI_BR_DIV_MIN,
   1723	.baud_rate_div_max = STM32F4_SPI_BR_DIV_MAX,
   1724	.has_fifo = false,
   1725	.flags = SPI_MASTER_MUST_TX,
   1726};
   1727
   1728static const struct stm32_spi_cfg stm32h7_spi_cfg = {
   1729	.regs = &stm32h7_spi_regspec,
   1730	.get_fifo_size = stm32h7_spi_get_fifo_size,
   1731	.get_bpw_mask = stm32h7_spi_get_bpw_mask,
   1732	.disable = stm32h7_spi_disable,
   1733	.config = stm32h7_spi_config,
   1734	.set_bpw = stm32h7_spi_set_bpw,
   1735	.set_mode = stm32h7_spi_set_mode,
   1736	.set_data_idleness = stm32h7_spi_data_idleness,
   1737	.set_number_of_data = stm32h7_spi_number_of_data,
   1738	.transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start,
   1739	.dma_rx_cb = stm32_spi_dma_rx_cb,
    1740	/*
    1741	 * dma_tx_cb is not necessary: for TX, the DMA transfer is followed by
    1742	 * an SPI access, so completion is handled in the SPI interrupt
    1743	 */
   1744	.transfer_one_irq = stm32h7_spi_transfer_one_irq,
   1745	.irq_handler_thread = stm32h7_spi_irq_thread,
   1746	.baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN,
   1747	.baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX,
   1748	.has_fifo = true,
   1749};
   1750
   1751static const struct of_device_id stm32_spi_of_match[] = {
   1752	{ .compatible = "st,stm32h7-spi", .data = (void *)&stm32h7_spi_cfg },
   1753	{ .compatible = "st,stm32f4-spi", .data = (void *)&stm32f4_spi_cfg },
   1754	{},
   1755};
   1756MODULE_DEVICE_TABLE(of, stm32_spi_of_match);
   1757
   1758static int stm32_spi_probe(struct platform_device *pdev)
   1759{
   1760	struct spi_master *master;
   1761	struct stm32_spi *spi;
   1762	struct resource *res;
   1763	struct reset_control *rst;
   1764	int ret;
   1765
   1766	master = devm_spi_alloc_master(&pdev->dev, sizeof(struct stm32_spi));
   1767	if (!master) {
   1768		dev_err(&pdev->dev, "spi master allocation failed\n");
   1769		return -ENOMEM;
   1770	}
   1771	platform_set_drvdata(pdev, master);
   1772
   1773	spi = spi_master_get_devdata(master);
   1774	spi->dev = &pdev->dev;
   1775	spi->master = master;
   1776	spin_lock_init(&spi->lock);
   1777
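	/* Select the per-variant hook table matched from the compatible string */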
   1778	spi->cfg = (const struct stm32_spi_cfg *)
   1779		of_match_device(pdev->dev.driver->of_match_table,
   1780				&pdev->dev)->data;
   1781
   1782	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
   1783	spi->base = devm_ioremap_resource(&pdev->dev, res);
   1784	if (IS_ERR(spi->base))
   1785		return PTR_ERR(spi->base);
   1786
   1787	spi->phys_addr = (dma_addr_t)res->start;
   1788
   1789	spi->irq = platform_get_irq(pdev, 0);
   1790	if (spi->irq <= 0)
   1791		return dev_err_probe(&pdev->dev, spi->irq,
   1792				     "failed to get irq\n");
   1793
   1794	ret = devm_request_threaded_irq(&pdev->dev, spi->irq,
   1795					spi->cfg->irq_handler_event,
   1796					spi->cfg->irq_handler_thread,
   1797					IRQF_ONESHOT, pdev->name, master);
   1798	if (ret) {
   1799		dev_err(&pdev->dev, "irq%d request failed: %d\n", spi->irq,
   1800			ret);
   1801		return ret;
   1802	}
   1803
   1804	spi->clk = devm_clk_get(&pdev->dev, NULL);
   1805	if (IS_ERR(spi->clk)) {
   1806		ret = PTR_ERR(spi->clk);
   1807		dev_err(&pdev->dev, "clk get failed: %d\n", ret);
   1808		return ret;
   1809	}
   1810
   1811	ret = clk_prepare_enable(spi->clk);
   1812	if (ret) {
   1813		dev_err(&pdev->dev, "clk enable failed: %d\n", ret);
   1814		return ret;
   1815	}
   1816	spi->clk_rate = clk_get_rate(spi->clk);
   1817	if (!spi->clk_rate) {
   1818		dev_err(&pdev->dev, "clk rate = 0\n");
   1819		ret = -EINVAL;
   1820		goto err_clk_disable;
   1821	}
   1822
   1823	rst = devm_reset_control_get_optional_exclusive(&pdev->dev, NULL);
   1824	if (rst) {
   1825		if (IS_ERR(rst)) {
   1826			ret = dev_err_probe(&pdev->dev, PTR_ERR(rst),
   1827					    "failed to get reset\n");
   1828			goto err_clk_disable;
   1829		}
   1830
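		/*
		 * Briefly assert the optional reset line so the controller
		 * starts from its default register state before being
		 * configured below.
		 */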
   1831		reset_control_assert(rst);
   1832		udelay(2);
   1833		reset_control_deassert(rst);
   1834	}
   1835
   1836	if (spi->cfg->has_fifo)
   1837		spi->fifo_size = spi->cfg->get_fifo_size(spi);
   1838
   1839	ret = spi->cfg->config(spi);
   1840	if (ret) {
   1841		dev_err(&pdev->dev, "controller configuration failed: %d\n",
   1842			ret);
   1843		goto err_clk_disable;
   1844	}
   1845
   1846	master->dev.of_node = pdev->dev.of_node;
   1847	master->auto_runtime_pm = true;
   1848	master->bus_num = pdev->id;
   1849	master->mode_bits = SPI_CPHA | SPI_CPOL | SPI_CS_HIGH | SPI_LSB_FIRST |
   1850			    SPI_3WIRE;
   1851	master->bits_per_word_mask = spi->cfg->get_bpw_mask(spi);
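	/*
	 * The supported SCK range follows from the kernel clock rate and the
	 * variant's divisor limits; e.g. (assumed values) a 100 MHz clock
	 * with divisors 2..256 gives roughly 390 kHz to 50 MHz.
	 */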
   1852	master->max_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_min;
   1853	master->min_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_max;
   1854	master->use_gpio_descriptors = true;
   1855	master->prepare_message = stm32_spi_prepare_msg;
   1856	master->transfer_one = stm32_spi_transfer_one;
   1857	master->unprepare_message = stm32_spi_unprepare_msg;
   1858	master->flags = spi->cfg->flags;
   1859
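	/*
	 * DMA channels are optional: if a channel is missing, the driver
	 * falls back to interrupt-only transfers, but a probe deferral from
	 * the DMA provider is still propagated.
	 */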
   1860	spi->dma_tx = dma_request_chan(spi->dev, "tx");
   1861	if (IS_ERR(spi->dma_tx)) {
   1862		ret = PTR_ERR(spi->dma_tx);
   1863		spi->dma_tx = NULL;
   1864		if (ret == -EPROBE_DEFER)
   1865			goto err_clk_disable;
   1866
   1867		dev_warn(&pdev->dev, "failed to request tx dma channel\n");
   1868	} else {
   1869		master->dma_tx = spi->dma_tx;
   1870	}
   1871
   1872	spi->dma_rx = dma_request_chan(spi->dev, "rx");
   1873	if (IS_ERR(spi->dma_rx)) {
   1874		ret = PTR_ERR(spi->dma_rx);
   1875		spi->dma_rx = NULL;
   1876		if (ret == -EPROBE_DEFER)
   1877			goto err_dma_release;
   1878
   1879		dev_warn(&pdev->dev, "failed to request rx dma channel\n");
   1880	} else {
   1881		master->dma_rx = spi->dma_rx;
   1882	}
   1883
   1884	if (spi->dma_tx || spi->dma_rx)
   1885		master->can_dma = stm32_spi_can_dma;
   1886
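	/*
	 * Enable runtime PM with autosuspend while holding a usage count so
	 * the device stays active until registration has completed; the
	 * reference is dropped at the end of probe via put_autosuspend.
	 */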
   1887	pm_runtime_set_autosuspend_delay(&pdev->dev,
   1888					 STM32_SPI_AUTOSUSPEND_DELAY);
   1889	pm_runtime_use_autosuspend(&pdev->dev);
   1890	pm_runtime_set_active(&pdev->dev);
   1891	pm_runtime_get_noresume(&pdev->dev);
   1892	pm_runtime_enable(&pdev->dev);
   1893
   1894	ret = spi_register_master(master);
   1895	if (ret) {
   1896		dev_err(&pdev->dev, "spi master registration failed: %d\n",
   1897			ret);
   1898		goto err_pm_disable;
   1899	}
   1900
   1901	pm_runtime_mark_last_busy(&pdev->dev);
   1902	pm_runtime_put_autosuspend(&pdev->dev);
   1903
   1904	dev_info(&pdev->dev, "driver initialized\n");
   1905
   1906	return 0;
   1907
   1908err_pm_disable:
   1909	pm_runtime_disable(&pdev->dev);
   1910	pm_runtime_put_noidle(&pdev->dev);
   1911	pm_runtime_set_suspended(&pdev->dev);
   1912	pm_runtime_dont_use_autosuspend(&pdev->dev);
   1913err_dma_release:
   1914	if (spi->dma_tx)
   1915		dma_release_channel(spi->dma_tx);
   1916	if (spi->dma_rx)
   1917		dma_release_channel(spi->dma_rx);
   1918err_clk_disable:
   1919	clk_disable_unprepare(spi->clk);
   1920
   1921	return ret;
   1922}
   1923
   1924static int stm32_spi_remove(struct platform_device *pdev)
   1925{
   1926	struct spi_master *master = platform_get_drvdata(pdev);
   1927	struct stm32_spi *spi = spi_master_get_devdata(master);
   1928
   1929	pm_runtime_get_sync(&pdev->dev);
   1930
   1931	spi_unregister_master(master);
   1932	spi->cfg->disable(spi);
   1933
   1934	pm_runtime_disable(&pdev->dev);
   1935	pm_runtime_put_noidle(&pdev->dev);
   1936	pm_runtime_set_suspended(&pdev->dev);
   1937	pm_runtime_dont_use_autosuspend(&pdev->dev);
   1938
   1939	if (master->dma_tx)
   1940		dma_release_channel(master->dma_tx);
   1941	if (master->dma_rx)
   1942		dma_release_channel(master->dma_rx);
   1943
   1944	clk_disable_unprepare(spi->clk);
    1945
   1947	pinctrl_pm_select_sleep_state(&pdev->dev);
   1948
   1949	return 0;
   1950}
   1951
   1952static int __maybe_unused stm32_spi_runtime_suspend(struct device *dev)
   1953{
   1954	struct spi_master *master = dev_get_drvdata(dev);
   1955	struct stm32_spi *spi = spi_master_get_devdata(master);
   1956
   1957	clk_disable_unprepare(spi->clk);
   1958
   1959	return pinctrl_pm_select_sleep_state(dev);
   1960}
   1961
   1962static int __maybe_unused stm32_spi_runtime_resume(struct device *dev)
   1963{
   1964	struct spi_master *master = dev_get_drvdata(dev);
   1965	struct stm32_spi *spi = spi_master_get_devdata(master);
   1966	int ret;
   1967
   1968	ret = pinctrl_pm_select_default_state(dev);
   1969	if (ret)
   1970		return ret;
   1971
   1972	return clk_prepare_enable(spi->clk);
   1973}
   1974
   1975static int __maybe_unused stm32_spi_suspend(struct device *dev)
   1976{
   1977	struct spi_master *master = dev_get_drvdata(dev);
   1978	int ret;
   1979
   1980	ret = spi_master_suspend(master);
   1981	if (ret)
   1982		return ret;
   1983
   1984	return pm_runtime_force_suspend(dev);
   1985}
   1986
   1987static int __maybe_unused stm32_spi_resume(struct device *dev)
   1988{
   1989	struct spi_master *master = dev_get_drvdata(dev);
   1990	struct stm32_spi *spi = spi_master_get_devdata(master);
   1991	int ret;
   1992
   1993	ret = pm_runtime_force_resume(dev);
   1994	if (ret)
   1995		return ret;
   1996
   1997	ret = spi_master_resume(master);
   1998	if (ret) {
   1999		clk_disable_unprepare(spi->clk);
   2000		return ret;
   2001	}
   2002
   2003	ret = pm_runtime_resume_and_get(dev);
   2004	if (ret < 0) {
    2005		dev_err(dev, "Unable to power device: %d\n", ret);
   2006		return ret;
   2007	}
   2008
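	/*
	 * The controller registers may have been lost across system suspend,
	 * so reapply the static master configuration while holding the
	 * runtime PM reference taken above.
	 */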
   2009	spi->cfg->config(spi);
   2010
   2011	pm_runtime_mark_last_busy(dev);
   2012	pm_runtime_put_autosuspend(dev);
   2013
   2014	return 0;
   2015}
   2016
   2017static const struct dev_pm_ops stm32_spi_pm_ops = {
   2018	SET_SYSTEM_SLEEP_PM_OPS(stm32_spi_suspend, stm32_spi_resume)
   2019	SET_RUNTIME_PM_OPS(stm32_spi_runtime_suspend,
   2020			   stm32_spi_runtime_resume, NULL)
   2021};
   2022
   2023static struct platform_driver stm32_spi_driver = {
   2024	.probe = stm32_spi_probe,
   2025	.remove = stm32_spi_remove,
   2026	.driver = {
   2027		.name = DRIVER_NAME,
   2028		.pm = &stm32_spi_pm_ops,
   2029		.of_match_table = stm32_spi_of_match,
   2030	},
   2031};
   2032
   2033module_platform_driver(stm32_spi_driver);
   2034
   2035MODULE_ALIAS("platform:" DRIVER_NAME);
   2036MODULE_DESCRIPTION("STMicroelectronics STM32 SPI Controller driver");
   2037MODULE_AUTHOR("Amelie Delaunay <amelie.delaunay@st.com>");
   2038MODULE_LICENSE("GPL v2");