cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux
Log | Files | Refs | README | LICENSE | sfeed.txt

dma.c (14385B)


      1// SPDX-License-Identifier: ISC
      2/* Copyright (C) 2020 MediaTek Inc. */
      3
      4#include "mt7915.h"
      5#include "../dma.h"
      6#include "mac.h"
      7
      8static int
      9mt7915_init_tx_queues(struct mt7915_phy *phy, int idx, int n_desc, int ring_base)
     10{
     11	struct mt7915_dev *dev = phy->dev;
     12	int i, err;
     13
     14	if (mtk_wed_device_active(&phy->dev->mt76.mmio.wed)) {
     15		ring_base = MT_WED_TX_RING_BASE;
     16		idx -= MT_TXQ_ID(0);
     17	}
     18
     19	err = mt76_init_tx_queue(phy->mt76, 0, idx, n_desc, ring_base,
     20				 MT_WED_Q_TX(idx));
     21	if (err < 0)
     22		return err;
     23
     24	for (i = 0; i <= MT_TXQ_PSD; i++)
     25		phy->mt76->q_tx[i] = phy->mt76->q_tx[0];
     26
     27	return 0;
     28}
     29
     30static void
     31mt7915_tx_cleanup(struct mt7915_dev *dev)
     32{
     33	mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[MT_MCUQ_WM], false);
     34	mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[MT_MCUQ_WA], false);
     35}
     36
     37static int mt7915_poll_tx(struct napi_struct *napi, int budget)
     38{
     39	struct mt7915_dev *dev;
     40
     41	dev = container_of(napi, struct mt7915_dev, mt76.tx_napi);
     42
     43	mt7915_tx_cleanup(dev);
     44
     45	if (napi_complete_done(napi, 0))
     46		mt7915_irq_enable(dev, MT_INT_TX_DONE_MCU);
     47
     48	return 0;
     49}
     50
     51static void mt7915_dma_config(struct mt7915_dev *dev)
     52{
     53#define Q_CONFIG(q, wfdma, int, id) do {		\
     54		if (wfdma)				\
     55			dev->wfdma_mask |= (1 << (q));	\
     56		dev->q_int_mask[(q)] = int;		\
     57		dev->q_id[(q)] = id;			\
     58	} while (0)
     59
     60#define MCUQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(q, (wfdma), (int), (id))
     61#define RXQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(__RXQ(q), (wfdma), (int), (id))
     62#define TXQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(__TXQ(q), (wfdma), (int), (id))
     63
     64	if (is_mt7915(&dev->mt76)) {
     65		RXQ_CONFIG(MT_RXQ_MAIN, WFDMA0, MT_INT_RX_DONE_BAND0, MT7915_RXQ_BAND0);
     66		RXQ_CONFIG(MT_RXQ_MCU, WFDMA1, MT_INT_RX_DONE_WM, MT7915_RXQ_MCU_WM);
     67		RXQ_CONFIG(MT_RXQ_MCU_WA, WFDMA1, MT_INT_RX_DONE_WA, MT7915_RXQ_MCU_WA);
     68		RXQ_CONFIG(MT_RXQ_EXT, WFDMA0, MT_INT_RX_DONE_BAND1, MT7915_RXQ_BAND1);
     69		RXQ_CONFIG(MT_RXQ_EXT_WA, WFDMA1, MT_INT_RX_DONE_WA_EXT, MT7915_RXQ_MCU_WA_EXT);
     70		RXQ_CONFIG(MT_RXQ_MAIN_WA, WFDMA1, MT_INT_RX_DONE_WA_MAIN, MT7915_RXQ_MCU_WA);
     71		TXQ_CONFIG(0, WFDMA1, MT_INT_TX_DONE_BAND0, MT7915_TXQ_BAND0);
     72		TXQ_CONFIG(1, WFDMA1, MT_INT_TX_DONE_BAND1, MT7915_TXQ_BAND1);
     73		MCUQ_CONFIG(MT_MCUQ_WM, WFDMA1, MT_INT_TX_DONE_MCU_WM, MT7915_TXQ_MCU_WM);
     74		MCUQ_CONFIG(MT_MCUQ_WA, WFDMA1, MT_INT_TX_DONE_MCU_WA, MT7915_TXQ_MCU_WA);
     75		MCUQ_CONFIG(MT_MCUQ_FWDL, WFDMA1, MT_INT_TX_DONE_FWDL, MT7915_TXQ_FWDL);
     76	} else {
     77		RXQ_CONFIG(MT_RXQ_MAIN, WFDMA0, MT_INT_RX_DONE_BAND0_MT7916, MT7916_RXQ_BAND0);
     78		RXQ_CONFIG(MT_RXQ_MCU, WFDMA0, MT_INT_RX_DONE_WM, MT7916_RXQ_MCU_WM);
     79		RXQ_CONFIG(MT_RXQ_MCU_WA, WFDMA0, MT_INT_RX_DONE_WA, MT7916_RXQ_MCU_WA);
     80		RXQ_CONFIG(MT_RXQ_EXT, WFDMA0, MT_INT_RX_DONE_BAND1_MT7916, MT7916_RXQ_BAND1);
     81		RXQ_CONFIG(MT_RXQ_EXT_WA, WFDMA0, MT_INT_RX_DONE_WA_EXT_MT7916, MT7916_RXQ_MCU_WA_EXT);
     82		RXQ_CONFIG(MT_RXQ_MAIN_WA, WFDMA0, MT_INT_RX_DONE_WA_MAIN_MT7916, MT7916_RXQ_MCU_WA_MAIN);
     83		TXQ_CONFIG(0, WFDMA0, MT_INT_TX_DONE_BAND0, MT7915_TXQ_BAND0);
     84		TXQ_CONFIG(1, WFDMA0, MT_INT_TX_DONE_BAND1, MT7915_TXQ_BAND1);
     85		MCUQ_CONFIG(MT_MCUQ_WM, WFDMA0, MT_INT_TX_DONE_MCU_WM, MT7915_TXQ_MCU_WM);
     86		MCUQ_CONFIG(MT_MCUQ_WA, WFDMA0, MT_INT_TX_DONE_MCU_WA_MT7916, MT7915_TXQ_MCU_WA);
     87		MCUQ_CONFIG(MT_MCUQ_FWDL, WFDMA0, MT_INT_TX_DONE_FWDL, MT7915_TXQ_FWDL);
     88	}
     89}
     90
/* Program per-ring SRAM prefetch windows at register offset @ofs (0 for
 * the primary HIF; the PCIE1 delta for the secondary one).  Each write
 * packs a 16-bit SRAM base and a depth into one control register.
 */
static void __mt7915_dma_prefetch(struct mt7915_dev *dev, u32 ofs)
{
#define PREFETCH(_base, _depth)	((_base) << 16 | (_depth))
	u32 base = 0;

	/* prefetch SRAM wrapping boundary for tx/rx ring. */
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_FWDL) + ofs, PREFETCH(0x0, 0x4));
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WM) + ofs, PREFETCH(0x40, 0x4));
	mt76_wr(dev, MT_TXQ_EXT_CTRL(0) + ofs, PREFETCH(0x80, 0x4));
	mt76_wr(dev, MT_TXQ_EXT_CTRL(1) + ofs, PREFETCH(0xc0, 0x4));
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x100, 0x4));

	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MCU) + ofs, PREFETCH(0x140, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MCU_WA) + ofs, PREFETCH(0x180, 0x4));
	if (!is_mt7915(&dev->mt76)) {
		/* mt7916-style chips have an extra MAIN_WA ring; shift the
		 * remaining windows up by 0x40 to make room for it.
		 */
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MAIN_WA) + ofs, PREFETCH(0x1c0, 0x4));
		base = 0x40;
	}
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT_WA) + ofs, PREFETCH(0x1c0 + base, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MAIN) + ofs, PREFETCH(0x200 + base, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT) + ofs, PREFETCH(0x240 + base, 0x4));

	/* for mt7915, the ring following the last used ring must also be
	 * initialized (with zero depth).
	 */
	if (is_mt7915(&dev->mt76)) {
		ofs += 0x4;
		mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x140, 0x0));
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT_WA) + ofs, PREFETCH(0x200 + base, 0x0));
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT) + ofs, PREFETCH(0x280 + base, 0x0));
	}
}
    123
    124void mt7915_dma_prefetch(struct mt7915_dev *dev)
    125{
    126	__mt7915_dma_prefetch(dev, 0);
    127	if (dev->hif2)
    128		__mt7915_dma_prefetch(dev, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0));
    129}
    130
/* Disable the WFDMA engines, optionally pulsing a full reset first.
 *
 * @rst: when true, toggle (clear then set) the DMASHDL/logic reset bits
 *       before disabling.  mt7915 chips have a second WFDMA1 engine, and
 *       a second PCIe HIF (dev->hif2) mirrors the register file at
 *       hif1_ofs — both get the same treatment when present.
 */
static void mt7915_dma_disable(struct mt7915_dev *dev, bool rst)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 hif1_ofs = 0;

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	/* reset */
	if (rst) {
		mt76_clear(dev, MT_WFDMA0_RST,
			   MT_WFDMA0_RST_DMASHDL_ALL_RST |
			   MT_WFDMA0_RST_LOGIC_RST);

		mt76_set(dev, MT_WFDMA0_RST,
			 MT_WFDMA0_RST_DMASHDL_ALL_RST |
			 MT_WFDMA0_RST_LOGIC_RST);

		/* only mt7915 has the second WFDMA1 engine */
		if (is_mt7915(mdev)) {
			mt76_clear(dev, MT_WFDMA1_RST,
				   MT_WFDMA1_RST_DMASHDL_ALL_RST |
				   MT_WFDMA1_RST_LOGIC_RST);

			mt76_set(dev, MT_WFDMA1_RST,
				 MT_WFDMA1_RST_DMASHDL_ALL_RST |
				 MT_WFDMA1_RST_LOGIC_RST);
		}

		/* repeat the reset sequence on the second HIF mirror */
		if (dev->hif2) {
			mt76_clear(dev, MT_WFDMA0_RST + hif1_ofs,
				   MT_WFDMA0_RST_DMASHDL_ALL_RST |
				   MT_WFDMA0_RST_LOGIC_RST);

			mt76_set(dev, MT_WFDMA0_RST + hif1_ofs,
				 MT_WFDMA0_RST_DMASHDL_ALL_RST |
				 MT_WFDMA0_RST_LOGIC_RST);

			if (is_mt7915(mdev)) {
				mt76_clear(dev, MT_WFDMA1_RST + hif1_ofs,
					   MT_WFDMA1_RST_DMASHDL_ALL_RST |
					   MT_WFDMA1_RST_LOGIC_RST);

				mt76_set(dev, MT_WFDMA1_RST + hif1_ofs,
					 MT_WFDMA1_RST_DMASHDL_ALL_RST |
					 MT_WFDMA1_RST_LOGIC_RST);
			}
		}
	}

	/* disable TX/RX DMA and the descriptor-info omission features */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (is_mt7915(mdev))
		mt76_clear(dev, MT_WFDMA1_GLO_CFG,
			   MT_WFDMA1_GLO_CFG_TX_DMA_EN |
			   MT_WFDMA1_GLO_CFG_RX_DMA_EN |
			   MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
			   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO |
			   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (dev->hif2) {
		mt76_clear(dev, MT_WFDMA0_GLO_CFG + hif1_ofs,
			   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
			   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
			   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
			   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
			   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

		if (is_mt7915(mdev))
			mt76_clear(dev, MT_WFDMA1_GLO_CFG + hif1_ofs,
				   MT_WFDMA1_GLO_CFG_TX_DMA_EN |
				   MT_WFDMA1_GLO_CFG_RX_DMA_EN |
				   MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
				   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO |
				   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO_PFET2);
	}
}
    213
/* Bring the WFDMA engines up: reset ring indices, disable interrupt
 * delaying, program prefetch, wait for the HIF to go idle, enable TX/RX
 * DMA, and finally unmask the interrupt sources.  Always returns 0.
 */
static int mt7915_dma_enable(struct mt7915_dev *dev)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 hif1_ofs = 0;
	u32 irq_mask;

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	/* reset dma idx */
	mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0);
	if (is_mt7915(mdev))
		mt76_wr(dev, MT_WFDMA1_RST_DTX_PTR, ~0);
	if (dev->hif2) {
		mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR + hif1_ofs, ~0);
		if (is_mt7915(mdev))
			mt76_wr(dev, MT_WFDMA1_RST_DTX_PTR + hif1_ofs, ~0);
	}

	/* turn delayed interrupts off (different register layout on
	 * mt7915 vs. the mt7916-style single-engine chips)
	 */
	mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0);
	if (is_mt7915(mdev)) {
		mt76_wr(dev, MT_WFDMA1_PRI_DLY_INT_CFG0, 0);
	} else {
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG1, 0);
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG2, 0);
	}

	if (dev->hif2) {
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0 + hif1_ofs, 0);
		if (is_mt7915(mdev)) {
			mt76_wr(dev, MT_WFDMA1_PRI_DLY_INT_CFG0 +
				hif1_ofs, 0);
		} else {
			mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG1 +
				hif1_ofs, 0);
			mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG2 +
				hif1_ofs, 0);
		}
	}

	/* configure prefetch settings */
	mt7915_dma_prefetch(dev);

	/* hif wait WFDMA idle */
	mt76_set(dev, MT_WFDMA0_BUSY_ENA,
		 MT_WFDMA0_BUSY_ENA_TX_FIFO0 |
		 MT_WFDMA0_BUSY_ENA_TX_FIFO1 |
		 MT_WFDMA0_BUSY_ENA_RX_FIFO);

	if (is_mt7915(mdev))
		mt76_set(dev, MT_WFDMA1_BUSY_ENA,
			 MT_WFDMA1_BUSY_ENA_TX_FIFO0 |
			 MT_WFDMA1_BUSY_ENA_TX_FIFO1 |
			 MT_WFDMA1_BUSY_ENA_RX_FIFO);

	if (dev->hif2) {
		mt76_set(dev, MT_WFDMA0_BUSY_ENA + hif1_ofs,
			 MT_WFDMA0_PCIE1_BUSY_ENA_TX_FIFO0 |
			 MT_WFDMA0_PCIE1_BUSY_ENA_TX_FIFO1 |
			 MT_WFDMA0_PCIE1_BUSY_ENA_RX_FIFO);

		if (is_mt7915(mdev))
			mt76_set(dev, MT_WFDMA1_BUSY_ENA + hif1_ofs,
				 MT_WFDMA1_PCIE1_BUSY_ENA_TX_FIFO0 |
				 MT_WFDMA1_PCIE1_BUSY_ENA_TX_FIFO1 |
				 MT_WFDMA1_PCIE1_BUSY_ENA_RX_FIFO);
	}

	/* NOTE(review): the poll result (timeout after 1000us) is
	 * ignored and enabling proceeds regardless — presumably
	 * intentional best-effort; confirm against upstream.
	 */
	mt76_poll(dev, MT_WFDMA_EXT_CSR_HIF_MISC,
		  MT_WFDMA_EXT_CSR_HIF_MISC_BUSY, 0, 1000);

	/* set WFDMA Tx/Rx */
	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		 MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (is_mt7915(mdev))
		mt76_set(dev, MT_WFDMA1_GLO_CFG,
			 MT_WFDMA1_GLO_CFG_TX_DMA_EN |
			 MT_WFDMA1_GLO_CFG_RX_DMA_EN |
			 MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
			 MT_WFDMA1_GLO_CFG_OMIT_RX_INFO);

	if (dev->hif2) {
		mt76_set(dev, MT_WFDMA0_GLO_CFG + hif1_ofs,
			 MT_WFDMA0_GLO_CFG_TX_DMA_EN |
			 MT_WFDMA0_GLO_CFG_RX_DMA_EN |
			 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
			 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

		if (is_mt7915(mdev))
			mt76_set(dev, MT_WFDMA1_GLO_CFG + hif1_ofs,
				 MT_WFDMA1_GLO_CFG_TX_DMA_EN |
				 MT_WFDMA1_GLO_CFG_RX_DMA_EN |
				 MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
				 MT_WFDMA1_GLO_CFG_OMIT_RX_INFO);

		mt76_set(dev, MT_WFDMA_HOST_CONFIG,
			 MT_WFDMA_HOST_CONFIG_PDMA_BAND);
	}

	/* enable interrupts for TX/RX rings */
	irq_mask = MT_INT_RX_DONE_MCU |
		   MT_INT_TX_DONE_MCU |
		   MT_INT_MCU_CMD;

	if (!dev->phy.band_idx)
		irq_mask |= MT_INT_BAND0_RX_DONE;

	if (dev->dbdc_support || dev->phy.band_idx)
		irq_mask |= MT_INT_BAND1_RX_DONE;

	/* when WED is active it owns band TX-done interrupts as well */
	if (mtk_wed_device_active(&dev->mt76.mmio.wed)) {
		u32 wed_irq_mask = irq_mask;

		wed_irq_mask |= MT_INT_TX_DONE_BAND0 | MT_INT_TX_DONE_BAND1;
		mt76_wr(dev, MT_INT_WED_MASK_CSR, wed_irq_mask);
		mtk_wed_device_start(&dev->mt76.mmio.wed, wed_irq_mask);
	}

	mt7915_irq_enable(dev, irq_mask);

	return 0;
}
    341
/* Top-level DMA bring-up: configure queue mappings, reset the engines,
 * allocate every TX/MCU/RX ring, start the RX queues and TX NAPI, then
 * enable DMA and interrupts.
 *
 * @phy2: secondary (band1) phy, or NULL in single-band setups.
 * Returns 0 on success or a negative errno from queue allocation.
 */
int mt7915_dma_init(struct mt7915_dev *dev, struct mt7915_phy *phy2)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 wa_rx_base, wa_rx_idx;
	u32 hif1_ofs = 0;
	int ret;

	mt7915_dma_config(dev);

	mt76_dma_attach(&dev->mt76);

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	/* full reset + disable before (re)building the rings */
	mt7915_dma_disable(dev, true);

	if (mtk_wed_device_active(&dev->mt76.mmio.wed)) {
		mt76_set(dev, MT_WFDMA_HOST_CONFIG, MT_WFDMA_HOST_CONFIG_WED);

		/* hand ring control for TX0/TX1/RX1 over to WED */
		mt76_wr(dev, MT_WFDMA_WED_RING_CONTROL,
			FIELD_PREP(MT_WFDMA_WED_RING_CONTROL_TX0, 18) |
			FIELD_PREP(MT_WFDMA_WED_RING_CONTROL_TX1, 19) |
			FIELD_PREP(MT_WFDMA_WED_RING_CONTROL_RX1, 1));
	} else {
		mt76_clear(dev, MT_WFDMA_HOST_CONFIG, MT_WFDMA_HOST_CONFIG_WED);
	}

	/* init tx queue */
	ret = mt7915_init_tx_queues(&dev->phy,
				    MT_TXQ_ID(dev->phy.band_idx),
				    MT7915_TX_RING_SIZE,
				    MT_TXQ_RING_BASE(0));
	if (ret)
		return ret;

	if (phy2) {
		ret = mt7915_init_tx_queues(phy2,
					    MT_TXQ_ID(phy2->band_idx),
					    MT7915_TX_RING_SIZE,
					    MT_TXQ_RING_BASE(1));
		if (ret)
			return ret;
	}

	/* command to WM */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM,
				  MT_MCUQ_ID(MT_MCUQ_WM),
				  MT7915_TX_MCU_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_WM));
	if (ret)
		return ret;

	/* command to WA */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA,
				  MT_MCUQ_ID(MT_MCUQ_WA),
				  MT7915_TX_MCU_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_WA));
	if (ret)
		return ret;

	/* firmware download */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL,
				  MT_MCUQ_ID(MT_MCUQ_FWDL),
				  MT7915_TX_FWDL_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_FWDL));
	if (ret)
		return ret;

	/* event from WM */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
			       MT_RXQ_ID(MT_RXQ_MCU),
			       MT7915_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE,
			       MT_RXQ_RING_BASE(MT_RXQ_MCU));
	if (ret)
		return ret;

	/* event from WA; with WED active this ring carries TX-free
	 * notifications at the WED base instead
	 */
	if (mtk_wed_device_active(&dev->mt76.mmio.wed)) {
		wa_rx_base = MT_WED_RX_RING_BASE;
		wa_rx_idx = MT7915_RXQ_MCU_WA;
		dev->mt76.q_rx[MT_RXQ_MCU_WA].flags = MT_WED_Q_TXFREE;
	} else {
		wa_rx_base = MT_RXQ_RING_BASE(MT_RXQ_MCU_WA);
		wa_rx_idx = MT_RXQ_ID(MT_RXQ_MCU_WA);
	}
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA],
			       wa_rx_idx, MT7915_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE, wa_rx_base);
	if (ret)
		return ret;

	/* rx data queue for band0 */
	if (!dev->phy.band_idx) {
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
				       MT_RXQ_ID(MT_RXQ_MAIN),
				       MT7915_RX_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_MAIN));
		if (ret)
			return ret;
	}

	/* tx free notify event from WA for band0 (mt7916-style only) */
	if (!is_mt7915(mdev)) {
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA],
				       MT_RXQ_ID(MT_RXQ_MAIN_WA),
				       MT7915_RX_MCU_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_MAIN_WA));
		if (ret)
			return ret;
	}

	if (dev->dbdc_support || dev->phy.band_idx) {
		/* rx data queue for band1 (on the second HIF if present) */
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_EXT],
				       MT_RXQ_ID(MT_RXQ_EXT),
				       MT7915_RX_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_EXT) + hif1_ofs);
		if (ret)
			return ret;

		/* tx free notify event from WA for band1 */
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_EXT_WA],
				       MT_RXQ_ID(MT_RXQ_EXT_WA),
				       MT7915_RX_MCU_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_EXT_WA) + hif1_ofs);
		if (ret)
			return ret;
	}

	ret = mt76_init_queues(dev, mt76_dma_rx_poll);
	if (ret < 0)
		return ret;

	netif_napi_add_tx(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
			  mt7915_poll_tx);
	napi_enable(&dev->mt76.tx_napi);

	mt7915_dma_enable(dev);

	return 0;
}
    488
    489void mt7915_dma_cleanup(struct mt7915_dev *dev)
    490{
    491	mt7915_dma_disable(dev, true);
    492
    493	mt76_dma_cleanup(&dev->mt76);
    494}