1 // SPDX-License-Identifier: ISC
2 /* Copyright (C) 2020 MediaTek Inc. */
6 #include "../mt76_connac2_mac.h"
/* NAPI poll handler for TX completions on the mt7921. */
8 static int mt7921_poll_tx(struct napi_struct *napi, int budget)
10 struct mt7921_dev *dev;
/* Recover the device from the embedded tx_napi context. */
12 dev = container_of(napi, struct mt7921_dev, mt76.tx_napi);
/* If the chip is runtime-suspended we must not touch the hardware;
 * schedule the wake worker and bail out instead. */
14 if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
16 queue_work(dev->mt76.wq, &dev->pm.wake_work);
/* Reap completed TX descriptors, then re-arm the TX-done interrupt
 * only once NAPI polling is actually finished. */
20 mt76_connac_tx_cleanup(&dev->mt76);
21 if (napi_complete(napi))
22 mt76_connac_irq_enable(&dev->mt76, MT_INT_TX_DONE_ALL);
/* Drop the runtime-PM reference taken above. */
23 mt76_connac_pm_unref(&dev->mphy, &dev->pm);
/* NAPI poll handler for RX rings; delegates the actual descriptor
 * processing to the shared mt76 DMA RX poll routine. */
28 static int mt7921_poll_rx(struct napi_struct *napi, int budget)
30 struct mt7921_dev *dev;
/* RX NAPIs share one napi_dev; recover the device from it. */
33 dev = container_of(napi->dev, struct mt7921_dev, mt76.napi_dev);
/* Hardware is asleep: defer to the wake worker rather than poking
 * registers while runtime-suspended. */
35 if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
37 queue_work(dev->mt76.wq, &dev->pm.wake_work);
40 done = mt76_dma_rx_poll(napi, budget);
41 mt76_connac_pm_unref(&dev->mphy, &dev->pm);
/* Program the WFDMA prefetch base/depth for every TX and RX ring.
 * The EXT_CTRL registers pack the prefetch FIFO base offset into the
 * upper 16 bits and the depth into the lower bits; all rings here use
 * a depth of 0x4 with bases spaced 0x40 apart. */
46 static void mt7921_dma_prefetch(struct mt7921_dev *dev)
48 #define PREFETCH(base, depth) ((base) << 16 | (depth))
50 mt76_wr(dev, MT_WFDMA0_RX_RING0_EXT_CTRL, PREFETCH(0x0, 0x4));
51 mt76_wr(dev, MT_WFDMA0_RX_RING2_EXT_CTRL, PREFETCH(0x40, 0x4));
52 mt76_wr(dev, MT_WFDMA0_RX_RING3_EXT_CTRL, PREFETCH(0x80, 0x4));
53 mt76_wr(dev, MT_WFDMA0_RX_RING4_EXT_CTRL, PREFETCH(0xc0, 0x4));
54 mt76_wr(dev, MT_WFDMA0_RX_RING5_EXT_CTRL, PREFETCH(0x100, 0x4));
56 mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, PREFETCH(0x140, 0x4));
57 mt76_wr(dev, MT_WFDMA0_TX_RING1_EXT_CTRL, PREFETCH(0x180, 0x4));
58 mt76_wr(dev, MT_WFDMA0_TX_RING2_EXT_CTRL, PREFETCH(0x1c0, 0x4));
59 mt76_wr(dev, MT_WFDMA0_TX_RING3_EXT_CTRL, PREFETCH(0x200, 0x4));
60 mt76_wr(dev, MT_WFDMA0_TX_RING4_EXT_CTRL, PREFETCH(0x240, 0x4));
61 mt76_wr(dev, MT_WFDMA0_TX_RING5_EXT_CTRL, PREFETCH(0x280, 0x4));
62 mt76_wr(dev, MT_WFDMA0_TX_RING6_EXT_CTRL, PREFETCH(0x2c0, 0x4));
/* NOTE(review): TX rings 16/17 (MCU/FWDL) start at 0x340, leaving a
 * gap after 0x2c0 — presumably intentional per the WFDMA layout. */
63 mt76_wr(dev, MT_WFDMA0_TX_RING16_EXT_CTRL, PREFETCH(0x340, 0x4));
64 mt76_wr(dev, MT_WFDMA0_TX_RING17_EXT_CTRL, PREFETCH(0x380, 0x4));
/* Quiesce the WFDMA engine: stop TX/RX DMA, wait for it to drain,
 * then bypass the DMA scheduler and pulse the WFDMA reset lines.
 * NOTE(review): the role of @force is not visible in this view —
 * presumably it gates the reset portion below; confirm against the
 * full source. */
67 static int mt7921_dma_disable(struct mt7921_dev *dev, bool force)
/* Clear all DMA enable / descriptor-info bits in one shot. */
70 mt76_clear(dev, MT_WFDMA0_GLO_CFG,
71 MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN |
72 MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
73 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
74 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
75 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);
/* Give in-flight DMA up to 100 ticks of 1 msec to go idle. */
77 if (!mt76_poll_msec_tick(dev, MT_WFDMA0_GLO_CFG,
78 MT_WFDMA0_GLO_CFG_TX_DMA_BUSY |
79 MT_WFDMA0_GLO_CFG_RX_DMA_BUSY, 0, 100, 1))
/* Disable the TX DMA scheduler and route around it (bypass mode). */
83 mt76_clear(dev, MT_WFDMA0_GLO_CFG_EXT0,
84 MT_WFDMA0_CSR_TX_DMASHDL_ENABLE);
85 mt76_set(dev, MT_DMASHDL_SW_CONTROL, MT_DMASHDL_DMASHDL_BYPASS);
/* Pulse the WFDMA logic/scheduler reset: assert low, then release. */
89 mt76_clear(dev, MT_WFDMA0_RST,
90 MT_WFDMA0_RST_DMASHDL_ALL_RST |
91 MT_WFDMA0_RST_LOGIC_RST);
93 mt76_set(dev, MT_WFDMA0_RST,
94 MT_WFDMA0_RST_DMASHDL_ALL_RST |
95 MT_WFDMA0_RST_LOGIC_RST);
/* Bring the WFDMA engine up: program ring prefetch, reset TX pointers,
 * configure the global DMA settings and unmask the TX/RX interrupts. */
101 static int mt7921_dma_enable(struct mt7921_dev *dev)
103 /* configure prefetch settings */
104 mt7921_dma_prefetch(dev);
/* Reset the DMA TX ring index pointers (all rings). */
107 mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0);
109 /* configure delay interrupt: disabled (no interrupt moderation) */
110 mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0);
112 mt76_set(dev, MT_WFDMA0_GLO_CFG,
113 MT_WFDMA0_GLO_CFG_TX_WB_DDONE |
114 MT_WFDMA0_GLO_CFG_FIFO_LITTLE_ENDIAN |
115 MT_WFDMA0_GLO_CFG_CLK_GAT_DIS |
116 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
117 MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
118 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);
/* Start the actual TX/RX DMA engines last, after configuration. */
120 mt76_set(dev, MT_WFDMA0_GLO_CFG,
121 MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN);
/* Flag in the dummy CR that DMA state is valid; cleared by firmware
 * on a deep-sleep exit so the host knows a reinit is needed
 * (see mt7921_wpdma_reinit_cond). */
123 mt76_set(dev, MT_WFDMA_DUMMY_CR, MT_WFDMA_NEED_REINIT);
125 /* enable interrupts for TX/RX rings */
126 mt76_connac_irq_enable(&dev->mt76,
127 MT_INT_RX_DONE_ALL | MT_INT_TX_DONE_ALL |
/* Also allow the MCU to wake the host for RX over PCIe. */
129 mt76_set(dev, MT_MCU2HOST_SW_INT_ENA, MT_MCU_CMD_WAKE_RX_PCIE);
/* Full DMA reset: quiesce the engine, reset every TX/MCU/RX queue,
 * flush pending TX status, then re-enable DMA.
 * @force is forwarded to mt7921_dma_disable(). */
134 static int mt7921_dma_reset(struct mt7921_dev *dev, bool force)
138 err = mt7921_dma_disable(dev, force);
142 /* reset hw queues */
143 for (i = 0; i < __MT_TXQ_MAX; i++)
144 mt76_queue_reset(dev, dev->mphy.q_tx[i]);
146 for (i = 0; i < __MT_MCUQ_MAX; i++)
147 mt76_queue_reset(dev, dev->mt76.q_mcu[i]);
149 mt76_for_each_q_rx(&dev->mt76, i)
150 mt76_queue_reset(dev, &dev->mt76.q_rx[i]);
/* Flush any TX status still tracked in software (force = true). */
152 mt76_tx_status_check(&dev->mt76, true);
154 return mt7921_dma_enable(dev);
/* Reset the WiFi subsystem: pulse the software reset line and wait up
 * to 500 ms for the subsystem to report init-done. */
157 int mt7921_wfsys_reset(struct mt7921_dev *dev)
/* Assert reset (clear), then release it (set). */
159 mt76_clear(dev, MT_WFSYS_SW_RST_B, WFSYS_SW_RST_B);
161 mt76_set(dev, MT_WFSYS_SW_RST_B, WFSYS_SW_RST_B);
163 if (!__mt76_poll_msec(&dev->mt76, MT_WFSYS_SW_RST_B,
164 WFSYS_SW_INIT_DONE, WFSYS_SW_INIT_DONE, 500))
/* Reset the whole WPDMA path: drop every pending TX/RX buffer, reset
 * the WiFi subsystem and the DMA engine, then restock the RX rings.
 * NOTE(review): how @force alters the sequence (e.g. whether it gates
 * the wfsys reset) is not visible in this view — confirm against the
 * full source. */
170 int mt7921_wpdma_reset(struct mt7921_dev *dev, bool force)
174 /* clean up hw queues */
175 for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++)
176 mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true);
178 for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++)
179 mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);
181 mt76_for_each_q_rx(&dev->mt76, i)
182 mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);
185 err = mt7921_wfsys_reset(dev);
189 err = mt7921_dma_reset(dev, force);
/* Refill and re-arm every RX queue after the engine reset. */
193 mt76_for_each_q_rx(&dev->mt76, i)
194 mt76_queue_rx_reset(dev, i);
/* Conditionally reinitialize the WPDMA: if the hardware flags that its
 * DMA state was lost (e.g. across deep sleep), mask interrupts, run a
 * non-forced WPDMA reset, then re-enable the PCIe MAC interrupts. */
199 int mt7921_wpdma_reinit_cond(struct mt7921_dev *dev)
201 struct mt76_connac_pm *pm = &dev->pm;
204 /* check if the wpdma must be reinitialized */
205 if (mt7921_dma_need_reinit(dev)) {
206 /* disable interrupts */
207 mt76_wr(dev, MT_WFDMA0_HOST_INT_ENA, 0);
208 mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0x0);
210 err = mt7921_wpdma_reset(dev, false);
212 dev_err(dev->mt76.dev, "wpdma reset failed\n");
216 /* enable interrupts */
217 mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0xff);
/* One-time DMA bring-up at probe: attach the mt76 DMA layer, quiesce
 * and reset the hardware, allocate every TX/MCU/RX ring, start the RX
 * NAPIs and finally enable the DMA engine. */
224 int mt7921_dma_init(struct mt7921_dev *dev)
228 mt76_dma_attach(&dev->mt76);
/* Force-disable DMA before touching any ring state. */
230 ret = mt7921_dma_disable(dev, true);
234 ret = mt7921_wfsys_reset(dev);
/* Band-0 data TX queues. */
239 ret = mt76_connac_init_tx_queues(dev->phy.mt76, MT7921_TXQ_BAND0,
/* NOTE(review): raw write of 0x4 (prefetch depth only, base 0) to TX
 * ring 0 EXT_CTRL here, distinct from mt7921_dma_prefetch() — verify
 * intent against the full source. */
245 mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, 0x4);
/* MCU command queue (WM). */
248 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, MT7921_TXQ_MCU_WM,
249 MT7921_TX_MCU_RING_SIZE, MT_TX_RING_BASE);
253 /* firmware download */
254 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, MT7921_TXQ_FWDL,
255 MT7921_TX_FWDL_RING_SIZE, MT_TX_RING_BASE);
259 /* event from WM before firmware download */
260 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
262 MT7921_RX_MCU_RING_SIZE,
263 MT_RX_BUF_SIZE, MT_RX_EVENT_RING_BASE);
267 /* Change mcu queue after firmware download */
268 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA],
270 MT7921_RX_MCU_RING_SIZE,
271 MT_RX_BUF_SIZE, MT_WFDMA0(0x540));
/* Band-0 data RX queue. */
276 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
277 MT7921_RXQ_BAND0, MT7921_RX_RING_SIZE,
278 MT_RX_BUF_SIZE, MT_RX_DATA_RING_BASE);
/* Register RX NAPIs with mt7921_poll_rx as the poll callback. */
282 ret = mt76_init_queues(dev, mt7921_poll_rx);
286 netif_napi_add_tx(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
288 napi_enable(&dev->mt76.tx_napi);
290 return mt7921_dma_enable(dev);
/* Teardown counterpart of mt7921_dma_init(): stop the DMA engine, wait
 * for it to drain, pulse the WFDMA reset and free the mt76 DMA rings.
 * Mirrors the quiesce sequence in mt7921_dma_disable() but never
 * fails, so it is open-coded here. */
293 void mt7921_dma_cleanup(struct mt7921_dev *dev)
/* Stop TX/RX DMA and clear the descriptor-info bits. */
296 mt76_clear(dev, MT_WFDMA0_GLO_CFG,
297 MT_WFDMA0_GLO_CFG_TX_DMA_EN |
298 MT_WFDMA0_GLO_CFG_RX_DMA_EN |
299 MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
300 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
301 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
302 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);
/* Best-effort wait (100 x 1 msec) for in-flight DMA to go idle;
 * the return value is deliberately ignored on teardown. */
304 mt76_poll_msec_tick(dev, MT_WFDMA0_GLO_CFG,
305 MT_WFDMA0_GLO_CFG_TX_DMA_BUSY |
306 MT_WFDMA0_GLO_CFG_RX_DMA_BUSY, 0, 100, 1);
/* Pulse the WFDMA logic/scheduler reset: assert low, then release. */
309 mt76_clear(dev, MT_WFDMA0_RST,
310 MT_WFDMA0_RST_DMASHDL_ALL_RST |
311 MT_WFDMA0_RST_LOGIC_RST);
313 mt76_set(dev, MT_WFDMA0_RST,
314 MT_WFDMA0_RST_DMASHDL_ALL_RST |
315 MT_WFDMA0_RST_LOGIC_RST);
/* Free all DMA descriptors and buffers owned by the mt76 core. */
317 mt76_dma_cleanup(&dev->mt76);