/* Path: blob/main/sys/contrib/dev/mediatek/mt76/dma.h (48378 views) */
/* SPDX-License-Identifier: ISC */1/*2* Copyright (C) 2016 Felix Fietkau <[email protected]>3*/4#ifndef __MT76_DMA_H5#define __MT76_DMA_H67#define DMA_DUMMY_DATA ((void *)~0)89#define MT_RING_SIZE 0x101011#define MT_DMA_CTL_SD_LEN1 GENMASK(13, 0)12#define MT_DMA_CTL_LAST_SEC1 BIT(14)13#define MT_DMA_CTL_BURST BIT(15)14#define MT_DMA_CTL_SD_LEN0 GENMASK(29, 16)15#define MT_DMA_CTL_LAST_SEC0 BIT(30)16#define MT_DMA_CTL_DMA_DONE BIT(31)17#define MT_DMA_CTL_TO_HOST BIT(8)18#define MT_DMA_CTL_TO_HOST_A BIT(12)19#define MT_DMA_CTL_DROP BIT(14)20#define MT_DMA_CTL_TOKEN GENMASK(31, 16)21#define MT_DMA_CTL_SDP1_H GENMASK(19, 16)22#define MT_DMA_CTL_SDP0_H GENMASK(3, 0)23#define MT_DMA_CTL_WO_DROP BIT(8)2425#define MT_DMA_PPE_CPU_REASON GENMASK(15, 11)26#define MT_DMA_PPE_ENTRY GENMASK(30, 16)27#define MT_DMA_INFO_DMA_FRAG BIT(9)28#define MT_DMA_INFO_PPE_VLD BIT(31)2930#define MT_DMA_CTL_PN_CHK_FAIL BIT(13)31#define MT_DMA_CTL_VER_MASK BIT(7)3233#define MT_DMA_RRO_EN BIT(13)3435#define MT_DMA_WED_IND_CMD_CNT 836#define MT_DMA_WED_IND_REASON GENMASK(15, 12)3738#define MT_DMA_HDR_LEN 439#define MT_RX_INFO_LEN 440#define MT_FCE_INFO_LEN 441#define MT_RX_RXWI_LEN 324243struct mt76_desc {44__le32 buf0;45__le32 ctrl;46__le32 buf1;47__le32 info;48} __packed __aligned(4);4950struct mt76_wed_rro_desc {51__le32 buf0;52__le32 buf1;53} __packed __aligned(4);5455enum mt76_qsel {56MT_QSEL_MGMT,57MT_QSEL_HCCA,58MT_QSEL_EDCA,59MT_QSEL_EDCA_2,60};6162enum mt76_mcu_evt_type {63EVT_CMD_DONE,64EVT_CMD_ERROR,65EVT_CMD_RETRY,66EVT_EVENT_PWR_RSP,67EVT_EVENT_WOW_RSP,68EVT_EVENT_CARRIER_DETECT_RSP,69EVT_EVENT_DFS_DETECT_RSP,70};7172enum mt76_dma_wed_ind_reason {73MT_DMA_WED_IND_REASON_NORMAL,74MT_DMA_WED_IND_REASON_REPEAT,75MT_DMA_WED_IND_REASON_OLDPKT,76};7778int mt76_dma_rx_poll(struct napi_struct *napi, int budget);79void mt76_dma_attach(struct mt76_dev *dev);80void mt76_dma_cleanup(struct mt76_dev *dev);81int mt76_dma_rx_fill(struct mt76_dev *dev, struct mt76_queue *q,82bool 
allow_direct);83void __mt76_dma_queue_reset(struct mt76_dev *dev, struct mt76_queue *q,84bool reset_idx);85void mt76_dma_queue_reset(struct mt76_dev *dev, struct mt76_queue *q);8687static inline void88mt76_dma_reset_tx_queue(struct mt76_dev *dev, struct mt76_queue *q)89{90dev->queue_ops->reset_q(dev, q);91if (mtk_wed_device_active(&dev->mmio.wed))92mt76_wed_dma_setup(dev, q, true);93}9495static inline void96mt76_dma_should_drop_buf(bool *drop, u32 ctrl, u32 buf1, u32 info)97{98if (!drop)99return;100101*drop = !!(ctrl & (MT_DMA_CTL_TO_HOST_A | MT_DMA_CTL_DROP));102if (!(ctrl & MT_DMA_CTL_VER_MASK))103return;104105switch (FIELD_GET(MT_DMA_WED_IND_REASON, buf1)) {106case MT_DMA_WED_IND_REASON_REPEAT:107*drop = true;108break;109case MT_DMA_WED_IND_REASON_OLDPKT:110*drop = !(info & MT_DMA_INFO_DMA_FRAG);111break;112default:113*drop = !!(ctrl & MT_DMA_CTL_PN_CHK_FAIL);114break;115}116}117118static inline void *mt76_priv(struct net_device *dev)119{120struct mt76_dev **priv;121122priv = netdev_priv(dev);123124return *priv;125}126127#endif128129130