Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
freebsd
GitHub Repository: freebsd/freebsd-src
Path: blob/main/sys/contrib/dev/mediatek/mt76/mt76_connac_mac.c
48375 views
1
// SPDX-License-Identifier: ISC
2
/* Copyright (C) 2020 MediaTek Inc. */
3
4
#include "mt76_connac.h"
5
#include "mt76_connac2_mac.h"
6
#include "dma.h"
7
8
#define HE_BITS(f) cpu_to_le16(IEEE80211_RADIOTAP_HE_##f)
9
#define HE_PREP(f, m, v) le16_encode_bits(le32_get_bits(v, MT_CRXV_HE_##m),\
10
IEEE80211_RADIOTAP_HE_##f)
11
12
/* Build the HE PPE-thresholds field: a header carrying the NSS count and
 * the RU index bitmask, followed by packed PPET16/PPET8 pairs replicated
 * from a fixed 3-byte pattern.
 */
void mt76_connac_gen_ppe_thresh(u8 *he_ppet, int nss, enum nl80211_band band)
{
	static const u8 ppet16_ppet8_ru3_ru0[] = { 0x1c, 0xc7, 0x71 };
	u8 i, ppet_bits, ppet_size, ru_bit_mask = 0xf;

	/* on 2 GHz only the two lowest RU indices are advertised */
	if (band == NL80211_BAND_2GHZ)
		ru_bit_mask = 0x3;

	he_ppet[0] = FIELD_PREP(IEEE80211_PPE_THRES_NSS_MASK, nss - 1) |
		     FIELD_PREP(IEEE80211_PPE_THRES_RU_INDEX_BITMASK_MASK,
				ru_bit_mask);

	/* two PPET fields (PPET16 + PPET8) per NSS per set RU bit */
	ppet_bits = IEEE80211_PPE_THRES_INFO_PPET_SIZE *
		    nss * hweight8(ru_bit_mask) * 2;
	ppet_size = DIV_ROUND_UP(ppet_bits, 8);

	for (i = 0; i < ppet_size - 1; i++)
		he_ppet[i + 1] = ppet16_ppet8_ru3_ru0[i % 3];

	/* last byte: keep only the bits still inside ppet_bits */
	he_ppet[i + 1] = ppet16_ppet8_ru3_ru0[i % 3] &
			 (0xff >> (8 - (ppet_bits - 1) % 8));
}
EXPORT_SYMBOL_GPL(mt76_connac_gen_ppe_thresh);
35
36
/* Synchronously wake the device out of runtime power save.
 *
 * Returns 0 when the device is awake (or PM does not apply),
 * -ETIMEDOUT when the wake worker failed to clear MT76_STATE_PM
 * within 3 seconds.
 */
int mt76_connac_pm_wake(struct mt76_phy *phy, struct mt76_connac_pm *pm)
{
	struct mt76_dev *dev = phy->dev;

	/* runtime PM is not used on USB devices */
	if (mt76_is_usb(dev))
		return 0;

	/* make sure a pending power-save transition cannot race the wake */
	cancel_delayed_work_sync(&pm->ps_work);
	if (!test_bit(MT76_STATE_PM, &phy->state))
		return 0;

	if (pm->suspended)
		return 0;

	queue_work(dev->wq, &pm->wake_work);
	if (!wait_event_timeout(pm->wait,
				!test_bit(MT76_STATE_PM, &phy->state),
				3 * HZ)) {
		/* wake failed; unblock mac80211 queues so tx is not stuck */
		ieee80211_wake_queues(phy->hw);
		return -ETIMEDOUT;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_wake);
61
62
void mt76_connac_power_save_sched(struct mt76_phy *phy,
63
struct mt76_connac_pm *pm)
64
{
65
struct mt76_dev *dev = phy->dev;
66
67
if (mt76_is_usb(dev))
68
return;
69
70
if (!pm->enable)
71
return;
72
73
if (pm->suspended)
74
return;
75
76
pm->last_activity = jiffies;
77
78
if (!test_bit(MT76_STATE_PM, &phy->state)) {
79
cancel_delayed_work(&phy->mac_work);
80
queue_delayed_work(dev->wq, &pm->ps_work, pm->idle_timeout);
81
}
82
}
83
EXPORT_SYMBOL_GPL(mt76_connac_power_save_sched);
84
85
void mt76_connac_free_pending_tx_skbs(struct mt76_connac_pm *pm,
86
struct mt76_wcid *wcid)
87
{
88
int i;
89
90
spin_lock_bh(&pm->txq_lock);
91
for (i = 0; i < IEEE80211_NUM_ACS; i++) {
92
if (wcid && pm->tx_q[i].wcid != wcid)
93
continue;
94
95
dev_kfree_skb(pm->tx_q[i].skb);
96
pm->tx_q[i].skb = NULL;
97
}
98
spin_unlock_bh(&pm->txq_lock);
99
}
100
EXPORT_SYMBOL_GPL(mt76_connac_free_pending_tx_skbs);
101
102
/* Park one skb per hardware queue while the device is being woken up.
 * The first frame queued for an AC stops the mac80211 queues and kicks
 * the wake worker; any further frame for the same AC is dropped.
 */
void mt76_connac_pm_queue_skb(struct ieee80211_hw *hw,
			      struct mt76_connac_pm *pm,
			      struct mt76_wcid *wcid,
			      struct sk_buff *skb)
{
	int qid = skb_get_queue_mapping(skb);
	struct mt76_phy *phy = hw->priv;

	spin_lock_bh(&pm->txq_lock);
	if (!pm->tx_q[qid].skb) {
		ieee80211_stop_queues(hw);
		pm->tx_q[qid].wcid = wcid;
		pm->tx_q[qid].skb = skb;
		queue_work(phy->dev->wq, &pm->wake_work);
	} else {
		/* slot already occupied: only one pending frame per AC */
		dev_kfree_skb(skb);
	}
	spin_unlock_bh(&pm->txq_lock);
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_queue_skb);
122
123
/* Flush the frames parked by mt76_connac_pm_queue_skb() once the device
 * is awake, then kick the tx worker.
 */
void mt76_connac_pm_dequeue_skbs(struct mt76_phy *phy,
				 struct mt76_connac_pm *pm)
{
	int i;

	spin_lock_bh(&pm->txq_lock);
	for (i = 0; i < IEEE80211_NUM_ACS; i++) {
		struct mt76_wcid *wcid = pm->tx_q[i].wcid;
		struct ieee80211_sta *sta = NULL;

		if (!pm->tx_q[i].skb)
			continue;

		/* recover the mac80211 station backing this wcid, if any */
		if (wcid && wcid->sta)
			sta = container_of((void *)wcid, struct ieee80211_sta,
					   drv_priv);

		mt76_tx(phy, sta, wcid, pm->tx_q[i].skb);
		pm->tx_q[i].skb = NULL;
	}
	spin_unlock_bh(&pm->txq_lock);

	mt76_worker_schedule(&phy->dev->tx_worker);
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_dequeue_skbs);
148
149
void mt76_connac_tx_complete_skb(struct mt76_dev *mdev,
150
struct mt76_queue_entry *e)
151
{
152
if (!e->txwi) {
153
dev_kfree_skb_any(e->skb);
154
return;
155
}
156
157
if (e->skb)
158
mt76_tx_complete_skb(mdev, e->wcid, e->skb);
159
}
160
EXPORT_SYMBOL_GPL(mt76_connac_tx_complete_skb);
161
162
/* Fill a HW-style txp descriptor: pairs of {buf, len} pointers covering
 * every scatter buffer after the first one, with the last fragment tagged
 * via a chip-specific "last" bit in its length field. After this call
 * only buf[0] (TXD + txp) is handed to the DMA ring directly.
 */
void mt76_connac_write_hw_txp(struct mt76_dev *dev,
			      struct mt76_tx_info *tx_info,
			      void *txp_ptr, u32 id)
{
	struct mt76_connac_hw_txp *txp = txp_ptr;
	struct mt76_connac_txp_ptr *ptr = &txp->ptr[0];
	int i, nbuf = tx_info->nbuf - 1;
	u32 last_mask;

	tx_info->buf[0].len = MT_TXD_SIZE + sizeof(*txp);
	tx_info->nbuf = 1;

	txp->msdu_id[0] = cpu_to_le16(id | MT_MSDU_ID_VALID);

	/* the "last fragment" marker differs between chip generations */
	if (is_mt7663(dev) || is_mt7921(dev) || is_mt7925(dev))
		last_mask = MT_TXD_LEN_LAST;
	else
		last_mask = MT_TXD_LEN_AMSDU_LAST |
			    MT_TXD_LEN_MSDU_LAST;

	for (i = 0; i < nbuf; i++) {
		u16 len = tx_info->buf[i + 1].len & MT_TXD_LEN_MASK;
		u32 addr = tx_info->buf[i + 1].addr;

		if (i == nbuf - 1)
			len |= last_mask;

		/* each txp_ptr entry holds two buffers: buf0/len0, buf1/len1 */
		if (i & 1) {
			ptr->buf1 = cpu_to_le32(addr);
			ptr->len1 = cpu_to_le16(len);
			ptr++;
		} else {
			ptr->buf0 = cpu_to_le32(addr);
			ptr->len0 = cpu_to_le16(len);
		}
	}
}
EXPORT_SYMBOL_GPL(mt76_connac_write_hw_txp);
200
201
/* Undo the DMA mappings recorded in a FW-style txp descriptor. */
static void
mt76_connac_txp_skb_unmap_fw(struct mt76_dev *mdev,
			     struct mt76_connac_fw_txp *txp)
{
	/* connac v1 maps via the main device, later chips via dma_dev */
	struct device *dev = is_connac_v1(mdev) ? mdev->dev : mdev->dma_dev;
	int i;

	for (i = 0; i < txp->nbuf; i++)
		dma_unmap_single(dev, le32_to_cpu(txp->buf[i]),
				 le16_to_cpu(txp->len[i]), DMA_TO_DEVICE);
}
212
213
/* Undo the DMA mappings recorded in a HW-style txp descriptor. The buffer
 * list has no explicit count, so iterate until the chip-specific "last"
 * bit shows up in a length field.
 */
static void
mt76_connac_txp_skb_unmap_hw(struct mt76_dev *dev,
			     struct mt76_connac_hw_txp *txp)
{
	u32 last_mask;
	int i;

	if (is_mt7663(dev) || is_mt7921(dev) || is_mt7925(dev))
		last_mask = MT_TXD_LEN_LAST;
	else
		last_mask = MT_TXD_LEN_MSDU_LAST;

	for (i = 0; i < ARRAY_SIZE(txp->ptr); i++) {
		struct mt76_connac_txp_ptr *ptr = &txp->ptr[i];
		bool last;
		u16 len;

		/* first buffer of the pair */
		len = le16_to_cpu(ptr->len0);
		last = len & last_mask;
		len &= MT_TXD_LEN_MASK;
		dma_unmap_single(dev->dev, le32_to_cpu(ptr->buf0), len,
				 DMA_TO_DEVICE);
		if (last)
			break;

		/* second buffer of the pair */
		len = le16_to_cpu(ptr->len1);
		last = len & last_mask;
		len &= MT_TXD_LEN_MASK;
		dma_unmap_single(dev->dev, le32_to_cpu(ptr->buf1), len,
				 DMA_TO_DEVICE);
		if (last)
			break;
	}
}
247
248
void mt76_connac_txp_skb_unmap(struct mt76_dev *dev,
249
struct mt76_txwi_cache *t)
250
{
251
struct mt76_connac_txp_common *txp;
252
253
txp = mt76_connac_txwi_to_txp(dev, t);
254
if (is_mt76_fw_txp(dev))
255
mt76_connac_txp_skb_unmap_fw(dev, &txp->fw);
256
else
257
mt76_connac_txp_skb_unmap_hw(dev, &txp->hw);
258
}
259
EXPORT_SYMBOL_GPL(mt76_connac_txp_skb_unmap);
260
261
/* Allocate one hardware tx ring and alias it for every data/PSD queue:
 * queues 1..MT_TXQ_PSD all point at q_tx[0].
 *
 * Returns 0 on success or the negative error from mt76_init_tx_queue().
 */
int mt76_connac_init_tx_queues(struct mt76_phy *phy, int idx, int n_desc,
			       int ring_base, void *wed, u32 flags)
{
	int i, err;

	err = mt76_init_tx_queue(phy, 0, idx, n_desc, ring_base,
				 wed, flags);
	if (err < 0)
		return err;

	for (i = 1; i <= MT_TXQ_PSD; i++)
		phy->q_tx[i] = phy->q_tx[0];

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac_init_tx_queues);
277
278
/* Scan one per-band rate mask table (_mcs) for a single-bit entry; when
 * found, set the surrounding scope's 'mode', 'rateidx' and (for non-HT)
 * 'nss', then jump to the caller's 'out' label. Relies on 'mask', 'band',
 * 'mode', 'rateidx' and 'nss' being in scope at the expansion site.
 */
#define __bitrate_mask_check(_mcs, _mode)				\
({									\
	u8 i = 0;							\
	for (nss = 0; i < ARRAY_SIZE(mask->control[band]._mcs); i++) {	\
		if (!mask->control[band]._mcs[i])			\
			continue;					\
		if (hweight16(mask->control[band]._mcs[i]) == 1) {	\
			mode = MT_PHY_TYPE_##_mode;			\
			rateidx = ffs(mask->control[band]._mcs[i]) - 1;	\
			if (mode == MT_PHY_TYPE_HT)			\
				rateidx += 8 * i;			\
			else						\
				nss = i + 1;				\
			goto out;					\
		}							\
	}								\
})
295
296
/* Compute the fixed-rate field (mode/NSS/index) used in the TXD for
 * beacons, multicast and other fixed-rate frames.
 *
 * Fix: the original dereferenced @conf (conf->vif) and the derived mvif
 * (mvif->ctx) *before* the "if (!conf)" NULL check, making the check
 * useless; resolve the chandef first with proper NULL guards.
 */
u16 mt76_connac2_mac_tx_rate_val(struct mt76_phy *mphy,
				 struct ieee80211_bss_conf *conf,
				 bool beacon, bool mcast)
{
	struct cfg80211_chan_def *chandef = &mphy->chandef;
	u8 nss = 0, mode = 0, band;
	int rateidx = 0, mcast_rate;
	int offset = 0;

	/* only dereference conf/mvif when they are actually present */
	if (conf) {
		struct mt76_vif_link *mvif;

		mvif = mt76_vif_conf_link(mphy->dev, conf->vif, conf);
		if (mvif && mvif->ctx)
			chandef = &mvif->ctx->def;
	}
	band = chandef->chan->band;

	if (!conf)
		goto legacy;

	if (is_mt7921(mphy->dev)) {
		rateidx = ffs(conf->basic_rates) - 1;
		goto legacy;
	}

	if (beacon) {
		struct cfg80211_bitrate_mask *mask;

		mask = &conf->beacon_tx_rate;

		/* jumps to 'out' when a single-bit rate mask is found */
		__bitrate_mask_check(he_mcs, HE_SU);
		__bitrate_mask_check(vht_mcs, VHT);
		__bitrate_mask_check(ht_mcs, HT);

		if (hweight32(mask->control[band].legacy) == 1) {
			rateidx = ffs(mask->control[band].legacy) - 1;
			goto legacy;
		}
	}

	mcast_rate = conf->mcast_rate[band];
	if (mcast && mcast_rate > 0)
		rateidx = mcast_rate - 1;
	else
		rateidx = ffs(conf->basic_rates) - 1;

legacy:
	/* non-2 GHz bands skip the CCK entries at the start of mt76_rates */
	if (band != NL80211_BAND_2GHZ)
		offset = 4;

	/* pick the lowest rate for hidden nodes */
	if (rateidx < 0)
		rateidx = 0;

	rateidx += offset;
	if (rateidx >= ARRAY_SIZE(mt76_rates))
		rateidx = offset;

	rateidx = mt76_rates[rateidx].hw_value;
	mode = rateidx >> 8;
	rateidx &= GENMASK(7, 0);
out:
	return FIELD_PREP(MT_TX_RATE_NSS, nss) |
	       FIELD_PREP(MT_TX_RATE_IDX, rateidx) |
	       FIELD_PREP(MT_TX_RATE_MODE, mode);
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_tx_rate_val);
357
358
/* Fill the 802.3 (HW header translation) specific TXD fields: header
 * format, TID, frame/sub-type and the 802.3-vs-802.2 length marker.
 */
static void
mt76_connac2_mac_write_txwi_8023(__le32 *txwi, struct sk_buff *skb,
				 struct mt76_wcid *wcid)
{
	u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
	u8 fc_type, fc_stype;
	u16 ethertype;
	bool wmm = false;
	u32 val;

	if (wcid->sta) {
		struct ieee80211_sta *sta;

		sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
		wmm = sta->wme;
	}

	val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_3) |
	      FIELD_PREP(MT_TXD1_TID, tid);

	/* ethertype >= 0x600 means a true 802.3 (DIX) frame */
	ethertype = get_unaligned_be16(&skb->data[12]);
	if (ethertype >= ETH_P_802_3_MIN)
		val |= MT_TXD1_ETH_802_3;

	txwi[1] |= cpu_to_le32(val);

	/* report as a data frame; QoS subtype only for WMM stations */
	fc_type = IEEE80211_FTYPE_DATA >> 2;
	fc_stype = wmm ? IEEE80211_STYPE_QOS_DATA >> 4 : 0;

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype);

	txwi[2] |= cpu_to_le32(val);

	val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);

	txwi[7] |= cpu_to_le32(val);
}
397
398
/* Fill the 802.11 (no header translation) specific TXD fields: TID
 * (including ADDBA/BAR special cases), header info, fragmentation,
 * BIP handling, sequence-number override for injected frames and the
 * frame type/sub-type words.
 */
static void
mt76_connac2_mac_write_txwi_80211(struct mt76_dev *dev, __le32 *txwi,
				  struct sk_buff *skb,
				  struct ieee80211_key_conf *key)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *)skb->data;
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	bool multicast = is_multicast_ether_addr(hdr->addr1);
	u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
	__le16 fc = hdr->frame_control;
	__le16 sc = hdr->seq_ctrl;
	u8 fc_type, fc_stype;
	u32 val;

	/* ADDBA requests carry the TID in the capability field */
	if (ieee80211_is_action(fc) &&
	    mgmt->u.action.category == WLAN_CATEGORY_BACK &&
	    mgmt->u.action.u.addba_req.action_code == WLAN_ACTION_ADDBA_REQ) {
		u16 capab = le16_to_cpu(mgmt->u.action.u.addba_req.capab);

		txwi[5] |= cpu_to_le32(MT_TXD5_ADD_BA);
		tid = (capab >> 2) & IEEE80211_QOS_CTL_TID_MASK;
	} else if (ieee80211_is_back_req(hdr->frame_control)) {
		struct ieee80211_bar *bar = (struct ieee80211_bar *)hdr;
		u16 control = le16_to_cpu(bar->control);

		tid = FIELD_GET(IEEE80211_BAR_CTRL_TID_INFO_MASK, control);
	}

	/* HDR_INFO holds the MAC header length in 16-bit words */
	val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_11) |
	      FIELD_PREP(MT_TXD1_HDR_INFO,
			 ieee80211_get_hdrlen_from_skb(skb) / 2) |
	      FIELD_PREP(MT_TXD1_TID, tid);

	txwi[1] |= cpu_to_le32(val);

	fc_type = (le16_to_cpu(fc) & IEEE80211_FCTL_FTYPE) >> 2;
	fc_stype = (le16_to_cpu(fc) & IEEE80211_FCTL_STYPE) >> 4;

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype) |
	      FIELD_PREP(MT_TXD2_MULTICAST, multicast);

	/* BIP-protected group mgmt frames: hw must not apply pairwise crypto */
	if (key && multicast && ieee80211_is_robust_mgmt_frame(skb) &&
	    key->cipher == WLAN_CIPHER_SUITE_AES_CMAC) {
		val |= MT_TXD2_BIP;
		txwi[3] &= ~cpu_to_le32(MT_TXD3_PROTECT_FRAME);
	}

	if (!ieee80211_is_data(fc) || multicast ||
	    info->flags & IEEE80211_TX_CTL_USE_MINRATE)
		val |= MT_TXD2_FIX_RATE;

	if (ieee80211_has_morefrags(fc) && ieee80211_is_first_frag(sc))
		val |= FIELD_PREP(MT_TXD2_FRAG, MT_TX_FRAG_FIRST);
	else if (ieee80211_has_morefrags(fc) && !ieee80211_is_first_frag(sc))
		val |= FIELD_PREP(MT_TXD2_FRAG, MT_TX_FRAG_MID);
	else if (!ieee80211_has_morefrags(fc) && !ieee80211_is_first_frag(sc))
		val |= FIELD_PREP(MT_TXD2_FRAG, MT_TX_FRAG_LAST);

	txwi[2] |= cpu_to_le32(val);

	if (ieee80211_is_beacon(fc)) {
		txwi[3] &= ~cpu_to_le32(MT_TXD3_SW_POWER_MGMT);
		txwi[3] |= cpu_to_le32(MT_TXD3_REM_TX_COUNT);
	}

	/* injected frames keep their own sequence numbers */
	if (info->flags & IEEE80211_TX_CTL_INJECTED) {
		u16 seqno = le16_to_cpu(sc);

		if (ieee80211_is_back_req(hdr->frame_control)) {
			struct ieee80211_bar *bar;

			bar = (struct ieee80211_bar *)skb->data;
			seqno = le16_to_cpu(bar->start_seq_num);
		}

		val = MT_TXD3_SN_VALID |
		      FIELD_PREP(MT_TXD3_SEQ, IEEE80211_SEQ_TO_SN(seqno));
		txwi[3] |= cpu_to_le32(val);
		txwi[7] &= ~cpu_to_le32(MT_TXD7_HW_AMSDU);
	}

	/* type/sub-type live in different TXD words on mmio vs usb/sdio */
	if (mt76_is_mmio(dev)) {
		val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
		      FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);
		txwi[7] |= cpu_to_le32(val);
	} else {
		val = FIELD_PREP(MT_TXD8_L_TYPE, fc_type) |
		      FIELD_PREP(MT_TXD8_L_SUB_TYPE, fc_stype);
		txwi[8] |= cpu_to_le32(val);
	}
}
491
492
/* Build the complete TXD for a frame: common words first, then the
 * 802.3- or 802.11-specific parts, and finally the fixed-rate fields
 * when MT_TXD2_FIX_RATE was set.
 */
void mt76_connac2_mac_write_txwi(struct mt76_dev *dev, __le32 *txwi,
				 struct sk_buff *skb, struct mt76_wcid *wcid,
				 struct ieee80211_key_conf *key, int pid,
				 enum mt76_txq_id qid, u32 changed)
{
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	u8 phy_idx = (info->hw_queue & MT_TX_HW_QUEUE_PHY) >> 2;
	struct ieee80211_vif *vif = info->control.vif;
	struct mt76_phy *mphy = &dev->phy;
	u8 p_fmt, q_idx, omac_idx = 0, wmm_idx = 0, band_idx = 0;
	u32 val, sz_txd = mt76_is_mmio(dev) ? MT_TXD_SIZE : MT_SDIO_TXD_SIZE;
	bool is_8023 = info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP;
	bool beacon = !!(changed & (BSS_CHANGED_BEACON |
				    BSS_CHANGED_BEACON_ENABLED));
	bool inband_disc = !!(changed & (BSS_CHANGED_UNSOL_BCAST_PROBE_RESP |
					 BSS_CHANGED_FILS_DISCOVERY));
	bool amsdu_en = wcid->amsdu;

	if (vif) {
		struct mt76_vif_link *mvif = (struct mt76_vif_link *)vif->drv_priv;

		omac_idx = mvif->omac_idx;
		wmm_idx = mvif->wmm_idx;
		band_idx = mvif->band_idx;
	}

	if (phy_idx && dev->phys[MT_BAND1])
		mphy = dev->phys[MT_BAND1];

	/* pick the packet format and LMAC queue */
	if (inband_disc) {
		p_fmt = MT_TX_TYPE_FW;
		q_idx = MT_LMAC_ALTX0;
	} else if (beacon) {
		p_fmt = MT_TX_TYPE_FW;
		q_idx = MT_LMAC_BCN0;
	} else if (qid >= MT_TXQ_PSD) {
		p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF;
		q_idx = MT_LMAC_ALTX0;
	} else {
		p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF;
		q_idx = wmm_idx * MT76_CONNAC_MAX_WMM_SETS +
			mt76_connac_lmac_mapping(skb_get_queue_mapping(skb));

		/* mt7915 WA only counts WED path */
		if (is_mt7915(dev) && mtk_wed_device_active(&dev->mmio.wed))
			wcid->stats.tx_packets++;
	}

	val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len + sz_txd) |
	      FIELD_PREP(MT_TXD0_PKT_FMT, p_fmt) |
	      FIELD_PREP(MT_TXD0_Q_IDX, q_idx);
	txwi[0] = cpu_to_le32(val);

	val = MT_TXD1_LONG_FORMAT |
	      FIELD_PREP(MT_TXD1_WLAN_IDX, wcid->idx) |
	      FIELD_PREP(MT_TXD1_OWN_MAC, omac_idx);
	if (!is_mt7921(dev))
		val |= MT_TXD1_VTA;
	if (phy_idx || band_idx)
		val |= MT_TXD1_TGID;

	txwi[1] = cpu_to_le32(val);
	txwi[2] = 0;

	val = FIELD_PREP(MT_TXD3_REM_TX_COUNT, 15);
	if (!is_mt7921(dev))
		val |= MT_TXD3_SW_POWER_MGMT;
	if (key)
		val |= MT_TXD3_PROTECT_FRAME;
	if (info->flags & IEEE80211_TX_CTL_NO_ACK)
		val |= MT_TXD3_NO_ACK;

	txwi[3] = cpu_to_le32(val);
	txwi[4] = 0;

	val = FIELD_PREP(MT_TXD5_PID, pid);
	if (pid >= MT_PACKET_ID_FIRST) {
		/* host tx status requested: AMSDU would break per-skb status */
		val |= MT_TXD5_TX_STATUS_HOST;
		amsdu_en = 0;
	}

	txwi[5] = cpu_to_le32(val);
	txwi[6] = 0;
	txwi[7] = amsdu_en ? cpu_to_le32(MT_TXD7_HW_AMSDU) : 0;

	if (is_8023)
		mt76_connac2_mac_write_txwi_8023(txwi, skb, wcid);
	else
		mt76_connac2_mac_write_txwi_80211(dev, txwi, skb, key);

	if (txwi[2] & cpu_to_le32(MT_TXD2_FIX_RATE)) {
		/* Fixed rata is available just for 802.11 txd */
		struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
		bool multicast = ieee80211_is_data(hdr->frame_control) &&
				 is_multicast_ether_addr(hdr->addr1);
		/* NOTE(review): &vif->bss_conf is taken without a NULL check
		 * on vif — presumably FIX_RATE frames always carry a vif;
		 * confirm against the callers.
		 */
		u16 rate = mt76_connac2_mac_tx_rate_val(mphy, &vif->bss_conf, beacon,
							multicast);
		u32 val = MT_TXD6_FIXED_BW;

		/* hardware won't add HTC for mgmt/ctrl frame */
		txwi[2] |= cpu_to_le32(MT_TXD2_HTC_VLD);

		val |= FIELD_PREP(MT_TXD6_TX_RATE, rate);
		txwi[6] |= cpu_to_le32(val);
		txwi[3] |= cpu_to_le32(MT_TXD3_BA_DISABLE);

		if (!is_mt7921(dev)) {
			u8 spe_idx = mt76_connac_spe_idx(mphy->antenna_mask);

			if (!spe_idx)
				spe_idx = 24 + phy_idx;
			txwi[7] |= cpu_to_le32(FIELD_PREP(MT_TXD7_SPE_IDX, spe_idx));
		}

		txwi[7] &= ~cpu_to_le32(MT_TXD7_HW_AMSDU);
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_write_txwi);
610
611
/* Parse a TXS event into wcid->rate and the per-station stats.
 *
 * Returns false when the reported rate cannot be decoded (out-of-range
 * MCS or unknown phy mode), true otherwise.
 */
bool mt76_connac2_mac_fill_txs(struct mt76_dev *dev, struct mt76_wcid *wcid,
			       __le32 *txs_data)
{
	struct mt76_sta_stats *stats = &wcid->stats;
	struct ieee80211_supported_band *sband;
	struct mt76_phy *mphy;
	struct rate_info rate = {};
	bool cck = false;
	u32 txrate, txs, mode, stbc;

	txs = le32_to_cpu(txs_data[0]);

	/* PPDU based reporting */
	if (mtk_wed_device_active(&dev->mmio.wed) &&
	    FIELD_GET(MT_TXS0_TXS_FORMAT, txs) > 1) {
		stats->tx_bytes +=
			le32_get_bits(txs_data[5], MT_TXS5_MPDU_TX_BYTE) -
			le32_get_bits(txs_data[7], MT_TXS7_MPDU_RETRY_BYTE);
		stats->tx_failed +=
			le32_get_bits(txs_data[6], MT_TXS6_MPDU_FAIL_CNT);
		stats->tx_retries +=
			le32_get_bits(txs_data[7], MT_TXS7_MPDU_RETRY_CNT);

		if (wcid->sta) {
			struct ieee80211_sta *sta;
			u8 tid;

			sta = container_of((void *)wcid, struct ieee80211_sta,
					   drv_priv);
			tid = FIELD_GET(MT_TXS0_TID, txs);

			/* keep the BA session alive on tx activity */
			ieee80211_refresh_tx_agg_session_timer(sta, tid);
		}
	}

	txrate = FIELD_GET(MT_TXS0_TX_RATE, txs);

	rate.mcs = FIELD_GET(MT_TX_RATE_IDX, txrate);
	rate.nss = FIELD_GET(MT_TX_RATE_NSS, txrate) + 1;
	stbc = FIELD_GET(MT_TX_RATE_STBC, txrate);

	/* STBC doubles the reported spatial streams */
	if (stbc && rate.nss > 1)
		rate.nss >>= 1;

	if (rate.nss - 1 < ARRAY_SIZE(stats->tx_nss))
		stats->tx_nss[rate.nss - 1]++;
	if (rate.mcs < ARRAY_SIZE(stats->tx_mcs))
		stats->tx_mcs[rate.mcs]++;

	mode = FIELD_GET(MT_TX_RATE_MODE, txrate);
	switch (mode) {
	case MT_PHY_TYPE_CCK:
		cck = true;
		fallthrough;
	case MT_PHY_TYPE_OFDM:
		mphy = &dev->phy;
		if (wcid->phy_idx == MT_BAND1 && dev->phys[MT_BAND1])
			mphy = dev->phys[MT_BAND1];

		if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
			sband = &mphy->sband_5g.sband;
		else if (mphy->chandef.chan->band == NL80211_BAND_6GHZ)
			sband = &mphy->sband_6g.sband;
		else
			sband = &mphy->sband_2g.sband;

		/* map the hw rate index back to a bitrate table entry */
		rate.mcs = mt76_get_rate(mphy->dev, sband, rate.mcs, cck);
		rate.legacy = sband->bitrates[rate.mcs].bitrate;
		break;
	case MT_PHY_TYPE_HT:
	case MT_PHY_TYPE_HT_GF:
		if (rate.mcs > 31)
			return false;

		rate.flags = RATE_INFO_FLAGS_MCS;
		if (wcid->rate.flags & RATE_INFO_FLAGS_SHORT_GI)
			rate.flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT_PHY_TYPE_VHT:
		if (rate.mcs > 9)
			return false;

		rate.flags = RATE_INFO_FLAGS_VHT_MCS;
		break;
	case MT_PHY_TYPE_HE_SU:
	case MT_PHY_TYPE_HE_EXT_SU:
	case MT_PHY_TYPE_HE_TB:
	case MT_PHY_TYPE_HE_MU:
		if (rate.mcs > 11)
			return false;

		rate.he_gi = wcid->rate.he_gi;
		rate.he_dcm = FIELD_GET(MT_TX_RATE_DCM, txrate);
		rate.flags = RATE_INFO_FLAGS_HE_MCS;
		break;
	default:
		return false;
	}

	stats->tx_mode[mode]++;

	switch (FIELD_GET(MT_TXS0_BW, txs)) {
	case IEEE80211_STA_RX_BW_160:
		rate.bw = RATE_INFO_BW_160;
		stats->tx_bw[3]++;
		break;
	case IEEE80211_STA_RX_BW_80:
		rate.bw = RATE_INFO_BW_80;
		stats->tx_bw[2]++;
		break;
	case IEEE80211_STA_RX_BW_40:
		rate.bw = RATE_INFO_BW_40;
		stats->tx_bw[1]++;
		break;
	default:
		rate.bw = RATE_INFO_BW_20;
		stats->tx_bw[0]++;
		break;
	}
	wcid->rate = rate;

	return true;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_fill_txs);
735
736
/* Match a per-MPDU TXS report against a pending tx-status skb, fill in
 * its status and complete it. PPDU-format reports are not handled here.
 *
 * Returns true when a matching skb was found and completed.
 */
bool mt76_connac2_mac_add_txs_skb(struct mt76_dev *dev, struct mt76_wcid *wcid,
				  int pid, __le32 *txs_data)
{
	struct sk_buff_head list;
	struct sk_buff *skb;

	if (le32_get_bits(txs_data[0], MT_TXS0_TXS_FORMAT) == MT_TXS_PPDU_FMT)
		return false;

	mt76_tx_status_lock(dev, &list);
	skb = mt76_tx_status_skb_get(dev, wcid, pid, &list);
	if (skb) {
		struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);

		if (!(le32_to_cpu(txs_data[0]) & MT_TXS0_ACK_ERROR_MASK))
			info->flags |= IEEE80211_TX_STAT_ACK;

		info->status.ampdu_len = 1;
		info->status.ampdu_ack_len =
			!!(info->flags & IEEE80211_TX_STAT_ACK);
		/* rate info comes from fill_txs, not the legacy rates array */
		info->status.rates[0].idx = -1;

		mt76_connac2_mac_fill_txs(dev, wcid, txs_data);
		mt76_tx_status_skb_done(dev, skb, &list);
	}
	mt76_tx_status_unlock(dev, &list);

	return !!skb;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_add_txs_skb);
766
767
/* Decode the HE RU allocation from the rx vector into the radiotap
 * status: RU size class plus the offset within that class.
 */
static void
mt76_connac2_mac_decode_he_radiotap_ru(struct mt76_rx_status *status,
				       struct ieee80211_radiotap_he *he,
				       __le32 *rxv)
{
	u32 ru_h, ru_l;
	u8 ru, offs = 0;

	/* the RU index is split across two rx vector words */
	ru_l = le32_get_bits(rxv[0], MT_PRXV_HE_RU_ALLOC_L);
	ru_h = le32_get_bits(rxv[1], MT_PRXV_HE_RU_ALLOC_H);
	ru = (u8)(ru_l | ru_h << 4);

	status->bw = RATE_INFO_BW_HE_RU;

	switch (ru) {
	case 0 ... 36:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_26;
		offs = ru;
		break;
	case 37 ... 52:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_52;
		offs = ru - 37;
		break;
	case 53 ... 60:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_106;
		offs = ru - 53;
		break;
	case 61 ... 64:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_242;
		offs = ru - 61;
		break;
	case 65 ... 66:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_484;
		offs = ru - 65;
		break;
	case 67:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_996;
		break;
	case 68:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_2x996;
		break;
	}

	he->data1 |= HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);
	he->data2 |= HE_BITS(DATA2_RU_OFFSET_KNOWN) |
		     le16_encode_bits(offs,
				      IEEE80211_RADIOTAP_HE_DATA2_RU_OFFSET);
}
815
816
static void
817
mt76_connac2_mac_decode_he_mu_radiotap(struct mt76_dev *dev, struct sk_buff *skb,
818
__le32 *rxv)
819
{
820
struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
821
static struct ieee80211_radiotap_he_mu mu_known = {
822
.flags1 = HE_BITS(MU_FLAGS1_SIG_B_MCS_KNOWN) |
823
HE_BITS(MU_FLAGS1_SIG_B_DCM_KNOWN) |
824
HE_BITS(MU_FLAGS1_CH1_RU_KNOWN) |
825
HE_BITS(MU_FLAGS1_SIG_B_SYMS_USERS_KNOWN),
826
.flags2 = HE_BITS(MU_FLAGS2_BW_FROM_SIG_A_BW_KNOWN),
827
};
828
struct ieee80211_radiotap_he_mu *he_mu;
829
830
if (is_mt7921(dev)) {
831
mu_known.flags1 |= HE_BITS(MU_FLAGS1_SIG_B_COMP_KNOWN);
832
mu_known.flags2 |= HE_BITS(MU_FLAGS2_PUNC_FROM_SIG_A_BW_KNOWN);
833
}
834
835
status->flag |= RX_FLAG_RADIOTAP_HE_MU;
836
837
he_mu = skb_push(skb, sizeof(mu_known));
838
memcpy(he_mu, &mu_known, sizeof(mu_known));
839
840
#define MU_PREP(f, v) le16_encode_bits(v, IEEE80211_RADIOTAP_HE_MU_##f)
841
842
he_mu->flags1 |= MU_PREP(FLAGS1_SIG_B_MCS, status->rate_idx);
843
if (status->he_dcm)
844
he_mu->flags1 |= MU_PREP(FLAGS1_SIG_B_DCM, status->he_dcm);
845
846
he_mu->flags2 |= MU_PREP(FLAGS2_BW_FROM_SIG_A_BW, status->bw) |
847
MU_PREP(FLAGS2_SIG_B_SYMS_USERS,
848
le32_get_bits(rxv[2], MT_CRXV_HE_NUM_USER));
849
850
he_mu->ru_ch1[0] = le32_get_bits(rxv[3], MT_CRXV_HE_RU0);
851
852
if (status->bw >= RATE_INFO_BW_40) {
853
he_mu->flags1 |= HE_BITS(MU_FLAGS1_CH2_RU_KNOWN);
854
he_mu->ru_ch2[0] =
855
le32_get_bits(rxv[3], MT_CRXV_HE_RU1);
856
}
857
858
if (status->bw >= RATE_INFO_BW_80) {
859
he_mu->ru_ch1[1] =
860
le32_get_bits(rxv[3], MT_CRXV_HE_RU2);
861
he_mu->ru_ch2[1] =
862
le32_get_bits(rxv[3], MT_CRXV_HE_RU3);
863
}
864
}
865
866
/* Push an HE radiotap header in front of the frame and fill it from the
 * rx vector; mode-specific fields (SU/EXT-SU/MU/TB) are added on top of
 * the common ones.
 */
void mt76_connac2_mac_decode_he_radiotap(struct mt76_dev *dev,
					 struct sk_buff *skb,
					 __le32 *rxv, u32 mode)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	static const struct ieee80211_radiotap_he known = {
		.data1 = HE_BITS(DATA1_DATA_MCS_KNOWN) |
			 HE_BITS(DATA1_DATA_DCM_KNOWN) |
			 HE_BITS(DATA1_STBC_KNOWN) |
			 HE_BITS(DATA1_CODING_KNOWN) |
			 HE_BITS(DATA1_LDPC_XSYMSEG_KNOWN) |
			 HE_BITS(DATA1_DOPPLER_KNOWN) |
			 HE_BITS(DATA1_SPTL_REUSE_KNOWN) |
			 HE_BITS(DATA1_BSS_COLOR_KNOWN),
		.data2 = HE_BITS(DATA2_GI_KNOWN) |
			 HE_BITS(DATA2_TXBF_KNOWN) |
			 HE_BITS(DATA2_PE_DISAMBIG_KNOWN) |
			 HE_BITS(DATA2_TXOP_KNOWN),
	};
	u32 ltf_size = le32_get_bits(rxv[2], MT_CRXV_HE_LTF_SIZE) + 1;
	struct ieee80211_radiotap_he *he;

	status->flag |= RX_FLAG_RADIOTAP_HE;

	he = skb_push(skb, sizeof(known));
	memcpy(he, &known, sizeof(known));

	he->data3 = HE_PREP(DATA3_BSS_COLOR, BSS_COLOR, rxv[14]) |
		    HE_PREP(DATA3_LDPC_XSYMSEG, LDPC_EXT_SYM, rxv[2]);
	he->data4 = HE_PREP(DATA4_SU_MU_SPTL_REUSE, SR_MASK, rxv[11]);
	he->data5 = HE_PREP(DATA5_PE_DISAMBIG, PE_DISAMBIG, rxv[2]) |
		    le16_encode_bits(ltf_size,
				     IEEE80211_RADIOTAP_HE_DATA5_LTF_SIZE);
	if (le32_to_cpu(rxv[0]) & MT_PRXV_TXBF)
		he->data5 |= HE_BITS(DATA5_TXBF);
	he->data6 = HE_PREP(DATA6_TXOP, TXOP_DUR, rxv[14]) |
		    HE_PREP(DATA6_DOPPLER, DOPPLER, rxv[14]);

	switch (mode) {
	case MT_PHY_TYPE_HE_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BEAM_CHANGE_KNOWN) |
			     HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);

		he->data3 |= HE_PREP(DATA3_BEAM_CHANGE, BEAM_CHNG, rxv[14]) |
			     HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_EXT_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_EXT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_MU:
		he->data1 |= HE_BITS(DATA1_FORMAT_MU) |
			     HE_BITS(DATA1_UL_DL_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		he->data4 |= HE_PREP(DATA4_MU_STA_ID, MU_AID, rxv[7]);

		mt76_connac2_mac_decode_he_radiotap_ru(status, he, rxv);
		mt76_connac2_mac_decode_he_mu_radiotap(dev, skb, rxv);
		break;
	case MT_PHY_TYPE_HE_TB:
		he->data1 |= HE_BITS(DATA1_FORMAT_TRIG) |
			     HE_BITS(DATA1_SPTL_REUSE2_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE3_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE4_KNOWN);

		he->data4 |= HE_PREP(DATA4_TB_SPTL_REUSE1, SR_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE2, SR1_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE3, SR2_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE4, SR3_MASK, rxv[11]);

		mt76_connac2_mac_decode_he_radiotap_ru(status, he, rxv);
		break;
	default:
		break;
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_decode_he_radiotap);
950
/* The HW does not translate the mac header to 802.3 for mesh point.
 * Rebuild the original 802.11 header (plus SNAP/bridge-tunnel header
 * where applicable) in front of the payload from the RXD fields and
 * the translated ethernet header at @hdr_offset.
 *
 * Returns 0 on success, -EINVAL when the frame cannot be reversed.
 */
int mt76_connac2_reverse_frag0_hdr_trans(struct ieee80211_vif *vif,
					 struct sk_buff *skb, u16 hdr_offset)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ethhdr *eth_hdr = (struct ethhdr *)(skb->data + hdr_offset);
	__le32 *rxd = (__le32 *)skb->data;
	struct ieee80211_sta *sta;
	struct ieee80211_hdr hdr;
	u16 frame_control;

	if (le32_get_bits(rxd[3], MT_RXD3_NORMAL_ADDR_TYPE) !=
	    MT_RXD3_NORMAL_U2M)
		return -EINVAL;

	if (!(le32_to_cpu(rxd[1]) & MT_RXD1_NORMAL_GROUP_4))
		return -EINVAL;

	/* NOTE(review): assumes status->wcid is non-NULL and backed by a
	 * station for U2M frames — verify against the callers.
	 */
	sta = container_of((void *)status->wcid, struct ieee80211_sta, drv_priv);

	/* store the info from RXD and ethhdr to avoid being overridden */
	frame_control = le32_get_bits(rxd[6], MT_RXD6_FRAME_CONTROL);
	hdr.frame_control = cpu_to_le16(frame_control);
	hdr.seq_ctrl = cpu_to_le16(le32_get_bits(rxd[8], MT_RXD8_SEQ_CTRL));
	hdr.duration_id = 0;

	ether_addr_copy(hdr.addr1, vif->addr);
	ether_addr_copy(hdr.addr2, sta->addr);
	/* addr3/addr4 depend on the ToDS/FromDS combination */
	switch (frame_control & (IEEE80211_FCTL_TODS |
				 IEEE80211_FCTL_FROMDS)) {
	case 0:
		ether_addr_copy(hdr.addr3, vif->bss_conf.bssid);
		break;
	case IEEE80211_FCTL_FROMDS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_source);
		break;
	case IEEE80211_FCTL_TODS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_dest);
		break;
	case IEEE80211_FCTL_TODS | IEEE80211_FCTL_FROMDS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_dest);
		ether_addr_copy(hdr.addr4, eth_hdr->h_source);
		break;
	default:
		return -EINVAL;
	}

	/* strip the ethernet header, then restore LLC/SNAP when needed */
	skb_pull(skb, hdr_offset + sizeof(struct ethhdr) - 2);
	if (eth_hdr->h_proto == cpu_to_be16(ETH_P_AARP) ||
	    eth_hdr->h_proto == cpu_to_be16(ETH_P_IPX))
		ether_addr_copy(skb_push(skb, ETH_ALEN), bridge_tunnel_header);
	else if (be16_to_cpu(eth_hdr->h_proto) >= ETH_P_802_3_MIN)
		ether_addr_copy(skb_push(skb, ETH_ALEN), rfc1042_header);
	else
		skb_pull(skb, 2);

	if (ieee80211_has_order(hdr.frame_control))
		memcpy(skb_push(skb, IEEE80211_HT_CTL_LEN), &rxd[9],
		       IEEE80211_HT_CTL_LEN);
	if (ieee80211_is_data_qos(hdr.frame_control)) {
		__le16 qos_ctrl;

		qos_ctrl = cpu_to_le16(le32_get_bits(rxd[8], MT_RXD8_QOS_CTL));
		memcpy(skb_push(skb, IEEE80211_QOS_CTL_LEN), &qos_ctrl,
		       IEEE80211_QOS_CTL_LEN);
	}

	/* 3-address headers omit the trailing addr4 field */
	if (ieee80211_has_a4(hdr.frame_control))
		memcpy(skb_push(skb, sizeof(hdr)), &hdr, sizeof(hdr));
	else
		memcpy(skb_push(skb, sizeof(hdr) - 6), &hdr, sizeof(hdr) - 6);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac2_reverse_frag0_hdr_trans);
1025
1026
/* Decode the rx vector rate fields into @status (encoding, MCS/index,
 * NSS, GI, bandwidth) and report the phy mode through @mode.
 *
 * Returns 0 on success or -EINVAL for out-of-range or unknown values.
 */
int mt76_connac2_mac_fill_rx_rate(struct mt76_dev *dev,
				  struct mt76_rx_status *status,
				  struct ieee80211_supported_band *sband,
				  __le32 *rxv, u8 *mode)
{
	u32 v0, v2;
	u8 stbc, gi, bw, dcm, nss;
	int i, idx;
	bool cck = false;

	v0 = le32_to_cpu(rxv[0]);
	v2 = le32_to_cpu(rxv[2]);

	idx = i = FIELD_GET(MT_PRXV_TX_RATE, v0);
	nss = FIELD_GET(MT_PRXV_NSTS, v0) + 1;

	/* mt7915 keeps these fields in a different rx vector word */
	if (!is_mt7915(dev)) {
		stbc = FIELD_GET(MT_PRXV_HT_STBC, v0);
		gi = FIELD_GET(MT_PRXV_HT_SGI, v0);
		*mode = FIELD_GET(MT_PRXV_TX_MODE, v0);
		if (is_mt7921(dev))
			dcm = !!(idx & MT_PRXV_TX_DCM);
		else
			dcm = FIELD_GET(MT_PRXV_DCM, v0);
		bw = FIELD_GET(MT_PRXV_FRAME_MODE, v0);
	} else {
		stbc = FIELD_GET(MT_CRXV_HT_STBC, v2);
		gi = FIELD_GET(MT_CRXV_HT_SHORT_GI, v2);
		*mode = FIELD_GET(MT_CRXV_TX_MODE, v2);
		dcm = !!(idx & GENMASK(3, 0) & MT_PRXV_TX_DCM);
		bw = FIELD_GET(MT_CRXV_FRAME_MODE, v2);
	}

	switch (*mode) {
	case MT_PHY_TYPE_CCK:
		cck = true;
		fallthrough;
	case MT_PHY_TYPE_OFDM:
		i = mt76_get_rate(dev, sband, i, cck);
		break;
	case MT_PHY_TYPE_HT_GF:
	case MT_PHY_TYPE_HT:
		status->encoding = RX_ENC_HT;
		if (gi)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (i > 31)
			return -EINVAL;
		break;
	case MT_PHY_TYPE_VHT:
		status->nss = nss;
		status->encoding = RX_ENC_VHT;
		if (gi)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (i > 11)
			return -EINVAL;
		break;
	case MT_PHY_TYPE_HE_MU:
	case MT_PHY_TYPE_HE_SU:
	case MT_PHY_TYPE_HE_EXT_SU:
	case MT_PHY_TYPE_HE_TB:
		status->nss = nss;
		status->encoding = RX_ENC_HE;
		i &= GENMASK(3, 0);

		if (gi <= NL80211_RATE_INFO_HE_GI_3_2)
			status->he_gi = gi;

		status->he_dcm = dcm;
		break;
	default:
		return -EINVAL;
	}
	status->rate_idx = i;

	switch (bw) {
	case IEEE80211_STA_RX_BW_20:
		break;
	case IEEE80211_STA_RX_BW_40:
		/* NOTE(review): bitwise AND with the MT_PHY_TYPE_HE_EXT_SU
		 * enum value rather than an equality test — this matches
		 * several modes; confirm against the vendor reference
		 * whether that is intentional.
		 */
		if (*mode & MT_PHY_TYPE_HE_EXT_SU &&
		    (idx & MT_PRXV_TX_ER_SU_106T)) {
			status->bw = RATE_INFO_BW_HE_RU;
			status->he_ru =
				NL80211_RATE_INFO_HE_RU_ALLOC_106;
		} else {
			status->bw = RATE_INFO_BW_40;
		}
		break;
	case IEEE80211_STA_RX_BW_80:
		status->bw = RATE_INFO_BW_80;
		break;
	case IEEE80211_STA_RX_BW_160:
		status->bw = RATE_INFO_BW_160;
		break;
	default:
		return -EINVAL;
	}

	status->enc_flags |= RX_ENC_FLAG_STBC_MASK * stbc;
	if (*mode < MT_PHY_TYPE_HE_SU && gi)
		status->enc_flags |= RX_ENC_FLAG_SHORT_GI;

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_fill_rx_rate);
1130
1131
/* Kick off a tx BA session for QoS-data frames on aggregation-capable
 * stations, once per TID.
 */
void mt76_connac2_tx_check_aggr(struct ieee80211_sta *sta, __le32 *txwi)
{
	struct mt76_wcid *wcid;
	u16 fc, tid;
	u32 val;

	if (!sta ||
	    !(sta->deflink.ht_cap.ht_supported || sta->deflink.he_cap.has_he))
		return;

	tid = le32_get_bits(txwi[1], MT_TXD1_TID);
	if (tid >= 6) /* skip VO queue */
		return;

	/* reassemble the frame control from the TXD type/sub-type fields */
	val = le32_to_cpu(txwi[2]);
	fc = FIELD_GET(MT_TXD2_FRAME_TYPE, val) << 2 |
	     FIELD_GET(MT_TXD2_SUB_TYPE, val) << 4;
	if (unlikely(fc != (IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_DATA)))
		return;

	wcid = (struct mt76_wcid *)sta->drv_priv;
	/* the ampdu_state bit makes sure we only trigger this once per TID */
	if (!test_and_set_bit(tid, &wcid->ampdu_state))
		ieee80211_start_tx_ba_session(sta, tid, 0);
}
EXPORT_SYMBOL_GPL(mt76_connac2_tx_check_aggr);
1156
1157
/* Release a txwi cache entry: unmap its DMA buffers, complete the frame
 * (resolving the owning station from the TXD when @sta is NULL) and
 * return the txwi to the pool.
 */
void mt76_connac2_txwi_free(struct mt76_dev *dev, struct mt76_txwi_cache *t,
			    struct ieee80211_sta *sta,
			    struct list_head *free_list)
{
	struct mt76_wcid *wcid;
	__le32 *txwi;
	u16 wcid_idx;

	mt76_connac_txp_skb_unmap(dev, t);
	if (!t->skb)
		goto out;

	txwi = (__le32 *)mt76_get_txwi_ptr(dev, t);
	if (sta) {
		wcid = (struct mt76_wcid *)sta->drv_priv;
		wcid_idx = wcid->idx;
	} else {
		/* no station given: look it up via the TXD wlan index */
		wcid_idx = le32_get_bits(txwi[1], MT_TXD1_WLAN_IDX);
		wcid = __mt76_wcid_ptr(dev, wcid_idx);

		if (wcid && wcid->sta) {
			sta = container_of((void *)wcid, struct ieee80211_sta,
					   drv_priv);
			mt76_wcid_add_poll(dev, wcid);
		}
	}

	/* EAPOL frames must not trigger BA session setup */
	if (sta && likely(t->skb->protocol != cpu_to_be16(ETH_P_PAE)))
		mt76_connac2_tx_check_aggr(sta, txwi);

	__mt76_tx_complete_skb(dev, wcid_idx, t->skb, free_list);
out:
	t->skb = NULL;
	mt76_put_txwi(dev, t);
}
EXPORT_SYMBOL_GPL(mt76_connac2_txwi_free);
1193
1194
/* Tear down the tx token IDR: free every outstanding txwi entry and
 * destroy the allocator. Used on device shutdown/reset paths.
 */
void mt76_connac2_tx_token_put(struct mt76_dev *dev)
{
	struct mt76_txwi_cache *txwi;
	int id;

	spin_lock_bh(&dev->token_lock);
	idr_for_each_entry(&dev->token, txwi, id) {
		mt76_connac2_txwi_free(dev, txwi, NULL, NULL);
		dev->token_count--;
	}
	spin_unlock_bh(&dev->token_lock);
	idr_destroy(&dev->token);
}
EXPORT_SYMBOL_GPL(mt76_connac2_tx_token_put);
1208
1209