// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include "mt76_connac.h"
#include "mt76_connac2_mac.h"
#include "dma.h"

/* Radiotap HE helpers: HE_BITS() yields a little-endian constant for a
 * IEEE80211_RADIOTAP_HE_* flag; HE_PREP() extracts a MT_CRXV_HE_* field
 * from a 32-bit RXV word and re-encodes it into the given radiotap field.
 */
#define HE_BITS(f)		cpu_to_le16(IEEE80211_RADIOTAP_HE_##f)
#define HE_PREP(f, m, v)	le16_encode_bits(le32_get_bits(v, MT_CRXV_HE_##m),\
						 IEEE80211_RADIOTAP_HE_##f)

/* Fill the HE PPE threshold element for @nss spatial streams, advertising
 * RU index bitmask 0x7 (HE80).  The PPET16/PPET8 payload is a fixed 3-byte
 * repeating pattern; the final byte is masked so bits beyond ppet_bits are
 * cleared.
 */
void mt76_connac_gen_ppe_thresh(u8 *he_ppet, int nss)
{
	static const u8 ppet16_ppet8_ru3_ru0[] = { 0x1c, 0xc7, 0x71 };
	u8 i, ppet_bits, ppet_size, ru_bit_mask = 0x7; /* HE80 */

	he_ppet[0] = FIELD_PREP(IEEE80211_PPE_THRES_NSS_MASK, nss - 1) |
		     FIELD_PREP(IEEE80211_PPE_THRES_RU_INDEX_BITMASK_MASK,
				ru_bit_mask);

	/* 2 thresholds (PPET16/PPET8) per NSS per set RU bit */
	ppet_bits = IEEE80211_PPE_THRES_INFO_PPET_SIZE *
		    nss * hweight8(ru_bit_mask) * 2;
	ppet_size = DIV_ROUND_UP(ppet_bits, 8);

	for (i = 0; i < ppet_size - 1; i++)
		he_ppet[i + 1] = ppet16_ppet8_ru3_ru0[i % 3];

	/* last byte: keep only the bits that belong to the PPET field */
	he_ppet[i + 1] = ppet16_ppet8_ru3_ru0[i % 3] &
			 (0xff >> (8 - (ppet_bits - 1) % 8));
}
EXPORT_SYMBOL_GPL(mt76_connac_gen_ppe_thresh);

/* Wake the device out of runtime power save.  No-op for USB devices, when
 * MT76_STATE_PM is not set, or while suspended.  Schedules pm->wake_work
 * and waits up to 3s for the PM bit to clear.
 *
 * Return: 0 on success or nothing-to-do, -ETIMEDOUT if the wakeup stalled
 * (queues are re-woken so traffic is not left stuck).
 */
int mt76_connac_pm_wake(struct mt76_phy *phy, struct mt76_connac_pm *pm)
{
	struct mt76_dev *dev = phy->dev;

	if (mt76_is_usb(dev))
		return 0;

	cancel_delayed_work_sync(&pm->ps_work);
	if (!test_bit(MT76_STATE_PM, &phy->state))
		return 0;

	if (pm->suspended)
		return 0;

	queue_work(dev->wq, &pm->wake_work);
	if (!wait_event_timeout(pm->wait,
				!test_bit(MT76_STATE_PM, &phy->state),
				3 * HZ)) {
		ieee80211_wake_queues(phy->hw);
		return -ETIMEDOUT;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_wake);

/* Record activity and (re)arm the delayed power-save work so the device can
 * drop into low-power state after pm->idle_timeout of inactivity.  Skipped
 * for USB, when PM is disabled, or while suspended.
 */
void mt76_connac_power_save_sched(struct mt76_phy *phy,
				  struct mt76_connac_pm *pm)
{
	struct mt76_dev *dev = phy->dev;

	if (mt76_is_usb(dev))
		return;

	if (!pm->enable)
		return;

	if (pm->suspended)
		return;

	pm->last_activity = jiffies;

	if (!test_bit(MT76_STATE_PM, &phy->state)) {
		cancel_delayed_work(&phy->mac_work);
		queue_delayed_work(dev->wq, &pm->ps_work, pm->idle_timeout);
	}
}
EXPORT_SYMBOL_GPL(mt76_connac_power_save_sched);

/* Free the per-AC tx skbs parked while waiting for a device wakeup.
 * If @wcid is non-NULL, only frames belonging to that station are dropped.
 */
void mt76_connac_free_pending_tx_skbs(struct mt76_connac_pm *pm,
				      struct mt76_wcid *wcid)
{
	int i;

	spin_lock_bh(&pm->txq_lock);
	for (i = 0; i < IEEE80211_NUM_ACS; i++) {
		if (wcid && pm->tx_q[i].wcid != wcid)
			continue;

		dev_kfree_skb(pm->tx_q[i].skb);
		pm->tx_q[i].skb = NULL;
	}
	spin_unlock_bh(&pm->txq_lock);
}
EXPORT_SYMBOL_GPL(mt76_connac_free_pending_tx_skbs);

/* Park one tx skb per AC while the device wakes up.  The first frame queued
 * for an AC stops the mac80211 queues and schedules the wake work; any
 * further frame for the same AC is dropped until the slot is drained.
 */
void mt76_connac_pm_queue_skb(struct ieee80211_hw *hw,
			      struct mt76_connac_pm *pm,
			      struct mt76_wcid *wcid,
			      struct sk_buff *skb)
{
	int qid = skb_get_queue_mapping(skb);
	struct mt76_phy *phy = hw->priv;

	spin_lock_bh(&pm->txq_lock);
	if (!pm->tx_q[qid].skb) {
		ieee80211_stop_queues(hw);
		pm->tx_q[qid].wcid = wcid;
		pm->tx_q[qid].skb = skb;
		queue_work(phy->dev->wq, &pm->wake_work);
	} else {
		dev_kfree_skb(skb);
	}
	spin_unlock_bh(&pm->txq_lock);
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_queue_skb);

/* Transmit the skbs parked by mt76_connac_pm_queue_skb() once the device is
 * awake, then kick the tx worker to flush them out.
 */
void mt76_connac_pm_dequeue_skbs(struct mt76_phy *phy,
				 struct mt76_connac_pm *pm)
{
	int i;

	spin_lock_bh(&pm->txq_lock);
	for (i = 0; i < IEEE80211_NUM_ACS; i++) {
		struct mt76_wcid *wcid = pm->tx_q[i].wcid;
		struct ieee80211_sta *sta = NULL;

		if (!pm->tx_q[i].skb)
			continue;

		/* recover the mac80211 station from the wcid drv_priv */
		if (wcid && wcid->sta)
			sta = container_of((void *)wcid, struct ieee80211_sta,
					   drv_priv);

		mt76_tx(phy, sta, wcid, pm->tx_q[i].skb);
		pm->tx_q[i].skb = NULL;
	}
	spin_unlock_bh(&pm->txq_lock);

	mt76_worker_schedule(&phy->dev->tx_worker);
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_dequeue_skbs);

/* Tx completion hook: on the DMA error path the queue entry carries
 * DMA_DUMMY_DATA instead of the skb, so recover the original skb from the
 * token stored in the txp (fw or hw layout) before reporting tx status.
 */
void mt76_connac_tx_complete_skb(struct mt76_dev *mdev,
				 struct mt76_queue_entry *e)
{
	if (!e->txwi) {
		dev_kfree_skb_any(e->skb);
		return;
	}

	/* error path */
	if (e->skb == DMA_DUMMY_DATA) {
		struct mt76_connac_txp_common *txp;
		struct mt76_txwi_cache *t;
		u16 token;

		txp = mt76_connac_txwi_to_txp(mdev, e->txwi);
		if (is_mt76_fw_txp(mdev))
			token = le16_to_cpu(txp->fw.token);
		else
			token = le16_to_cpu(txp->hw.msdu_id[0]) &
				~MT_MSDU_ID_VALID;

		t = mt76_token_put(mdev, token);
		e->skb = t ? t->skb : NULL;
	}

	if (e->skb)
		mt76_tx_complete_skb(mdev, e->wcid, e->skb);
}
EXPORT_SYMBOL_GPL(mt76_connac_tx_complete_skb);

/* Build the hardware txp: pack the tx_info buffer fragments (beyond buf[0],
 * which becomes TXD + txp) into buf0/buf1 pointer pairs, tagging the final
 * fragment with the chip-specific "last" length bit(s).
 */
void mt76_connac_write_hw_txp(struct mt76_dev *dev,
			      struct mt76_tx_info *tx_info,
			      void *txp_ptr, u32 id)
{
	struct mt76_connac_hw_txp *txp = txp_ptr;
	struct mt76_connac_txp_ptr *ptr = &txp->ptr[0];
	int i, nbuf = tx_info->nbuf - 1;
	u32 last_mask;

	tx_info->buf[0].len = MT_TXD_SIZE + sizeof(*txp);
	tx_info->nbuf = 1;

	txp->msdu_id[0] = cpu_to_le16(id | MT_MSDU_ID_VALID);

	/* "last fragment" encoding differs between chip generations */
	if (is_mt7663(dev) || is_mt7921(dev))
		last_mask = MT_TXD_LEN_LAST;
	else
		last_mask = MT_TXD_LEN_AMSDU_LAST |
			    MT_TXD_LEN_MSDU_LAST;

	for (i = 0; i < nbuf; i++) {
		u16 len = tx_info->buf[i + 1].len & MT_TXD_LEN_MASK;
		u32 addr = tx_info->buf[i + 1].addr;

		if (i == nbuf - 1)
			len |= last_mask;

		/* two buffers per ptr entry: even index -> buf0, odd -> buf1 */
		if (i & 1) {
			ptr->buf1 = cpu_to_le32(addr);
			ptr->len1 = cpu_to_le16(len);
			ptr++;
		} else {
			ptr->buf0 = cpu_to_le32(addr);
			ptr->len0 = cpu_to_le16(len);
		}
	}
}
EXPORT_SYMBOL_GPL(mt76_connac_write_hw_txp);

/* DMA-unmap all fragments described by a firmware-layout txp. */
static void
mt76_connac_txp_skb_unmap_fw(struct mt76_dev *mdev,
			     struct mt76_connac_fw_txp *txp)
{
	/* connac v1 maps via the bus device, later parts via dma_dev */
	struct device *dev = is_connac_v1(mdev) ? mdev->dev : mdev->dma_dev;
	int i;

	for (i = 0; i < txp->nbuf; i++)
		dma_unmap_single(dev, le32_to_cpu(txp->buf[i]),
				 le16_to_cpu(txp->len[i]), DMA_TO_DEVICE);
}

/* DMA-unmap all fragments described by a hardware-layout txp, walking the
 * buf0/buf1 pairs until the "last" length bit terminates the list.
 */
static void
mt76_connac_txp_skb_unmap_hw(struct mt76_dev *dev,
			     struct mt76_connac_hw_txp *txp)
{
	u32 last_mask;
	int i;

	if (is_mt7663(dev) || is_mt7921(dev))
		last_mask = MT_TXD_LEN_LAST;
	else
		last_mask = MT_TXD_LEN_MSDU_LAST;

	for (i = 0; i < ARRAY_SIZE(txp->ptr); i++) {
		struct mt76_connac_txp_ptr *ptr = &txp->ptr[i];
		bool last;
		u16 len;

		len = le16_to_cpu(ptr->len0);
		last = len & last_mask;
		len &= MT_TXD_LEN_MASK;
		dma_unmap_single(dev->dev, le32_to_cpu(ptr->buf0), len,
				 DMA_TO_DEVICE);
		if (last)
			break;

		len = le16_to_cpu(ptr->len1);
		last = len & last_mask;
		len &= MT_TXD_LEN_MASK;
		dma_unmap_single(dev->dev, le32_to_cpu(ptr->buf1), len,
				 DMA_TO_DEVICE);
		if (last)
			break;
	}
}

/* Unmap a tx packet's DMA buffers, dispatching on the txp layout in use. */
void mt76_connac_txp_skb_unmap(struct mt76_dev *dev,
			       struct mt76_txwi_cache *t)
{
	struct mt76_connac_txp_common *txp;

	txp = mt76_connac_txwi_to_txp(dev, t);
	if (is_mt76_fw_txp(dev))
		mt76_connac_txp_skb_unmap_fw(dev, &txp->fw);
	else
		mt76_connac_txp_skb_unmap_hw(dev, &txp->hw);
}
EXPORT_SYMBOL_GPL(mt76_connac_txp_skb_unmap);

/* Allocate tx queue 0 for @phy and alias every data queue up to MT_TXQ_PSD
 * to it (the connac parts share a single hardware data ring per phy).
 *
 * Return: 0 on success or the mt76_init_tx_queue() error code.
 */
int mt76_connac_init_tx_queues(struct mt76_phy *phy, int idx, int n_desc,
			       int ring_base, u32 flags)
{
	int i, err;

	err = mt76_init_tx_queue(phy, 0, idx, n_desc, ring_base, flags);
	if (err < 0)
		return err;

	for (i = 1; i <= MT_TXQ_PSD; i++)
		phy->q_tx[i] = phy->q_tx[0];

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac_init_tx_queues);

/* Helper used only inside mt76_connac2_mac_tx_rate_val(): scan the beacon
 * bitrate mask for a single-rate entry of the given MCS table and, if found,
 * set mode/rateidx/nss in the caller's scope and jump to its "out" label.
 * NOTE: relies on the caller's locals (mask, band, nss, mode, rateidx).
 */
#define __bitrate_mask_check(_mcs, _mode)				\
({									\
	u8 i = 0;							\
	for (nss = 0; i < ARRAY_SIZE(mask->control[band]._mcs); i++) {	\
		if (!mask->control[band]._mcs[i])			\
			continue;					\
		if (hweight16(mask->control[band]._mcs[i]) == 1) {	\
			mode = MT_PHY_TYPE_##_mode;			\
			rateidx = ffs(mask->control[band]._mcs[i]) - 1;	\
			if (mode == MT_PHY_TYPE_HT)			\
				rateidx += 8 * i;			\
			else						\
				nss = i + 1;				\
			goto out;					\
		}							\
	}								\
})

/* Compute the TXD fixed-rate value (NSS | rate index | phy mode) for a
 * frame: honour a single-rate beacon mask when sending beacons, otherwise
 * fall back to the mcast rate or the lowest basic rate, mapped through
 * mt76_calculate_default_rate().
 */
u16 mt76_connac2_mac_tx_rate_val(struct mt76_phy *mphy,
				 struct ieee80211_vif *vif,
				 bool beacon, bool mcast)
{
	u8 nss = 0, mode = 0, band = mphy->chandef.chan->band;
	int rateidx = 0, mcast_rate;

	if (!vif)
		goto legacy;

	/* mt7921 always uses the lowest basic rate here */
	if (is_mt7921(mphy->dev)) {
		rateidx = ffs(vif->bss_conf.basic_rates) - 1;
		goto legacy;
	}

	if (beacon) {
		struct cfg80211_bitrate_mask *mask;

		mask = &vif->bss_conf.beacon_tx_rate;

		/* these jump to "out" when a single fixed rate is set */
		__bitrate_mask_check(he_mcs, HE_SU);
		__bitrate_mask_check(vht_mcs, VHT);
		__bitrate_mask_check(ht_mcs, HT);

		if (hweight32(mask->control[band].legacy) == 1) {
			rateidx = ffs(mask->control[band].legacy) - 1;
			goto legacy;
		}
	}

	mcast_rate = vif->bss_conf.mcast_rate[band];
	if (mcast && mcast_rate > 0)
		rateidx = mcast_rate - 1;
	else
		rateidx = ffs(vif->bss_conf.basic_rates) - 1;

legacy:
	/* returns mode in bits 15:8 and the rate index in bits 7:0 */
	rateidx = mt76_calculate_default_rate(mphy, rateidx);
	mode = rateidx >> 8;
	rateidx &= GENMASK(7, 0);
out:
	return FIELD_PREP(MT_TX_RATE_NSS, nss) |
	       FIELD_PREP(MT_TX_RATE_IDX, rateidx) |
	       FIELD_PREP(MT_TX_RATE_MODE, mode);
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_tx_rate_val);

/* Fill the 802.3 (hw header translation) specific TXD words: header format,
 * TID, ethertype flag and frame type/subtype (QoS-data when the station is
 * WMM capable).
 */
static void
mt76_connac2_mac_write_txwi_8023(__le32 *txwi, struct sk_buff *skb,
				 struct mt76_wcid *wcid)
{
	u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
	u8 fc_type, fc_stype;
	u16 ethertype;
	bool wmm = false;
	u32 val;

	if (wcid->sta) {
		struct ieee80211_sta *sta;

		sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
		wmm = sta->wme;
	}

	val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_3) |
	      FIELD_PREP(MT_TXD1_TID, tid);

	/* flag Ethernet II (DIX) frames vs raw 802.3 length field */
	ethertype = get_unaligned_be16(&skb->data[12]);
	if (ethertype >= ETH_P_802_3_MIN)
		val |= MT_TXD1_ETH_802_3;

	txwi[1] |= cpu_to_le32(val);

	fc_type = IEEE80211_FTYPE_DATA >> 2;
	fc_stype = wmm ? IEEE80211_STYPE_QOS_DATA >> 4 : 0;

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype);

	txwi[2] |= cpu_to_le32(val);

	val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);

	txwi[7] |= cpu_to_le32(val);
}

/* Fill the 802.11 specific TXD words: header info/TID (with special casing
 * for ADDBA requests and BAR frames), frame type/subtype, BIP protection for
 * multicast robust management frames, fixed-rate selection, beacon tweaks
 * and injected-frame sequence number handling.
 */
static void
mt76_connac2_mac_write_txwi_80211(struct mt76_dev *dev, __le32 *txwi,
				  struct sk_buff *skb,
				  struct ieee80211_key_conf *key)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *)skb->data;
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	bool multicast = is_multicast_ether_addr(hdr->addr1);
	u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
	__le16 fc = hdr->frame_control;
	u8 fc_type, fc_stype;
	u32 val;

	if (ieee80211_is_action(fc) &&
	    mgmt->u.action.category == WLAN_CATEGORY_BACK &&
	    mgmt->u.action.u.addba_req.action_code == WLAN_ACTION_ADDBA_REQ) {
		u16 capab = le16_to_cpu(mgmt->u.action.u.addba_req.capab);

		txwi[5] |= cpu_to_le32(MT_TXD5_ADD_BA);
		/* TID comes from the ADDBA capability field, not skb prio */
		tid = (capab >> 2) & IEEE80211_QOS_CTL_TID_MASK;
	} else if (ieee80211_is_back_req(hdr->frame_control)) {
		struct ieee80211_bar *bar = (struct ieee80211_bar *)hdr;
		u16 control = le16_to_cpu(bar->control);

		tid = FIELD_GET(IEEE80211_BAR_CTRL_TID_INFO_MASK, control);
	}

	val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_11) |
	      FIELD_PREP(MT_TXD1_HDR_INFO,
			 ieee80211_get_hdrlen_from_skb(skb) / 2) |
	      FIELD_PREP(MT_TXD1_TID, tid);

	txwi[1] |= cpu_to_le32(val);

	fc_type = (le16_to_cpu(fc) & IEEE80211_FCTL_FTYPE) >> 2;
	fc_stype = (le16_to_cpu(fc) & IEEE80211_FCTL_STYPE) >> 4;

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype) |
	      FIELD_PREP(MT_TXD2_MULTICAST, multicast);

	/* multicast robust mgmt frames are BIP-protected, not pairwise */
	if (key && multicast && ieee80211_is_robust_mgmt_frame(skb) &&
	    key->cipher == WLAN_CIPHER_SUITE_AES_CMAC) {
		val |= MT_TXD2_BIP;
		txwi[3] &= ~cpu_to_le32(MT_TXD3_PROTECT_FRAME);
	}

	if (!ieee80211_is_data(fc) || multicast ||
	    info->flags & IEEE80211_TX_CTL_USE_MINRATE)
		val |= MT_TXD2_FIX_RATE;

	txwi[2] |= cpu_to_le32(val);

	if (ieee80211_is_beacon(fc)) {
		txwi[3] &= ~cpu_to_le32(MT_TXD3_SW_POWER_MGMT);
		txwi[3] |= cpu_to_le32(MT_TXD3_REM_TX_COUNT);
	}

	if (info->flags & IEEE80211_TX_CTL_INJECTED) {
		u16 seqno = le16_to_cpu(hdr->seq_ctrl);

		if (ieee80211_is_back_req(hdr->frame_control)) {
			struct ieee80211_bar *bar;

			bar = (struct ieee80211_bar *)skb->data;
			seqno = le16_to_cpu(bar->start_seq_num);
		}

		/* keep the injected sequence number instead of hw SN */
		val = MT_TXD3_SN_VALID |
		      FIELD_PREP(MT_TXD3_SEQ, IEEE80211_SEQ_TO_SN(seqno));
		txwi[3] |= cpu_to_le32(val);
		txwi[7] &= ~cpu_to_le32(MT_TXD7_HW_AMSDU);
	}

	/* type/subtype live in TXD7 on mmio parts, TXD8 otherwise */
	if (mt76_is_mmio(dev)) {
		val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
		      FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);
		txwi[7] |= cpu_to_le32(val);
	} else {
		val = FIELD_PREP(MT_TXD8_L_TYPE, fc_type) |
		      FIELD_PREP(MT_TXD8_L_SUB_TYPE, fc_stype);
		txwi[8] |= cpu_to_le32(val);
	}
}

/* Build the complete TXWI descriptor for a frame: common words (queue,
 * wlan idx, omac, power-save / ack / protection flags, packet id), then the
 * 802.3 or 802.11 specific words, and finally the fixed-rate words when
 * MT_TXD2_FIX_RATE was set by the 802.11 path.
 */
void mt76_connac2_mac_write_txwi(struct mt76_dev *dev, __le32 *txwi,
				 struct sk_buff *skb, struct mt76_wcid *wcid,
				 struct ieee80211_key_conf *key, int pid,
				 enum mt76_txq_id qid, u32 changed)
{
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	u8 phy_idx = (info->hw_queue & MT_TX_HW_QUEUE_PHY) >> 2;
	struct ieee80211_vif *vif = info->control.vif;
	struct mt76_phy *mphy = &dev->phy;
	u8 p_fmt, q_idx, omac_idx = 0, wmm_idx = 0, band_idx = 0;
	u32 val, sz_txd = mt76_is_mmio(dev) ? MT_TXD_SIZE : MT_SDIO_TXD_SIZE;
	bool is_8023 = info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP;
	bool beacon = !!(changed & (BSS_CHANGED_BEACON |
				    BSS_CHANGED_BEACON_ENABLED));
	bool inband_disc = !!(changed & (BSS_CHANGED_UNSOL_BCAST_PROBE_RESP |
					 BSS_CHANGED_FILS_DISCOVERY));

	if (vif) {
		struct mt76_vif *mvif = (struct mt76_vif *)vif->drv_priv;

		omac_idx = mvif->omac_idx;
		wmm_idx = mvif->wmm_idx;
		band_idx = mvif->band_idx;
	}

	if (phy_idx && dev->phys[MT_BAND1])
		mphy = dev->phys[MT_BAND1];

	/* pick packet format and LMAC queue by frame class */
	if (inband_disc) {
		p_fmt = MT_TX_TYPE_FW;
		q_idx = MT_LMAC_ALTX0;
	} else if (beacon) {
		p_fmt = MT_TX_TYPE_FW;
		q_idx = MT_LMAC_BCN0;
	} else if (qid >= MT_TXQ_PSD) {
		p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF;
		q_idx = MT_LMAC_ALTX0;
	} else {
		p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF;
		q_idx = wmm_idx * MT76_CONNAC_MAX_WMM_SETS +
			mt76_connac_lmac_mapping(skb_get_queue_mapping(skb));

		/* counting non-offloading skbs */
		wcid->stats.tx_bytes += skb->len;
		wcid->stats.tx_packets++;
	}

	val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len + sz_txd) |
	      FIELD_PREP(MT_TXD0_PKT_FMT, p_fmt) |
	      FIELD_PREP(MT_TXD0_Q_IDX, q_idx);
	txwi[0] = cpu_to_le32(val);

	val = MT_TXD1_LONG_FORMAT |
	      FIELD_PREP(MT_TXD1_WLAN_IDX, wcid->idx) |
	      FIELD_PREP(MT_TXD1_OWN_MAC, omac_idx);
	if (!is_mt7921(dev))
		val |= MT_TXD1_VTA;
	if (phy_idx || band_idx)
		val |= MT_TXD1_TGID;

	txwi[1] = cpu_to_le32(val);
	txwi[2] = 0;

	val = FIELD_PREP(MT_TXD3_REM_TX_COUNT, 15);
	if (!is_mt7921(dev))
		val |= MT_TXD3_SW_POWER_MGMT;
	if (key)
		val |= MT_TXD3_PROTECT_FRAME;
	if (info->flags & IEEE80211_TX_CTL_NO_ACK)
		val |= MT_TXD3_NO_ACK;

	txwi[3] = cpu_to_le32(val);
	txwi[4] = 0;

	val = FIELD_PREP(MT_TXD5_PID, pid);
	if (pid >= MT_PACKET_ID_FIRST)
		val |= MT_TXD5_TX_STATUS_HOST;

	txwi[5] = cpu_to_le32(val);
	txwi[6] = 0;
	txwi[7] = wcid->amsdu ? cpu_to_le32(MT_TXD7_HW_AMSDU) : 0;

	if (is_8023)
		mt76_connac2_mac_write_txwi_8023(txwi, skb, wcid);
	else
		mt76_connac2_mac_write_txwi_80211(dev, txwi, skb, key);

	if (txwi[2] & cpu_to_le32(MT_TXD2_FIX_RATE)) {
		/* Fixed rate is available just for 802.11 txd */
		struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
		bool multicast = ieee80211_is_data(hdr->frame_control) &&
				 is_multicast_ether_addr(hdr->addr1);
		u16 rate = mt76_connac2_mac_tx_rate_val(mphy, vif, beacon,
							multicast);
		u32 val = MT_TXD6_FIXED_BW;

		/* hardware won't add HTC for mgmt/ctrl frame */
		txwi[2] |= cpu_to_le32(MT_TXD2_HTC_VLD);

		val |= FIELD_PREP(MT_TXD6_TX_RATE, rate);
		txwi[6] |= cpu_to_le32(val);
		txwi[3] |= cpu_to_le32(MT_TXD3_BA_DISABLE);

		if (!is_mt7921(dev)) {
			u8 spe_idx = mt76_connac_spe_idx(mphy->antenna_mask);

			/* 24/25 map to band0/band1 when no spe idx matches */
			if (!spe_idx)
				spe_idx = 24 + phy_idx;
			txwi[7] |= cpu_to_le32(FIELD_PREP(MT_TXD7_SPE_IDX, spe_idx));
		}
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_write_txwi);

/* Parse a TXS event into per-station statistics and the cached tx rate.
 * PPDU-formatted reports additionally update byte/packet/retry counters and
 * refresh the BA session timer.
 *
 * Return: true if the rate could be decoded, false on an invalid mode/MCS.
 */
bool mt76_connac2_mac_fill_txs(struct mt76_dev *dev, struct mt76_wcid *wcid,
			       __le32 *txs_data)
{
	struct mt76_sta_stats *stats = &wcid->stats;
	struct ieee80211_supported_band *sband;
	struct mt76_phy *mphy;
	struct rate_info rate = {};
	bool cck = false;
	u32 txrate, txs, mode, stbc;

	txs = le32_to_cpu(txs_data[0]);

	/* PPDU based reporting */
	if (FIELD_GET(MT_TXS0_TXS_FORMAT, txs) > 1) {
		stats->tx_bytes +=
			le32_get_bits(txs_data[5], MT_TXS5_MPDU_TX_BYTE) -
			le32_get_bits(txs_data[7], MT_TXS7_MPDU_RETRY_BYTE);
		stats->tx_packets +=
			le32_get_bits(txs_data[5], MT_TXS5_MPDU_TX_CNT);
		stats->tx_failed +=
			le32_get_bits(txs_data[6], MT_TXS6_MPDU_FAIL_CNT);
		stats->tx_retries +=
			le32_get_bits(txs_data[7], MT_TXS7_MPDU_RETRY_CNT);

		if (wcid->sta) {
			struct ieee80211_sta *sta;
			u8 tid;

			sta = container_of((void *)wcid, struct ieee80211_sta,
					   drv_priv);
			tid = FIELD_GET(MT_TXS0_TID, txs);

			ieee80211_refresh_tx_agg_session_timer(sta, tid);
		}
	}

	txrate = FIELD_GET(MT_TXS0_TX_RATE, txs);

	rate.mcs = FIELD_GET(MT_TX_RATE_IDX, txrate);
	rate.nss = FIELD_GET(MT_TX_RATE_NSS, txrate) + 1;
	stbc = FIELD_GET(MT_TX_RATE_STBC, txrate);

	/* STBC doubles the reported stream count */
	if (stbc && rate.nss > 1)
		rate.nss >>= 1;

	if (rate.nss - 1 < ARRAY_SIZE(stats->tx_nss))
		stats->tx_nss[rate.nss - 1]++;
	if (rate.mcs < ARRAY_SIZE(stats->tx_mcs))
		stats->tx_mcs[rate.mcs]++;

	mode = FIELD_GET(MT_TX_RATE_MODE, txrate);
	switch (mode) {
	case MT_PHY_TYPE_CCK:
		cck = true;
		fallthrough;
	case MT_PHY_TYPE_OFDM:
		mphy = &dev->phy;
		if (wcid->phy_idx == MT_BAND1 && dev->phys[MT_BAND1])
			mphy = dev->phys[MT_BAND1];

		if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
			sband = &mphy->sband_5g.sband;
		else if (mphy->chandef.chan->band == NL80211_BAND_6GHZ)
			sband = &mphy->sband_6g.sband;
		else
			sband = &mphy->sband_2g.sband;

		rate.mcs = mt76_get_rate(mphy->dev, sband, rate.mcs, cck);
		rate.legacy = sband->bitrates[rate.mcs].bitrate;
		break;
	case MT_PHY_TYPE_HT:
	case MT_PHY_TYPE_HT_GF:
		if (rate.mcs > 31)
			return false;

		rate.flags = RATE_INFO_FLAGS_MCS;
		if (wcid->rate.flags & RATE_INFO_FLAGS_SHORT_GI)
			rate.flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT_PHY_TYPE_VHT:
		if (rate.mcs > 9)
			return false;

		rate.flags = RATE_INFO_FLAGS_VHT_MCS;
		break;
	case MT_PHY_TYPE_HE_SU:
	case MT_PHY_TYPE_HE_EXT_SU:
	case MT_PHY_TYPE_HE_TB:
	case MT_PHY_TYPE_HE_MU:
		if (rate.mcs > 11)
			return false;

		rate.he_gi = wcid->rate.he_gi;
		rate.he_dcm = FIELD_GET(MT_TX_RATE_DCM, txrate);
		rate.flags = RATE_INFO_FLAGS_HE_MCS;
		break;
	default:
		return false;
	}

	stats->tx_mode[mode]++;

	switch (FIELD_GET(MT_TXS0_BW, txs)) {
	case IEEE80211_STA_RX_BW_160:
		rate.bw = RATE_INFO_BW_160;
		stats->tx_bw[3]++;
		break;
	case IEEE80211_STA_RX_BW_80:
		rate.bw = RATE_INFO_BW_80;
		stats->tx_bw[2]++;
		break;
	case IEEE80211_STA_RX_BW_40:
		rate.bw = RATE_INFO_BW_40;
		stats->tx_bw[1]++;
		break;
	default:
		rate.bw = RATE_INFO_BW_20;
		stats->tx_bw[0]++;
		break;
	}
	wcid->rate = rate;

	return true;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_fill_txs);

/* Match a TXS report to its pending status skb (by packet id), fill in the
 * ack/ampdu status, and complete the skb.
 *
 * Return: true if a matching skb was found and completed.
 */
bool mt76_connac2_mac_add_txs_skb(struct mt76_dev *dev, struct mt76_wcid *wcid,
				  int pid, __le32 *txs_data)
{
	struct sk_buff_head list;
	struct sk_buff *skb;

	mt76_tx_status_lock(dev, &list);
	skb = mt76_tx_status_skb_get(dev, wcid, pid, &list);
	if (skb) {
		struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
		bool noacked = !(info->flags & IEEE80211_TX_STAT_ACK);

		if (!(le32_to_cpu(txs_data[0]) & MT_TXS0_ACK_ERROR_MASK))
			info->flags |= IEEE80211_TX_STAT_ACK;

		info->status.ampdu_len = 1;
		info->status.ampdu_ack_len = !noacked;
		info->status.rates[0].idx = -1;

		wcid->stats.tx_failed += noacked;

		mt76_connac2_mac_fill_txs(dev, wcid, txs_data);
		mt76_tx_status_skb_done(dev, skb, &list);
	}
	mt76_tx_status_unlock(dev, &list);

	return !!skb;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_add_txs_skb);

/* Decode the HE RU allocation from the RXV into rx status and radiotap:
 * translate the 7-bit RU index into an nl80211 RU size plus an offset
 * within that size class.
 */
static void
mt76_connac2_mac_decode_he_radiotap_ru(struct mt76_rx_status *status,
				       struct ieee80211_radiotap_he *he,
				       __le32 *rxv)
{
	u32 ru_h, ru_l;
	u8 ru, offs = 0;

	ru_l = le32_get_bits(rxv[0], MT_PRXV_HE_RU_ALLOC_L);
	ru_h = le32_get_bits(rxv[1], MT_PRXV_HE_RU_ALLOC_H);
	ru = (u8)(ru_l | ru_h << 4);

	status->bw = RATE_INFO_BW_HE_RU;

	switch (ru) {
	case 0 ... 36:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_26;
		offs = ru;
		break;
	case 37 ... 52:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_52;
		offs = ru - 37;
		break;
	case 53 ... 60:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_106;
		offs = ru - 53;
		break;
	case 61 ... 64:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_242;
		offs = ru - 61;
		break;
	case 65 ... 66:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_484;
		offs = ru - 65;
		break;
	case 67:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_996;
		break;
	case 68:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_2x996;
		break;
	}

	he->data1 |= HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);
	he->data2 |= HE_BITS(DATA2_RU_OFFSET_KNOWN) |
		     le16_encode_bits(offs,
				      IEEE80211_RADIOTAP_HE_DATA2_RU_OFFSET);
}

/* Prepend a radiotap HE-MU header to the rx skb, filled from the RXV
 * (SIG-B MCS/DCM, number of users and per-channel RU allocations).
 * NOTE(review): mu_known is static but mutated for mt7921 — this assumes
 * the chip type never changes at runtime for a given module instance.
 */
static void
mt76_connac2_mac_decode_he_mu_radiotap(struct mt76_dev *dev, struct sk_buff *skb,
				       __le32 *rxv)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	static struct ieee80211_radiotap_he_mu mu_known = {
		.flags1 = HE_BITS(MU_FLAGS1_SIG_B_MCS_KNOWN) |
			  HE_BITS(MU_FLAGS1_SIG_B_DCM_KNOWN) |
			  HE_BITS(MU_FLAGS1_CH1_RU_KNOWN) |
			  HE_BITS(MU_FLAGS1_SIG_B_SYMS_USERS_KNOWN),
		.flags2 = HE_BITS(MU_FLAGS2_BW_FROM_SIG_A_BW_KNOWN),
	};
	struct ieee80211_radiotap_he_mu *he_mu;

	if (is_mt7921(dev)) {
		mu_known.flags1 |= HE_BITS(MU_FLAGS1_SIG_B_COMP_KNOWN);
		mu_known.flags2 |= HE_BITS(MU_FLAGS2_PUNC_FROM_SIG_A_BW_KNOWN);
	}

	status->flag |= RX_FLAG_RADIOTAP_HE_MU;

	he_mu = skb_push(skb, sizeof(mu_known));
	memcpy(he_mu, &mu_known, sizeof(mu_known));

#define MU_PREP(f, v)	le16_encode_bits(v, IEEE80211_RADIOTAP_HE_MU_##f)

	he_mu->flags1 |= MU_PREP(FLAGS1_SIG_B_MCS, status->rate_idx);
	if (status->he_dcm)
		he_mu->flags1 |= MU_PREP(FLAGS1_SIG_B_DCM, status->he_dcm);

	he_mu->flags2 |= MU_PREP(FLAGS2_BW_FROM_SIG_A_BW, status->bw) |
			 MU_PREP(FLAGS2_SIG_B_SYMS_USERS,
				 le32_get_bits(rxv[2], MT_CRXV_HE_NUM_USER));

	he_mu->ru_ch1[0] = le32_get_bits(rxv[3], MT_CRXV_HE_RU0);

	if (status->bw >= RATE_INFO_BW_40) {
		he_mu->flags1 |= HE_BITS(MU_FLAGS1_CH2_RU_KNOWN);
		he_mu->ru_ch2[0] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU1);
	}

	if (status->bw >= RATE_INFO_BW_80) {
		he_mu->ru_ch1[1] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU2);
		he_mu->ru_ch2[1] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU3);
	}
}

/* Prepend a radiotap HE header to the rx skb, filled from the RXV words,
 * then add mode-specific fields (SU/EXT-SU/MU/trigger-based), including RU
 * and HE-MU decoding where applicable.
 */
void mt76_connac2_mac_decode_he_radiotap(struct mt76_dev *dev,
					 struct sk_buff *skb,
					 __le32 *rxv, u32 mode)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	static const struct ieee80211_radiotap_he known = {
		.data1 = HE_BITS(DATA1_DATA_MCS_KNOWN) |
			 HE_BITS(DATA1_DATA_DCM_KNOWN) |
			 HE_BITS(DATA1_STBC_KNOWN) |
			 HE_BITS(DATA1_CODING_KNOWN) |
			 HE_BITS(DATA1_LDPC_XSYMSEG_KNOWN) |
			 HE_BITS(DATA1_DOPPLER_KNOWN) |
			 HE_BITS(DATA1_SPTL_REUSE_KNOWN) |
			 HE_BITS(DATA1_BSS_COLOR_KNOWN),
		.data2 = HE_BITS(DATA2_GI_KNOWN) |
			 HE_BITS(DATA2_TXBF_KNOWN) |
			 HE_BITS(DATA2_PE_DISAMBIG_KNOWN) |
			 HE_BITS(DATA2_TXOP_KNOWN),
	};
	u32 ltf_size = le32_get_bits(rxv[2], MT_CRXV_HE_LTF_SIZE) + 1;
	struct ieee80211_radiotap_he *he;

	status->flag |= RX_FLAG_RADIOTAP_HE;

	he = skb_push(skb, sizeof(known));
	memcpy(he, &known, sizeof(known));

	he->data3 = HE_PREP(DATA3_BSS_COLOR, BSS_COLOR, rxv[14]) |
		    HE_PREP(DATA3_LDPC_XSYMSEG, LDPC_EXT_SYM, rxv[2]);
	he->data4 = HE_PREP(DATA4_SU_MU_SPTL_REUSE, SR_MASK, rxv[11]);
	he->data5 = HE_PREP(DATA5_PE_DISAMBIG, PE_DISAMBIG, rxv[2]) |
		    le16_encode_bits(ltf_size,
				     IEEE80211_RADIOTAP_HE_DATA5_LTF_SIZE);
	if (le32_to_cpu(rxv[0]) & MT_PRXV_TXBF)
		he->data5 |= HE_BITS(DATA5_TXBF);
	he->data6 = HE_PREP(DATA6_TXOP, TXOP_DUR, rxv[14]) |
		    HE_PREP(DATA6_DOPPLER, DOPPLER, rxv[14]);

	switch (mode) {
	case MT_PHY_TYPE_HE_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BEAM_CHANGE_KNOWN) |
			     HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);

		he->data3 |= HE_PREP(DATA3_BEAM_CHANGE, BEAM_CHNG, rxv[14]) |
			     HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_EXT_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_EXT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_MU:
		he->data1 |= HE_BITS(DATA1_FORMAT_MU) |
			     HE_BITS(DATA1_UL_DL_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		he->data4 |= HE_PREP(DATA4_MU_STA_ID, MU_AID, rxv[7]);

		mt76_connac2_mac_decode_he_radiotap_ru(status, he, rxv);
		mt76_connac2_mac_decode_he_mu_radiotap(dev, skb, rxv);
		break;
	case MT_PHY_TYPE_HE_TB:
		he->data1 |= HE_BITS(DATA1_FORMAT_TRIG) |
			     HE_BITS(DATA1_SPTL_REUSE2_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE3_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE4_KNOWN);

		he->data4 |= HE_PREP(DATA4_TB_SPTL_REUSE1, SR_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE2, SR1_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE3, SR2_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE4, SR3_MASK, rxv[11]);

		mt76_connac2_mac_decode_he_radiotap_ru(status, he, rxv);
		break;
	default:
		break;
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_decode_he_radiotap);

/* The HW does not translate the mac header to 802.3 for mesh point.
 * Rebuild the 802.11 header from the RXD words and the 802.3 header the
 * hardware produced, so mac80211 sees the original frame layout.  Only
 * unicast-to-me frames carrying RXD group 4 info can be reversed.
 *
 * Return: 0 on success, -EINVAL if the frame cannot be reconstructed.
 */
int mt76_connac2_reverse_frag0_hdr_trans(struct ieee80211_vif *vif,
					 struct sk_buff *skb, u16 hdr_offset)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ethhdr *eth_hdr = (struct ethhdr *)(skb->data + hdr_offset);
	__le32 *rxd = (__le32 *)skb->data;
	struct ieee80211_sta *sta;
	struct ieee80211_hdr hdr;
	u16 frame_control;

	if (le32_get_bits(rxd[3], MT_RXD3_NORMAL_ADDR_TYPE) !=
	    MT_RXD3_NORMAL_U2M)
		return -EINVAL;

	if (!(le32_to_cpu(rxd[1]) & MT_RXD1_NORMAL_GROUP_4))
		return -EINVAL;

	sta = container_of((void *)status->wcid, struct ieee80211_sta, drv_priv);

	/* store the info from RXD and ethhdr to avoid being overridden */
	frame_control = le32_get_bits(rxd[6], MT_RXD6_FRAME_CONTROL);
	hdr.frame_control = cpu_to_le16(frame_control);
	hdr.seq_ctrl = cpu_to_le16(le32_get_bits(rxd[8], MT_RXD8_SEQ_CTRL));
	hdr.duration_id = 0;

	ether_addr_copy(hdr.addr1, vif->addr);
	ether_addr_copy(hdr.addr2, sta->addr);
	/* addr3/addr4 depend on the ToDS/FromDS combination */
	switch (frame_control & (IEEE80211_FCTL_TODS |
				 IEEE80211_FCTL_FROMDS)) {
	case 0:
		ether_addr_copy(hdr.addr3, vif->bss_conf.bssid);
		break;
	case IEEE80211_FCTL_FROMDS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_source);
		break;
	case IEEE80211_FCTL_TODS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_dest);
		break;
	case IEEE80211_FCTL_TODS | IEEE80211_FCTL_FROMDS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_dest);
		ether_addr_copy(hdr.addr4, eth_hdr->h_source);
		break;
	default:
		return -EINVAL;
	}

	/* strip the ethernet header but keep the 2-byte protocol field in
	 * place, then rebuild the SNAP/LLC prefix around it as appropriate
	 */
	skb_pull(skb, hdr_offset + sizeof(struct ethhdr) - 2);
	if (eth_hdr->h_proto == cpu_to_be16(ETH_P_AARP) ||
	    eth_hdr->h_proto == cpu_to_be16(ETH_P_IPX))
		ether_addr_copy(skb_push(skb, ETH_ALEN), bridge_tunnel_header);
	else if (be16_to_cpu(eth_hdr->h_proto) >= ETH_P_802_3_MIN)
		ether_addr_copy(skb_push(skb, ETH_ALEN), rfc1042_header);
	else
		skb_pull(skb, 2);

	if (ieee80211_has_order(hdr.frame_control))
		memcpy(skb_push(skb, IEEE80211_HT_CTL_LEN), &rxd[9],
		       IEEE80211_HT_CTL_LEN);
	if (ieee80211_is_data_qos(hdr.frame_control)) {
		__le16 qos_ctrl;

		qos_ctrl = cpu_to_le16(le32_get_bits(rxd[8], MT_RXD8_QOS_CTL));
		memcpy(skb_push(skb, IEEE80211_QOS_CTL_LEN), &qos_ctrl,
		       IEEE80211_QOS_CTL_LEN);
	}

	/* without addr4 the header is 6 bytes shorter */
	if (ieee80211_has_a4(hdr.frame_control))
		memcpy(skb_push(skb, sizeof(hdr)), &hdr, sizeof(hdr));
	else
		memcpy(skb_push(skb, sizeof(hdr) - 6), &hdr, sizeof(hdr) - 6);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac2_reverse_frag0_hdr_trans);

/* Decode rx rate information from the RXV into @status (encoding, MCS,
 * NSS, guard interval, DCM, bandwidth).  Field locations differ between
 * mt7915 (CRXV words) and the other connac2 parts (PRXV word 0).
 *
 * Return: 0 on success, -EINVAL on an out-of-range mode/MCS/bandwidth.
 */
int mt76_connac2_mac_fill_rx_rate(struct mt76_dev *dev,
				  struct mt76_rx_status *status,
				  struct ieee80211_supported_band *sband,
				  __le32 *rxv, u8 *mode)
{
	u32 v0, v2;
	u8 stbc, gi, bw, dcm, nss;
	int i, idx;
	bool cck = false;

	v0 = le32_to_cpu(rxv[0]);
	v2 = le32_to_cpu(rxv[2]);

	idx = i = FIELD_GET(MT_PRXV_TX_RATE, v0);
	nss = FIELD_GET(MT_PRXV_NSTS, v0) + 1;

	if (!is_mt7915(dev)) {
		stbc = FIELD_GET(MT_PRXV_HT_STBC, v0);
		gi = FIELD_GET(MT_PRXV_HT_SGI, v0);
		*mode = FIELD_GET(MT_PRXV_TX_MODE, v0);
		if (is_mt7921(dev))
			dcm = !!(idx & MT_PRXV_TX_DCM);
		else
			dcm = FIELD_GET(MT_PRXV_DCM, v0);
		bw = FIELD_GET(MT_PRXV_FRAME_MODE, v0);
	} else {
		stbc = FIELD_GET(MT_CRXV_HT_STBC, v2);
		gi = FIELD_GET(MT_CRXV_HT_SHORT_GI, v2);
		*mode = FIELD_GET(MT_CRXV_TX_MODE, v2);
		dcm = !!(idx & GENMASK(3, 0) & MT_PRXV_TX_DCM);
		bw = FIELD_GET(MT_CRXV_FRAME_MODE, v2);
	}

	switch (*mode) {
	case MT_PHY_TYPE_CCK:
		cck = true;
		fallthrough;
	case MT_PHY_TYPE_OFDM:
		i = mt76_get_rate(dev, sband, i, cck);
		break;
	case MT_PHY_TYPE_HT_GF:
	case MT_PHY_TYPE_HT:
		status->encoding = RX_ENC_HT;
		if (gi)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (i > 31)
			return -EINVAL;
		break;
	case MT_PHY_TYPE_VHT:
		status->nss = nss;
		status->encoding = RX_ENC_VHT;
		if (gi)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (i > 11)
			return -EINVAL;
		break;
	case MT_PHY_TYPE_HE_MU:
	case MT_PHY_TYPE_HE_SU:
	case MT_PHY_TYPE_HE_EXT_SU:
	case MT_PHY_TYPE_HE_TB:
		status->nss = nss;
		status->encoding = RX_ENC_HE;
		/* only the low 4 bits carry the HE MCS */
		i &= GENMASK(3, 0);

		if (gi <= NL80211_RATE_INFO_HE_GI_3_2)
			status->he_gi = gi;

		status->he_dcm = dcm;
		break;
	default:
		return -EINVAL;
	}
	status->rate_idx = i;

	switch (bw) {
	case IEEE80211_STA_RX_BW_20:
		break;
	case IEEE80211_STA_RX_BW_40:
		/* ER SU 106-tone RU is reported as 40 MHz */
		if (*mode & MT_PHY_TYPE_HE_EXT_SU &&
		    (idx & MT_PRXV_TX_ER_SU_106T)) {
			status->bw = RATE_INFO_BW_HE_RU;
			status->he_ru =
				NL80211_RATE_INFO_HE_RU_ALLOC_106;
		} else {
			status->bw = RATE_INFO_BW_40;
		}
		break;
	case IEEE80211_STA_RX_BW_80:
		status->bw = RATE_INFO_BW_80;
		break;
	case IEEE80211_STA_RX_BW_160:
		status->bw = RATE_INFO_BW_160;
		break;
	default:
		return -EINVAL;
	}

	status->enc_flags |= RX_ENC_FLAG_STBC_MASK * stbc;
	if (*mode < MT_PHY_TYPE_HE_SU && gi)
		status->enc_flags |= RX_ENC_FLAG_SHORT_GI;

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_fill_rx_rate);