// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 */
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) { \
	.band = NL80211_BAND_2GHZ, \
	.center_freq = (_freq), \
	.hw_value = (_idx), \
	.max_power = 30, \
}

#define CHAN5G(_idx, _freq) { \
	.band = NL80211_BAND_5GHZ, \
	.center_freq = (_freq), \
	.hw_value = (_idx), \
	.max_power = 30, \
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	CHAN5G(36, 5180),
	CHAN5G(40, 5200),
	CHAN5G(44, 5220),
	CHAN5G(48, 5240),

	CHAN5G(52, 5260),
	CHAN5G(56, 5280),
	CHAN5G(60, 5300),
	CHAN5G(64, 5320),

	CHAN5G(100, 5500),
	CHAN5G(104, 5520),
	CHAN5G(108, 5540),
	CHAN5G(112, 5560),
	CHAN5G(116, 5580),
	CHAN5G(120, 5600),
	CHAN5G(124, 5620),
	CHAN5G(128, 5640),
	CHAN5G(132, 5660),
	CHAN5G(136, 5680),
	CHAN5G(140, 5700),
	CHAN5G(144, 5720),

	CHAN5G(149, 5745),
	CHAN5G(153, 5765),
	CHAN5G(157, 5785),
	CHAN5G(161, 5805),
	CHAN5G(165, 5825),
	CHAN5G(169, 5845),
	CHAN5G(173, 5865),
};

static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput = 0 * 1024, .blink_time = 334 },
	{ .throughput = 1 * 1024, .blink_time = 260 },
	{ .throughput = 5 * 1024, .blink_time = 220 },
	{ .throughput = 10 * 1024, .blink_time = 190 },
	{ .throughput = 20 * 1024, .blink_time = 170 },
	{ .throughput = 50 * 1024, .blink_time = 150 },
	{ .throughput = 70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time = 80 },
	{ .throughput = 300 * 1024, .blink_time = 50 },
};

static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	int led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
					IEEE80211_TPT_LEDTRIG_FL_RADIO,
					mt76_tpt_blink,
					ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
	}

	return led_classdev_register(dev->dev, &dev->led_cdev);
}

static void mt76_led_cleanup(struct mt76_dev *dev)
{
	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return;

	led_classdev_unregister(&dev->led_cdev);
}

static void mt76_init_stream_cap(struct mt76_phy *phy,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(phy->antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

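	/*
	 * Each byte of mcs.rx_mask covers one spatial stream (MCS 8 * i to
	 * 8 * i + 7), so advertise MCS 0-7 for every stream available in
	 * the antenna mask and clear the remaining entries.
	 */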
	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_phy *phy, bool vht)
{
	if (phy->dev->cap.has_2ghz)
		mt76_init_stream_cap(phy, &phy->sband_2g.sband, false);
	if (phy->dev->cap.has_5ghz)
		mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

static int
mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct ieee80211_sta_vht_cap *vht_cap;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;

	mt76_init_stream_cap(&dev->phy, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates)
{
	dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->phy.sband_2g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_2g,
			       mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz),
			       rates, n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->phy.sband_5g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_5g,
			       mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz),
			       rates, n_rates, vht);
}

static void
mt76_check_sband(struct mt76_phy *phy, struct mt76_sband *msband,
		 enum nl80211_band band)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	bool found = false;
	int i;

	if (!sband)
		return;

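	/*
	 * Keep the band only if at least one of its channels is still
	 * enabled; devicetree frequency limits, for example, may have
	 * marked every channel IEEE80211_CHAN_DISABLED.
	 */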
	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found) {
		phy->chandef.chan = &sband->channels[0];
		phy->chan_state = &msband->chan[0];
		return;
	}

	sband->n_channels = 0;
	phy->hw->wiphy->bands[band] = NULL;
}

static void
mt76_phy_init(struct mt76_dev *dev, struct ieee80211_hw *hw)
{
	struct wiphy *wiphy = hw->wiphy;

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
	wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH |
			WIPHY_FLAG_SUPPORTS_TDLS |
			WIPHY_FLAG_AP_UAPSD;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AQL);

	wiphy->available_antennas_tx = dev->phy.antenna_mask;
	wiphy->available_antennas_rx = dev->phy.antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);
	hw->uapsd_max_sp_len = IEEE80211_WMM_IE_STA_QOSINFO_SP_ALL;

	if (!hw->max_tx_fragments)
		hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
	ieee80211_hw_set(hw, TX_AMSDU);

	/* TODO: avoid linearization for SDIO */
	if (!mt76_is_sdio(dev))
		ieee80211_hw_set(hw, TX_FRAG_LIST);

	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
	ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	wiphy->interface_modes =
		BIT(NL80211_IFTYPE_STATION) |
		BIT(NL80211_IFTYPE_AP) |
#ifdef CONFIG_MAC80211_MESH
		BIT(NL80211_IFTYPE_MESH_POINT) |
#endif
		BIT(NL80211_IFTYPE_P2P_CLIENT) |
		BIT(NL80211_IFTYPE_P2P_GO) |
		BIT(NL80211_IFTYPE_ADHOC);
}

struct mt76_phy *
mt76_alloc_phy(struct mt76_dev *dev, unsigned int size,
	       const struct ieee80211_ops *ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	unsigned int phy_size, chan_size;
	unsigned int size_2g, size_5g;
	void *priv;

	phy_size = ALIGN(sizeof(*phy), 8);
	chan_size = sizeof(dev->phy.sband_2g.chan[0]);
	size_2g = ALIGN(ARRAY_SIZE(mt76_channels_2ghz) * chan_size, 8);
	size_5g = ALIGN(ARRAY_SIZE(mt76_channels_5ghz) * chan_size, 8);

	size += phy_size + size_2g + size_5g;
	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	phy = hw->priv;
	phy->dev = dev;
	phy->hw = hw;

	mt76_phy_init(dev, hw);

	priv = hw->priv + phy_size;

	phy->sband_2g = dev->phy.sband_2g;
	phy->sband_2g.chan = priv;
	priv += size_2g;

	phy->sband_5g = dev->phy.sband_5g;
	phy->sband_5g.chan = priv;
	priv += size_5g;

	phy->priv = priv;

	hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;
	hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;

	mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	return phy;
}
EXPORT_SYMBOL_GPL(mt76_alloc_phy);

int
mt76_register_phy(struct mt76_phy *phy)
{
	int ret;

	ret = ieee80211_register_hw(phy->hw);
	if (ret)
		return ret;

	phy->dev->phy2 = phy;
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_phy);

void
mt76_unregister_phy(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;

	dev->phy2 = NULL;
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(phy->hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_phy);

struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	struct mt76_dev *dev;
	int i;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	phy = &dev->phy;
	phy->dev = dev;
	phy->hw = hw;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	skb_queue_head_init(&dev->mcu.res_q);
	init_waitqueue_head(&dev->mcu.wait);
	mutex_init(&dev->mcu.mutex);

	INIT_LIST_HEAD(&dev->txwi_cache);

	for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
		skb_queue_head_init(&dev->rx_skb[i]);

	tasklet_init(&dev->tx_tasklet, mt76_tx_tasklet, (unsigned long)dev);

	dev->wq = alloc_ordered_workqueue("mt76", 0);
	if (!dev->wq) {
		ieee80211_free_hw(hw);
		return NULL;
	}

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct mt76_phy *phy = &dev->phy;
	int ret;

	dev_set_drvdata(dev->dev, dev);
	mt76_phy_init(dev, hw);

	if (dev->cap.has_2ghz) {
		ret = mt76_init_sband_2g(dev, rates, n_rates);
		if (ret)
			return ret;
	}

	if (dev->cap.has_5ghz) {
		ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(hw->wiphy);
	mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	return ieee80211_register_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	if (IS_ENABLED(CONFIG_MT76_LEDS))
		mt76_led_cleanup(dev);
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	if (dev->wq) {
		destroy_workqueue(dev->wq);
		dev->wq = NULL;
	}
	if (mt76_is_mmio(dev))
		mt76_tx_free(dev);
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_phy *phy = mt76_dev_phy(dev, status->ext_phy);

	if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
		dev_kfree_skb(skb);
		return;
	}

#ifdef CONFIG_NL80211_TESTMODE
	if (dev->test.state == MT76_TM_STATE_RX_FRAMES) {
		dev->test.rx_stats.packets[q]++;
		if (status->flag & RX_FLAG_FAILED_FCS_CRC)
			dev->test.rx_stats.fcs_error[q]++;
	}
#endif
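	/*
	 * Frames queued here are delivered to mac80211 later, from
	 * mt76_rx_poll_complete(), after station/airtime bookkeeping and
	 * rx reorder handling.
	 */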
	__skb_queue_tail(&dev->rx_skb[q], skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct mt76_queue *q;
	int i, offset;

	offset = __MT_TXQ_MAX * (phy != &dev->phy);

	for (i = 0; i < __MT_TXQ_MAX; i++) {
		q = dev->q_tx[offset + i].q;
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

static struct mt76_channel_state *
mt76_channel_state(struct mt76_phy *phy, struct ieee80211_channel *c)
{
	struct mt76_sband *msband;
	int idx;

	if (c->band == NL80211_BAND_2GHZ)
		msband = &phy->sband_2g;
	else
		msband = &phy->sband_5g;

	idx = c - &msband->sband.channels[0];
	return &msband->chan[idx];
}

void mt76_update_survey_active_time(struct mt76_phy *phy, ktime_t time)
{
	struct mt76_channel_state *state = phy->chan_state;

	state->cc_active += ktime_to_us(ktime_sub(time,
						  phy->survey_time));
	phy->survey_time = time;
}
EXPORT_SYMBOL_GPL(mt76_update_survey_active_time);

void mt76_update_survey(struct mt76_dev *dev)
{
	ktime_t cur_time;

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	cur_time = ktime_get_boottime();
	mt76_update_survey_active_time(&dev->phy, cur_time);
	if (dev->phy2)
		mt76_update_survey_active_time(dev->phy2, cur_time);

	if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
		struct mt76_channel_state *state = dev->phy.chan_state;

		spin_lock_bh(&dev->cc_lock);
		state->cc_bss_rx += dev->cur_cc_bss_rx;
		dev->cur_cc_bss_rx = 0;
		spin_unlock_bh(&dev->cc_lock);
	}
}
EXPORT_SYMBOL_GPL(mt76_update_survey);

void mt76_set_channel(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct ieee80211_hw *hw = phy->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
	mt76_update_survey(dev);

	phy->chandef = *chandef;
	phy->chan_state = mt76_channel_state(phy, chandef->chan);

	if (!offchannel)
		phy->main_chan = chandef->chan;

	if (chandef->chan != phy->main_chan)
		memset(phy->chan_state, 0, sizeof(*phy->chan_state));
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	mutex_lock(&dev->mutex);
	if (idx == 0 && dev->drv->update_survey)
		mt76_update_survey(dev);

	sband = &phy->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &phy->sband_5g;
	}

	if (idx >= sband->sband.n_channels) {
		ret = -ENOENT;
		goto out;
	}

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(phy, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	survey->filled |= dev->drv->survey_flags;
	if (state->noise)
		survey->filled |= SURVEY_INFO_NOISE_DBM;

	if (chan == phy->main_chan) {
		survey->filled |= SURVEY_INFO_IN_USE;

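		/*
		 * cc_bss_rx is only accumulated when the driver does
		 * software rx airtime accounting, see mt76_airtime_report().
		 */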
		if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
			survey->filled |= SURVEY_INFO_TIME_BSS_RX;
	}

	survey->time_busy = div_u64(state->cc_busy, 1000);
	survey->time_rx = div_u64(state->cc_rx, 1000);
	survey->time = div_u64(state->cc_active, 1000);
	survey->noise = state->noise;

	spin_lock_bh(&dev->cc_lock);
	survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
	survey->time_tx = div_u64(state->cc_tx, 1000);
	spin_unlock_bh(&dev->cc_lock);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

static void
mt76_rx_convert(struct mt76_dev *dev, struct sk_buff *skb,
		struct ieee80211_hw **hw,
		struct ieee80211_sta **sta)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->he_ru = mstat.he_ru;
	status->he_gi = mstat.he_gi;
	status->he_dcm = mstat.he_dcm;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;
	status->ampdu_reference = mstat.ampdu_ref;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	*sta = wcid_to_sta(mstat.wcid);
	*hw = mt76_phy_hw(dev, mstat.ext_phy);
}

static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = mt76_skb_get_hdr(skb);
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

static void
mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
		    int len)
{
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_rx_status info = {
		.enc_flags = status->enc_flags,
		.rate_idx = status->rate_idx,
		.encoding = status->encoding,
		.band = status->band,
		.nss = status->nss,
		.bw = status->bw,
	};
	struct ieee80211_sta *sta;
	u32 airtime;

	airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
	spin_lock(&dev->cc_lock);
	dev->cur_cc_bss_rx += airtime;
	spin_unlock(&dev->cc_lock);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
	ieee80211_sta_register_airtime(sta, status->tid, 0, airtime);
}

static void
mt76_airtime_flush_ampdu(struct mt76_dev *dev)
{
	struct mt76_wcid *wcid;
	int wcid_idx;

	if (!dev->rx_ampdu_len)
		return;

	wcid_idx = dev->rx_ampdu_status.wcid_idx;
	if (wcid_idx < ARRAY_SIZE(dev->wcid))
		wcid = rcu_dereference(dev->wcid[wcid_idx]);
	else
		wcid = NULL;
	dev->rx_ampdu_status.wcid = wcid;

	mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);

	dev->rx_ampdu_len = 0;
	dev->rx_ampdu_ref = 0;
}

static void
mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;

	if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
		return;

	if (!wcid || !wcid->sta) {
		if (!ether_addr_equal(hdr->addr1, dev->macaddr))
			return;

		wcid = NULL;
	}

	if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
	    status->ampdu_ref != dev->rx_ampdu_ref)
		mt76_airtime_flush_ampdu(dev);

	if (status->flag & RX_FLAG_AMPDU_DETAILS) {
		if (!dev->rx_ampdu_len ||
		    status->ampdu_ref != dev->rx_ampdu_ref) {
			dev->rx_ampdu_status = *status;
			dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
			dev->rx_ampdu_ref = status->ampdu_ref;
		}

		dev->rx_ampdu_len += skb->len;
		return;
	}

	mt76_airtime_report(dev, status, skb->len);
}

static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct mt76_wcid *wcid = status->wcid;
	bool ps;
	int i;

	hw = mt76_phy_hw(dev, status->ext_phy);
	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
		sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	mt76_airtime_check(dev, skb);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, status->tid);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);

	if (ps)
		return;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		if (!skb_queue_empty(&mtxq->retry_q))
			ieee80211_schedule_txq(hw, sta->txq[i]);
	}
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct sk_buff *skb;

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		mt76_rx_convert(dev, skb, &hw, &sta);
		ieee80211_rx_napi(hw, sta, skb, napi);
	}
	spin_unlock(&dev->rx_lock);
}

void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta, bool ext_phy)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

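		/*
		 * Attach the station's wcid to each mac80211 txq so the tx
		 * path can map queued frames back to this station entry.
		 */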
		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;

		mt76_txq_init(dev, sta->txq[i]);
	}

	ewma_signal_init(&wcid->rssi);
	if (ext_phy)
		mt76_wcid_mask_set(dev->wcid_phy_mask, wcid->idx);
	wcid->ext_phy = ext_phy;
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
		mt76_rx_aggr_stop(dev, wcid, i);

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	for (i = 0; i < ARRAY_SIZE(sta->txq); i++)
		mt76_txq_remove(dev, sta->txq[i]);
	mt76_wcid_mask_clear(dev->wcid_mask, idx);
	mt76_wcid_mask_clear(dev->wcid_phy_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta, ext_phy);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

void mt76_sta_pre_rcu_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
			     struct ieee80211_sta *sta)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;

	mutex_lock(&dev->mutex);
	rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
	mutex_unlock(&dev->mutex);
}
EXPORT_SYMBOL_GPL(mt76_sta_pre_rcu_remove);

int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_phy *phy = hw->priv;
	int n_chains = hweight8(phy->antenna_mask);
	int delta = mt76_tx_power_nss_delta(n_chains);

	*dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_csa_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
			IEEE80211_IFACE_ITER_RESUME_ALL,
			__mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

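/*
 * Interface iterator: record whether any active interface has finished
 * counting down its channel switch announcement.
 */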
static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_csa_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
			IEEE80211_IFACE_ITER_RESUME_ALL,
			__mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->phy.sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->phy.sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_phy *phy = hw->priv;

	set_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_phy *phy = hw->priv;

	clear_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);

int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;

	mutex_lock(&dev->mutex);
	*tx_ant = phy->antenna_mask;
	*rx_ant = phy->antenna_mask;
	mutex_unlock(&dev->mutex);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_antenna);