// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 */
#include <linux/sched.h>
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) {		\
	.band = NL80211_BAND_2GHZ,	\
	.center_freq = (_freq),		\
	.hw_value = (_idx),		\
	.max_power = 30,		\
}

#define CHAN5G(_idx, _freq) {		\
	.band = NL80211_BAND_5GHZ,	\
	.center_freq = (_freq),		\
	.hw_value = (_idx),		\
	.max_power = 30,		\
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	CHAN5G(36, 5180),
	CHAN5G(40, 5200),
	CHAN5G(44, 5220),
	CHAN5G(48, 5240),

	CHAN5G(52, 5260),
	CHAN5G(56, 5280),
	CHAN5G(60, 5300),
	CHAN5G(64, 5320),

	CHAN5G(100, 5500),
	CHAN5G(104, 5520),
	CHAN5G(108, 5540),
	CHAN5G(112, 5560),
	CHAN5G(116, 5580),
	CHAN5G(120, 5600),
	CHAN5G(124, 5620),
	CHAN5G(128, 5640),
	CHAN5G(132, 5660),
	CHAN5G(136, 5680),
	CHAN5G(140, 5700),
	CHAN5G(144, 5720),

	CHAN5G(149, 5745),
	CHAN5G(153, 5765),
	CHAN5G(157, 5785),
	CHAN5G(161, 5805),
	CHAN5G(165, 5825),
	CHAN5G(169, 5845),
	CHAN5G(173, 5865),
};

static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput = 0 * 1024, .blink_time = 334 },
	{ .throughput = 1 * 1024, .blink_time = 260 },
	{ .throughput = 5 * 1024, .blink_time = 220 },
	{ .throughput = 10 * 1024, .blink_time = 190 },
	{ .throughput = 20 * 1024, .blink_time = 170 },
	{ .throughput = 50 * 1024, .blink_time = 150 },
	{ .throughput = 70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time = 80 },
	{ .throughput = 300 * 1024, .blink_time = 50 },
};

static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	int led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
						 IEEE80211_TPT_LEDTRIG_FL_RADIO,
						 mt76_tpt_blink,
						 ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
	}

	return led_classdev_register(dev->dev, &dev->led_cdev);
}

static void mt76_led_cleanup(struct mt76_dev *dev)
{
	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return;

	led_classdev_unregister(&dev->led_cdev);
}

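/*
 * Derive HT/VHT stream capabilities from the PHY antenna mask: one RX
 * spatial stream is advertised per antenna, and TX STBC is only offered
 * when more than one chain is available.
 */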
static void mt76_init_stream_cap(struct mt76_phy *phy,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(phy->antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_phy *phy, bool vht)
{
	if (phy->dev->cap.has_2ghz)
		mt76_init_stream_cap(phy, &phy->sband_2g.sband, false);
	if (phy->dev->cap.has_5ghz)
		mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

static int
mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct ieee80211_sta_vht_cap *vht_cap;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;

	mt76_init_stream_cap(&dev->phy, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates)
{
	dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->phy.sband_2g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_2g,
			       mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz),
			       rates, n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->phy.sband_5g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_5g,
			       mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz),
			       rates, n_rates, vht);
}

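/*
 * If every channel in a band has ended up disabled (e.g. by regulatory
 * or OF frequency limits), drop the band from the wiphy entirely;
 * otherwise use its first channel as the initial chandef/channel state.
 */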
static void
mt76_check_sband(struct mt76_phy *phy, struct mt76_sband *msband,
		 enum nl80211_band band)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	bool found = false;
	int i;

	if (!sband)
		return;

	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found) {
		phy->chandef.chan = &sband->channels[0];
		phy->chan_state = &msband->chan[0];
		return;
	}

	sband->n_channels = 0;
	phy->hw->wiphy->bands[band] = NULL;
}

static void
mt76_phy_init(struct mt76_dev *dev, struct ieee80211_hw *hw)
{
	struct wiphy *wiphy = hw->wiphy;

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
	wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH |
			WIPHY_FLAG_SUPPORTS_TDLS |
			WIPHY_FLAG_AP_UAPSD;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AQL);

	wiphy->available_antennas_tx = dev->phy.antenna_mask;
	wiphy->available_antennas_rx = dev->phy.antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);
	hw->uapsd_max_sp_len = IEEE80211_WMM_IE_STA_QOSINFO_SP_ALL;

	if (!hw->max_tx_fragments)
		hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);

	if (!(dev->drv->drv_flags & MT_DRV_AMSDU_OFFLOAD)) {
		ieee80211_hw_set(hw, TX_AMSDU);
		ieee80211_hw_set(hw, TX_FRAG_LIST);
	}

	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
	ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	wiphy->interface_modes =
		BIT(NL80211_IFTYPE_STATION) |
		BIT(NL80211_IFTYPE_AP) |
#ifdef CONFIG_MAC80211_MESH
		BIT(NL80211_IFTYPE_MESH_POINT) |
#endif
		BIT(NL80211_IFTYPE_P2P_CLIENT) |
		BIT(NL80211_IFTYPE_P2P_GO) |
		BIT(NL80211_IFTYPE_ADHOC);
}

struct mt76_phy *
mt76_alloc_phy(struct mt76_dev *dev, unsigned int size,
	       const struct ieee80211_ops *ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	unsigned int phy_size, chan_size;
	unsigned int size_2g, size_5g;
	void *priv;

	phy_size = ALIGN(sizeof(*phy), 8);
	chan_size = sizeof(dev->phy.sband_2g.chan[0]);
	size_2g = ALIGN(ARRAY_SIZE(mt76_channels_2ghz) * chan_size, 8);
	size_5g = ALIGN(ARRAY_SIZE(mt76_channels_5ghz) * chan_size, 8);

	size += phy_size + size_2g + size_5g;
	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	phy = hw->priv;
	phy->dev = dev;
	phy->hw = hw;

	mt76_phy_init(dev, hw);

	priv = hw->priv + phy_size;

	phy->sband_2g = dev->phy.sband_2g;
	phy->sband_2g.chan = priv;
	priv += size_2g;

	phy->sband_5g = dev->phy.sband_5g;
	phy->sband_5g.chan = priv;
	priv += size_5g;

	phy->priv = priv;

	hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;
	hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;

	mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	return phy;
}
EXPORT_SYMBOL_GPL(mt76_alloc_phy);

int
mt76_register_phy(struct mt76_phy *phy)
{
	int ret;

	ret = ieee80211_register_hw(phy->hw);
	if (ret)
		return ret;

	phy->dev->phy2 = phy;
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_phy);

void
mt76_unregister_phy(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;

	dev->phy2 = NULL;
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(phy->hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_phy);

struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	struct mt76_dev *dev;
	int i;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	phy = &dev->phy;
	phy->dev = dev;
	phy->hw = hw;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	skb_queue_head_init(&dev->mcu.res_q);
	init_waitqueue_head(&dev->mcu.wait);
	mutex_init(&dev->mcu.mutex);
	dev->tx_worker.fn = mt76_tx_worker;

	INIT_LIST_HEAD(&dev->txwi_cache);

	for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
		skb_queue_head_init(&dev->rx_skb[i]);

	dev->wq = alloc_ordered_workqueue("mt76", 0);
	if (!dev->wq) {
		ieee80211_free_hw(hw);
		return NULL;
	}

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct mt76_phy *phy = &dev->phy;
	int ret;

	dev_set_drvdata(dev->dev, dev);
	mt76_phy_init(dev, hw);

	if (dev->cap.has_2ghz) {
		ret = mt76_init_sband_2g(dev, rates, n_rates);
		if (ret)
			return ret;
	}

	if (dev->cap.has_5ghz) {
		ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(hw->wiphy);
	mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	ret = ieee80211_register_hw(hw);
	if (ret)
		return ret;

	WARN_ON(mt76_worker_setup(hw, &dev->tx_worker, NULL, "tx"));
	sched_set_fifo_low(dev->tx_worker.task);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	if (IS_ENABLED(CONFIG_MT76_LEDS))
		mt76_led_cleanup(dev);
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	mt76_worker_teardown(&dev->tx_worker);
	if (dev->wq) {
		destroy_workqueue(dev->wq);
		dev->wq = NULL;
	}
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

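/*
 * RX entry point: frames are queued on the per-queue rx_skb list here
 * and handed to mac80211 later from mt76_rx_poll_complete(). Frames
 * received while the PHY is not running are dropped.
 */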
void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_phy *phy = mt76_dev_phy(dev, status->ext_phy);

	if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
		dev_kfree_skb(skb);
		return;
	}

#ifdef CONFIG_NL80211_TESTMODE
	if (dev->test.state == MT76_TM_STATE_RX_FRAMES) {
		dev->test.rx_stats.packets[q]++;
		if (status->flag & RX_FLAG_FAILED_FCS_CRC)
			dev->test.rx_stats.fcs_error[q]++;
	}
#endif
	__skb_queue_tail(&dev->rx_skb[q], skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct mt76_queue *q;
	int i, offset;

	offset = __MT_TXQ_MAX * (phy != &dev->phy);

	for (i = 0; i < __MT_TXQ_MAX; i++) {
		q = dev->q_tx[offset + i];
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

static struct mt76_channel_state *
mt76_channel_state(struct mt76_phy *phy, struct ieee80211_channel *c)
{
	struct mt76_sband *msband;
	int idx;

	if (c->band == NL80211_BAND_2GHZ)
		msband = &phy->sband_2g;
	else
		msband = &phy->sband_5g;

	idx = c - &msband->sband.channels[0];
	return &msband->chan[idx];
}

void mt76_update_survey_active_time(struct mt76_phy *phy, ktime_t time)
{
	struct mt76_channel_state *state = phy->chan_state;

	state->cc_active += ktime_to_us(ktime_sub(time,
						  phy->survey_time));
	phy->survey_time = time;
}
EXPORT_SYMBOL_GPL(mt76_update_survey_active_time);

void mt76_update_survey(struct mt76_dev *dev)
{
	ktime_t cur_time;

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	cur_time = ktime_get_boottime();
	mt76_update_survey_active_time(&dev->phy, cur_time);
	if (dev->phy2)
		mt76_update_survey_active_time(dev->phy2, cur_time);

	if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
		struct mt76_channel_state *state = dev->phy.chan_state;

		spin_lock_bh(&dev->cc_lock);
		state->cc_bss_rx += dev->cur_cc_bss_rx;
		dev->cur_cc_bss_rx = 0;
		spin_unlock_bh(&dev->cc_lock);
	}
}
EXPORT_SYMBOL_GPL(mt76_update_survey);

void mt76_set_channel(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct ieee80211_hw *hw = phy->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
	mt76_update_survey(dev);

	phy->chandef = *chandef;
	phy->chan_state = mt76_channel_state(phy, chandef->chan);

	if (!offchannel)
		phy->main_chan = chandef->chan;

	if (chandef->chan != phy->main_chan)
		memset(phy->chan_state, 0, sizeof(*phy->chan_state));
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

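/*
 * mac80211 survey hook: channels are indexed across the 2 GHz band
 * first, then the 5 GHz band; the microsecond cc_* counters are
 * converted to the millisecond units cfg80211 expects.
 */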
int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	mutex_lock(&dev->mutex);
	if (idx == 0 && dev->drv->update_survey)
		mt76_update_survey(dev);

	sband = &phy->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &phy->sband_5g;
	}

	if (idx >= sband->sband.n_channels) {
		ret = -ENOENT;
		goto out;
	}

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(phy, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	survey->filled |= dev->drv->survey_flags;
	if (state->noise)
		survey->filled |= SURVEY_INFO_NOISE_DBM;

	if (chan == phy->main_chan) {
		survey->filled |= SURVEY_INFO_IN_USE;

		if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
			survey->filled |= SURVEY_INFO_TIME_BSS_RX;
	}

	survey->time_busy = div_u64(state->cc_busy, 1000);
	survey->time_rx = div_u64(state->cc_rx, 1000);
	survey->time = div_u64(state->cc_active, 1000);
	survey->noise = state->noise;

	spin_lock_bh(&dev->cc_lock);
	survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
	survey->time_tx = div_u64(state->cc_tx, 1000);
	spin_unlock_bh(&dev->cc_lock);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

static void
mt76_rx_convert(struct mt76_dev *dev, struct sk_buff *skb,
		struct ieee80211_hw **hw,
		struct ieee80211_sta **sta)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->he_ru = mstat.he_ru;
	status->he_gi = mstat.he_gi;
	status->he_dcm = mstat.he_dcm;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;
	status->ampdu_reference = mstat.ampdu_ref;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	*sta = wcid_to_sta(mstat.wcid);
	*hw = mt76_phy_hw(dev, mstat.ext_phy);
}

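/*
 * Software CCMP replay check: compare the received packet number with
 * the last PN accepted for this station/TID and update the stored PN
 * when the frame is accepted.
 */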
static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = mt76_skb_get_hdr(skb);
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

static void
mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
		    int len)
{
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_rx_status info = {
		.enc_flags = status->enc_flags,
		.rate_idx = status->rate_idx,
		.encoding = status->encoding,
		.band = status->band,
		.nss = status->nss,
		.bw = status->bw,
	};
	struct ieee80211_sta *sta;
	u32 airtime;

	airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
	spin_lock(&dev->cc_lock);
	dev->cur_cc_bss_rx += airtime;
	spin_unlock(&dev->cc_lock);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
	ieee80211_sta_register_airtime(sta, status->tid, 0, airtime);
}

static void
mt76_airtime_flush_ampdu(struct mt76_dev *dev)
{
	struct mt76_wcid *wcid;
	int wcid_idx;

	if (!dev->rx_ampdu_len)
		return;

	wcid_idx = dev->rx_ampdu_status.wcid_idx;
	if (wcid_idx < ARRAY_SIZE(dev->wcid))
		wcid = rcu_dereference(dev->wcid[wcid_idx]);
	else
		wcid = NULL;
	dev->rx_ampdu_status.wcid = wcid;

	mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);

	dev->rx_ampdu_len = 0;
	dev->rx_ampdu_ref = 0;
}

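/*
 * Software RX airtime accounting (MT_DRV_SW_RX_AIRTIME): subframes of
 * an A-MPDU sharing the same ampdu_ref are accumulated and reported in
 * one go, while non-aggregated frames are reported immediately.
 */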
static void
mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;

	if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
		return;

	if (!wcid || !wcid->sta) {
		if (!ether_addr_equal(hdr->addr1, dev->macaddr))
			return;

		wcid = NULL;
	}

	if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
	    status->ampdu_ref != dev->rx_ampdu_ref)
		mt76_airtime_flush_ampdu(dev);

	if (status->flag & RX_FLAG_AMPDU_DETAILS) {
		if (!dev->rx_ampdu_len ||
		    status->ampdu_ref != dev->rx_ampdu_ref) {
			dev->rx_ampdu_status = *status;
			dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
			dev->rx_ampdu_ref = status->ampdu_ref;
		}

		dev->rx_ampdu_len += skb->len;
		return;
	}

	mt76_airtime_report(dev, status, skb->len);
}

static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct mt76_wcid *wcid = status->wcid;
	bool ps;

	hw = mt76_phy_hw(dev, status->ext_phy);
	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
		sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	mt76_airtime_check(dev, skb);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, status->tid);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct sk_buff *skb;

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		mt76_rx_convert(dev, skb, &hw, &sta);
		ieee80211_rx_napi(hw, sta, skb, napi);
	}
	spin_unlock(&dev->rx_lock);
}

void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

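/*
 * Add a station: let the driver allocate its per-STA state, bind the
 * station's TX queues to its wcid and publish the wcid entry for the
 * RX path under RCU.
 */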
static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta, bool ext_phy)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;
	}

	ewma_signal_init(&wcid->rssi);
	if (ext_phy)
		mt76_wcid_mask_set(dev->wcid_phy_mask, wcid->idx);
	wcid->ext_phy = ext_phy;
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
		mt76_rx_aggr_stop(dev, wcid, i);

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	mt76_wcid_mask_clear(dev->wcid_mask, idx);
	mt76_wcid_mask_clear(dev->wcid_phy_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta, ext_phy);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

void mt76_sta_pre_rcu_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
			     struct ieee80211_sta *sta)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;

	mutex_lock(&dev->mutex);
	rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
	mutex_unlock(&dev->mutex);
}
EXPORT_SYMBOL_GPL(mt76_sta_pre_rcu_remove);

int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_phy *phy = hw->priv;
	int n_chains = hweight8(phy->antenna_mask);
	int delta = mt76_tx_power_nss_delta(n_chains);

	*dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_beacon_cntdwn_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
						   IEEE80211_IFACE_ITER_RESUME_ALL,
						   __mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_beacon_cntdwn_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
						   IEEE80211_IFACE_ITER_RESUME_ALL,
						   __mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

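/*
 * Stub .set_tim handler: mac80211 rebuilds the TIM element whenever a
 * fresh beacon is fetched, so there is nothing for mt76 to do here.
 */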
int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->phy.sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->phy.sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_phy *phy = hw->priv;

	set_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_phy *phy = hw->priv;

	clear_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);

int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;

	mutex_lock(&dev->mutex);
	*tx_ant = phy->antenna_mask;
	*rx_ant = phy->antenna_mask;
	mutex_unlock(&dev->mutex);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_antenna);