// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 */
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) {			\
	.band = NL80211_BAND_2GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

#define CHAN5G(_idx, _freq) {			\
	.band = NL80211_BAND_5GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	CHAN5G(36, 5180),
	CHAN5G(40, 5200),
	CHAN5G(44, 5220),
	CHAN5G(48, 5240),

	CHAN5G(52, 5260),
	CHAN5G(56, 5280),
	CHAN5G(60, 5300),
	CHAN5G(64, 5320),

	CHAN5G(100, 5500),
	CHAN5G(104, 5520),
	CHAN5G(108, 5540),
	CHAN5G(112, 5560),
	CHAN5G(116, 5580),
	CHAN5G(120, 5600),
	CHAN5G(124, 5620),
	CHAN5G(128, 5640),
	CHAN5G(132, 5660),
	CHAN5G(136, 5680),
	CHAN5G(140, 5700),

	CHAN5G(149, 5745),
	CHAN5G(153, 5765),
	CHAN5G(157, 5785),
	CHAN5G(161, 5805),
	CHAN5G(165, 5825),
};

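/*
 * Blink interval table for the mac80211 throughput LED trigger: higher
 * throughput thresholds map to shorter blink times (faster blinking).
 */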
static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput =   0 * 1024, .blink_time = 334 },
	{ .throughput =   1 * 1024, .blink_time = 260 },
	{ .throughput =   5 * 1024, .blink_time = 220 },
	{ .throughput =  10 * 1024, .blink_time = 190 },
	{ .throughput =  20 * 1024, .blink_time = 170 },
	{ .throughput =  50 * 1024, .blink_time = 150 },
	{ .throughput =  70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time =  80 },
	{ .throughput = 300 * 1024, .blink_time =  50 },
};

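/*
 * Register an LED class device when the driver provides brightness/blink
 * callbacks. The LED pin and polarity can be overridden through an optional
 * "led" child node in the device tree ("led-sources", "led-active-low").
 */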
static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	u32 led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
					IEEE80211_TPT_LEDTRIG_FL_RADIO,
					mt76_tpt_blink,
					ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
		of_node_put(np);
	}

	return led_classdev_register(dev->dev, &dev->led_cdev);
}

static void mt76_led_cleanup(struct mt76_dev *dev)
{
	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return;

	led_classdev_unregister(&dev->led_cdev);
}

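/*
 * Derive the spatial stream dependent HT/VHT capabilities (RX MCS masks
 * and TX STBC) from the number of antennas set in the phy antenna mask.
 */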
static void mt76_init_stream_cap(struct mt76_phy *phy,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(phy->antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_phy *phy, bool vht)
{
	if (phy->dev->cap.has_2ghz)
		mt76_init_stream_cap(phy, &phy->sband_2g.sband, false);
	if (phy->dev->cap.has_5ghz)
		mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

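/*
 * Set up a supported band: duplicate the channel template, allocate
 * per-channel state for survey/airtime accounting and advertise the
 * common HT (and optionally VHT) capabilities.
 */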
static int
mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct ieee80211_sta_vht_cap *vht_cap;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;

	mt76_init_stream_cap(&dev->phy, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates)
{
	dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->phy.sband_2g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_2g,
			       mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz),
			       rates, n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->phy.sband_5g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_5g,
			       mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz),
			       rates, n_rates, vht);
}

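/*
 * Pick the first enabled channel as the default for this phy, or drop the
 * band entirely if regulatory/device tree limits disabled all its channels.
 */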
static void
mt76_check_sband(struct mt76_phy *phy, struct mt76_sband *msband,
		 enum nl80211_band band)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	bool found = false;
	int i;

	if (!sband)
		return;

	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found) {
		phy->chandef.chan = &sband->channels[0];
		phy->chan_state = &msband->chan[0];
		return;
	}

	sband->n_channels = 0;
	phy->hw->wiphy->bands[band] = NULL;
}

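/* hw/wiphy capability setup shared by the primary and any secondary phy */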
static void
mt76_phy_init(struct mt76_dev *dev, struct ieee80211_hw *hw)
{
	struct wiphy *wiphy = hw->wiphy;

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
	wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH |
			WIPHY_FLAG_SUPPORTS_TDLS;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AQL);

	wiphy->available_antennas_tx = dev->phy.antenna_mask;
	wiphy->available_antennas_rx = dev->phy.antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);

	if (!hw->max_tx_fragments)
		hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
	ieee80211_hw_set(hw, TX_AMSDU);
	ieee80211_hw_set(hw, TX_FRAG_LIST);
	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
	ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	wiphy->interface_modes =
		BIT(NL80211_IFTYPE_STATION) |
		BIT(NL80211_IFTYPE_AP) |
#ifdef CONFIG_MAC80211_MESH
		BIT(NL80211_IFTYPE_MESH_POINT) |
#endif
		BIT(NL80211_IFTYPE_P2P_CLIENT) |
		BIT(NL80211_IFTYPE_P2P_GO) |
		BIT(NL80211_IFTYPE_ADHOC);
}

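/*
 * Allocate an extra phy (e.g. for the second band of dual-band devices).
 * hw->priv is laid out as the struct mt76_phy itself, followed by the
 * per-channel state arrays of both bands, followed by the driver private
 * data area.
 */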
struct mt76_phy *
mt76_alloc_phy(struct mt76_dev *dev, unsigned int size,
	       const struct ieee80211_ops *ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	unsigned int phy_size, chan_size;
	unsigned int size_2g, size_5g;
	void *priv;

	phy_size = ALIGN(sizeof(*phy), 8);
	chan_size = sizeof(dev->phy.sband_2g.chan[0]);
	size_2g = ALIGN(ARRAY_SIZE(mt76_channels_2ghz) * chan_size, 8);
	size_5g = ALIGN(ARRAY_SIZE(mt76_channels_5ghz) * chan_size, 8);

	size += phy_size + size_2g + size_5g;
	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	phy = hw->priv;
	phy->dev = dev;
	phy->hw = hw;

	mt76_phy_init(dev, hw);

	priv = hw->priv + phy_size;

	phy->sband_2g = dev->phy.sband_2g;
	phy->sband_2g.chan = priv;
	priv += size_2g;

	phy->sband_5g = dev->phy.sband_5g;
	phy->sband_5g.chan = priv;
	priv += size_5g;

	phy->priv = priv;

	hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;
	hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;

	mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	return phy;
}
EXPORT_SYMBOL_GPL(mt76_alloc_phy);

int
mt76_register_phy(struct mt76_phy *phy)
{
	int ret;

	ret = ieee80211_register_hw(phy->hw);
	if (ret)
		return ret;

	phy->dev->phy2 = phy;
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_phy);

void
mt76_unregister_phy(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;

	dev->phy2 = NULL;
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(phy->hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_phy);

struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	struct mt76_dev *dev;
	int i;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	phy = &dev->phy;
	phy->dev = dev;
	phy->hw = hw;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	skb_queue_head_init(&dev->mcu.res_q);
	init_waitqueue_head(&dev->mcu.wait);
	mutex_init(&dev->mcu.mutex);

	INIT_LIST_HEAD(&dev->txwi_cache);

	for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
		skb_queue_head_init(&dev->rx_skb[i]);

	tasklet_init(&dev->tx_tasklet, mt76_tx_tasklet, (unsigned long)dev);

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct mt76_phy *phy = &dev->phy;
	int ret;

	dev_set_drvdata(dev->dev, dev);
	mt76_phy_init(dev, hw);

	if (dev->cap.has_2ghz) {
		ret = mt76_init_sband_2g(dev, rates, n_rates);
		if (ret)
			return ret;
	}

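	/* skip the first four (CCK) rate entries, which are 2.4 GHz only */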
	if (dev->cap.has_5ghz) {
		ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(hw->wiphy);
	mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	return ieee80211_register_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	if (IS_ENABLED(CONFIG_MT76_LEDS))
		mt76_led_cleanup(dev);
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	mt76_tx_free(dev);
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_phy *phy = mt76_dev_phy(dev, status->ext_phy);

	if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
		dev_kfree_skb(skb);
		return;
	}

	__skb_queue_tail(&dev->rx_skb[q], skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct mt76_queue *q;
	int i, offset;

	offset = __MT_TXQ_MAX * (phy != &dev->phy);

	for (i = 0; i < __MT_TXQ_MAX; i++) {
		q = dev->q_tx[offset + i].q;
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

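/* Look up the per-channel survey/airtime state for a mac80211 channel */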
static struct mt76_channel_state *
mt76_channel_state(struct mt76_phy *phy, struct ieee80211_channel *c)
{
	struct mt76_sband *msband;
	int idx;

	if (c->band == NL80211_BAND_2GHZ)
		msband = &phy->sband_2g;
	else
		msband = &phy->sband_5g;

	idx = c - &msband->sband.channels[0];
	return &msband->chan[idx];
}

static void
mt76_update_survey_active_time(struct mt76_phy *phy, ktime_t time)
{
	struct mt76_channel_state *state = phy->chan_state;

	state->cc_active += ktime_to_us(ktime_sub(time,
						  phy->survey_time));
	phy->survey_time = time;
}

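/*
 * Refresh the channel time counters: let the driver update its hardware
 * counters, account the elapsed active time per phy and fold in the
 * software-tracked BSS RX airtime where the hardware cannot report it.
 */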
void mt76_update_survey(struct mt76_dev *dev)
{
	ktime_t cur_time;

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	cur_time = ktime_get_boottime();
	mt76_update_survey_active_time(&dev->phy, cur_time);
	if (dev->phy2)
		mt76_update_survey_active_time(dev->phy2, cur_time);

	if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
		struct mt76_channel_state *state = dev->phy.chan_state;

		spin_lock_bh(&dev->cc_lock);
		state->cc_bss_rx += dev->cur_cc_bss_rx;
		dev->cur_cc_bss_rx = 0;
		spin_unlock_bh(&dev->cc_lock);
	}
}
EXPORT_SYMBOL_GPL(mt76_update_survey);

void mt76_set_channel(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct ieee80211_hw *hw = phy->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
	mt76_update_survey(dev);

	phy->chandef = *chandef;
	phy->chan_state = mt76_channel_state(phy, chandef->chan);

	if (!offchannel)
		phy->main_chan = chandef->chan;

	if (chandef->chan != phy->main_chan)
		memset(phy->chan_state, 0, sizeof(*phy->chan_state));
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	mutex_lock(&dev->mutex);
	if (idx == 0 && dev->drv->update_survey)
		mt76_update_survey(dev);

	sband = &phy->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &phy->sband_5g;
	}

	if (idx >= sband->sband.n_channels) {
		ret = -ENOENT;
		goto out;
	}

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(phy, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	survey->filled |= dev->drv->survey_flags;
	if (state->noise)
		survey->filled |= SURVEY_INFO_NOISE_DBM;

	if (chan == phy->main_chan) {
		survey->filled |= SURVEY_INFO_IN_USE;

		if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
			survey->filled |= SURVEY_INFO_TIME_BSS_RX;
	}

	survey->time_busy = div_u64(state->cc_busy, 1000);
	survey->time_rx = div_u64(state->cc_rx, 1000);
	survey->time = div_u64(state->cc_active, 1000);
	survey->noise = state->noise;

	spin_lock_bh(&dev->cc_lock);
	survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
	survey->time_tx = div_u64(state->cc_tx, 1000);
	spin_unlock_bh(&dev->cc_lock);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

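/*
 * Snapshot the current RX packet numbers of a CCMP key, so that received
 * frames can be checked for replays in software (see mt76_check_ccmp_pn).
 */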
void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

static void
mt76_rx_convert(struct mt76_dev *dev, struct sk_buff *skb,
		struct ieee80211_hw **hw,
		struct ieee80211_sta **sta)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->he_ru = mstat.he_ru;
	status->he_gi = mstat.he_gi;
	status->he_dcm = mstat.he_dcm;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;
	status->ampdu_reference = mstat.ampdu_ref;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	*sta = wcid_to_sta(mstat.wcid);
	*hw = mt76_phy_hw(dev, mstat.ext_phy);
}

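/*
 * Software replay protection: compare the received CCMP packet number
 * against the last one seen for this station/TID and drop the frame if
 * it did not increase.
 */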
static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = mt76_skb_get_hdr(skb);
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

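/*
 * Software RX airtime accounting for devices that cannot report it in
 * hardware: estimate the frame duration from the RX rate and credit it to
 * the channel state and the station's airtime fairness counters. A-MPDUs
 * are accumulated and reported in one go once the aggregate is complete.
 */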
static void
mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
		    int len)
{
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_rx_status info = {
		.enc_flags = status->enc_flags,
		.rate_idx = status->rate_idx,
		.encoding = status->encoding,
		.band = status->band,
		.nss = status->nss,
		.bw = status->bw,
	};
	struct ieee80211_sta *sta;
	u32 airtime;

	airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
	spin_lock(&dev->cc_lock);
	dev->cur_cc_bss_rx += airtime;
	spin_unlock(&dev->cc_lock);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
	ieee80211_sta_register_airtime(sta, status->tid, 0, airtime);
}

static void
mt76_airtime_flush_ampdu(struct mt76_dev *dev)
{
	struct mt76_wcid *wcid;
	int wcid_idx;

	if (!dev->rx_ampdu_len)
		return;

	wcid_idx = dev->rx_ampdu_status.wcid_idx;
	if (wcid_idx < ARRAY_SIZE(dev->wcid))
		wcid = rcu_dereference(dev->wcid[wcid_idx]);
	else
		wcid = NULL;
	dev->rx_ampdu_status.wcid = wcid;

	mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);

	dev->rx_ampdu_len = 0;
	dev->rx_ampdu_ref = 0;
}

static void
mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;

	if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
		return;

	if (!wcid || !wcid->sta) {
		if (!ether_addr_equal(hdr->addr1, dev->macaddr))
			return;

		wcid = NULL;
	}

	if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
	    status->ampdu_ref != dev->rx_ampdu_ref)
		mt76_airtime_flush_ampdu(dev);

	if (status->flag & RX_FLAG_AMPDU_DETAILS) {
		if (!dev->rx_ampdu_len ||
		    status->ampdu_ref != dev->rx_ampdu_ref) {
			dev->rx_ampdu_status = *status;
			dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
			dev->rx_ampdu_ref = status->ampdu_ref;
		}

		dev->rx_ampdu_len += skb->len;
		return;
	}

	mt76_airtime_report(dev, status, skb->len);
}

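/*
 * Per-frame station bookkeeping on RX: update the RSSI average, and track
 * powersave transitions, PS-poll and U-APSD triggers on behalf of mac80211.
 */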
static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct mt76_wcid *wcid = status->wcid;
	bool ps;
	int i;

	hw = mt76_phy_hw(dev, status->ext_phy);
	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
		sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	mt76_airtime_check(dev, skb);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, status->tid);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);

	if (ps)
		return;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		if (!skb_queue_empty(&mtxq->retry_q))
			ieee80211_schedule_txq(hw, sta->txq[i]);
	}
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct sk_buff *skb;

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		mt76_rx_convert(dev, skb, &hw, &sta);
		ieee80211_rx_napi(hw, sta, skb, napi);
	}
	spin_unlock(&dev->rx_lock);
}

void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta, bool ext_phy)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;

		mt76_txq_init(dev, sta->txq[i]);
	}

	ewma_signal_init(&wcid->rssi);
	if (ext_phy)
		mt76_wcid_mask_set(dev->wcid_phy_mask, wcid->idx);
	wcid->ext_phy = ext_phy;
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
		mt76_rx_aggr_stop(dev, wcid, i);

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	for (i = 0; i < ARRAY_SIZE(sta->txq); i++)
		mt76_txq_remove(dev, sta->txq[i]);
	mt76_wcid_mask_clear(dev->wcid_mask, idx);
	mt76_wcid_mask_clear(dev->wcid_phy_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta, ext_phy);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

void mt76_sta_pre_rcu_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
			     struct ieee80211_sta *sta)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;

	mutex_lock(&dev->mutex);
	rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
	mutex_unlock(&dev->mutex);
}
EXPORT_SYMBOL_GPL(mt76_sta_pre_rcu_remove);

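/* phy->txpower_cur and the per-chain delta are in units of 0.5 dBm */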
int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_phy *phy = hw->priv;
	int n_chains = hweight8(phy->antenna_mask);
	int delta = mt76_tx_power_nss_delta(n_chains);

	*dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_csa_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_csa_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

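/*
 * Rebuild the CCMP header from the saved packet number for frames where
 * the hardware stripped the IV, so that mac80211 sees a complete frame
 * and can perform its own PN validation.
 */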
void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

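/*
 * Map a hardware rate index to an index into the band's bitrate table.
 * For OFDM rates on 2.4 GHz, skip over the four leading CCK entries.
 */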
int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->phy.sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->phy.sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_phy *phy = hw->priv;

	set_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_phy *phy = hw->priv;

	clear_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);

int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;

	mutex_lock(&dev->mutex);
	*tx_ant = phy->antenna_mask;
	*rx_ant = phy->antenna_mask;
	mutex_unlock(&dev->mutex);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_antenna);