// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 */
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) {			\
	.band = NL80211_BAND_2GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

#define CHAN5G(_idx, _freq) {			\
	.band = NL80211_BAND_5GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	CHAN5G(36, 5180),
	CHAN5G(40, 5200),
	CHAN5G(44, 5220),
	CHAN5G(48, 5240),

	CHAN5G(52, 5260),
	CHAN5G(56, 5280),
	CHAN5G(60, 5300),
	CHAN5G(64, 5320),

	CHAN5G(100, 5500),
	CHAN5G(104, 5520),
	CHAN5G(108, 5540),
	CHAN5G(112, 5560),
	CHAN5G(116, 5580),
	CHAN5G(120, 5600),
	CHAN5G(124, 5620),
	CHAN5G(128, 5640),
	CHAN5G(132, 5660),
	CHAN5G(136, 5680),
	CHAN5G(140, 5700),

	CHAN5G(149, 5745),
	CHAN5G(153, 5765),
	CHAN5G(157, 5785),
	CHAN5G(161, 5805),
	CHAN5G(165, 5825),
};

static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput =   0 * 1024, .blink_time = 334 },
	{ .throughput =   1 * 1024, .blink_time = 260 },
	{ .throughput =   5 * 1024, .blink_time = 220 },
	{ .throughput =  10 * 1024, .blink_time = 190 },
	{ .throughput =  20 * 1024, .blink_time = 170 },
	{ .throughput =  50 * 1024, .blink_time = 150 },
	{ .throughput =  70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time =  80 },
	{ .throughput = 300 * 1024, .blink_time =  50 },
};

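/* Register an LED class device for the PHY LED if the driver provides
 * brightness_set/blink_set callbacks. The LED pin and polarity can be
 * overridden via an "led" OF child node ("led-sources"/"led-active-low"),
 * and a throughput-based blink trigger is set as the default trigger.
 */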
static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	int led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
					IEEE80211_TPT_LEDTRIG_FL_RADIO,
					mt76_tpt_blink,
					ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
	}

	return led_classdev_register(dev->dev, &dev->led_cdev);
}

static void mt76_led_cleanup(struct mt76_dev *dev)
{
	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return;

	led_classdev_unregister(&dev->led_cdev);
}

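/* Derive per-stream HT/VHT capabilities from the antenna mask: advertise
 * TX STBC only when more than one spatial stream is available and limit
 * the RX MCS maps to the number of chains.
 */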
static void mt76_init_stream_cap(struct mt76_dev *dev,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(dev->antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_dev *dev, bool vht)
{
	if (dev->cap.has_2ghz)
		mt76_init_stream_cap(dev, &dev->sband_2g.sband, false);
	if (dev->cap.has_5ghz)
		mt76_init_stream_cap(dev, &dev->sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

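/* Set up a supported band: duplicate the channel template, allocate the
 * per-channel survey state and fill in the HT (and optionally VHT)
 * capabilities common to all mt76 devices.
 */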
static int
mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct ieee80211_sta_vht_cap *vht_cap;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;
	dev->chandef.chan = &sband->channels[0];
	dev->chan_state = &msband->chan[0];

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;
	ht_cap->ampdu_density = IEEE80211_HT_MPDU_DENSITY_4;

	mt76_init_stream_cap(dev, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates)
{
	dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->sband_2g.sband;

	return mt76_init_sband(dev, &dev->sband_2g,
			       mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz),
			       rates, n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->sband_5g.sband;

	return mt76_init_sband(dev, &dev->sband_5g,
			       mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz),
			       rates, n_rates, vht);
}

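/* Drop a band from the wiphy if all of its channels ended up disabled
 * (e.g. after applying regulatory or OF frequency limits).
 */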
static void
mt76_check_sband(struct mt76_dev *dev, int band)
{
	struct ieee80211_supported_band *sband = dev->hw->wiphy->bands[band];
	bool found = false;
	int i;

	if (!sband)
		return;

	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found)
		return;

	sband->n_channels = 0;
	dev->hw->wiphy->bands[band] = NULL;
}

struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_dev *dev;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	tasklet_init(&dev->tx_tasklet, mt76_tx_tasklet, (unsigned long)dev);

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

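/* Common registration path for all mt76 drivers: set up wiphy and hw
 * flags, supported interface modes and bands, then hand the device over
 * to mac80211.
 */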
int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct wiphy *wiphy = hw->wiphy;
	int ret;

	dev_set_drvdata(dev->dev, dev);

	INIT_LIST_HEAD(&dev->txwi_cache);

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);

	wiphy->available_antennas_tx = dev->antenna_mask;
	wiphy->available_antennas_rx = dev->antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);
	hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, PS_NULLFUNC_STACK);
	ieee80211_hw_set(hw, HOST_BROADCAST_PS_BUFFERING);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
	ieee80211_hw_set(hw, TX_AMSDU);
	ieee80211_hw_set(hw, TX_FRAG_LIST);
	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
	ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);
	ieee80211_hw_set(hw, SUPPORTS_REORDERING_BUFFER);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	wiphy->interface_modes =
		BIT(NL80211_IFTYPE_STATION) |
		BIT(NL80211_IFTYPE_AP) |
#ifdef CONFIG_MAC80211_MESH
		BIT(NL80211_IFTYPE_MESH_POINT) |
#endif
		BIT(NL80211_IFTYPE_ADHOC);

	if (dev->cap.has_2ghz) {
		ret = mt76_init_sband_2g(dev, rates, n_rates);
		if (ret)
			return ret;
	}

	if (dev->cap.has_5ghz) {
		ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(dev->hw->wiphy);
	mt76_check_sband(dev, NL80211_BAND_2GHZ);
	mt76_check_sband(dev, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	return ieee80211_register_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	if (IS_ENABLED(CONFIG_MT76_LEDS))
		mt76_led_cleanup(dev);
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	mt76_tx_free(dev);
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	if (!test_bit(MT76_STATE_RUNNING, &dev->state)) {
		dev_kfree_skb(skb);
		return;
	}

	__skb_queue_tail(&dev->rx_skb[q], skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_dev *dev)
{
	struct mt76_queue *q;
	int i;

	for (i = 0; i < ARRAY_SIZE(dev->q_tx); i++) {
		q = dev->q_tx[i].q;
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

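/* Look up the per-channel survey state that corresponds to a mac80211
 * channel entry.
 */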
static struct mt76_channel_state *
mt76_channel_state(struct mt76_dev *dev, struct ieee80211_channel *c)
{
	struct mt76_sband *msband;
	int idx;

	if (c->band == NL80211_BAND_2GHZ)
		msband = &dev->sband_2g;
	else
		msband = &dev->sband_5g;

	idx = c - &msband->sband.channels[0];
	return &msband->chan[idx];
}

void mt76_update_survey(struct mt76_dev *dev)
{
	struct mt76_channel_state *state = dev->chan_state;
	ktime_t cur_time;

	if (!test_bit(MT76_STATE_RUNNING, &dev->state))
		return;

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	cur_time = ktime_get_boottime();
	state->cc_active += ktime_to_us(ktime_sub(cur_time,
						  dev->survey_time));
	dev->survey_time = cur_time;

	if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
		spin_lock_bh(&dev->cc_lock);
		state->cc_bss_rx += dev->cur_cc_bss_rx;
		dev->cur_cc_bss_rx = 0;
		spin_unlock_bh(&dev->cc_lock);
	}
}
EXPORT_SYMBOL_GPL(mt76_update_survey);

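/* Switch to the channel configured by mac80211: wait (with timeout) for
 * pending tx to drain, fold the elapsed time into the survey counters and
 * reset the counters when tuning away from the main channel.
 */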
void mt76_set_channel(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(dev), timeout);
	mt76_update_survey(dev);

	dev->chandef = *chandef;
	dev->chan_state = mt76_channel_state(dev, chandef->chan);

	if (!offchannel)
		dev->main_chan = chandef->chan;

	if (chandef->chan != dev->main_chan)
		memset(dev->chan_state, 0, sizeof(*dev->chan_state));
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_dev *dev = hw->priv;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	mutex_lock(&dev->mutex);
	if (idx == 0 && dev->drv->update_survey)
		mt76_update_survey(dev);

	sband = &dev->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &dev->sband_5g;
	}

	if (idx >= sband->sband.n_channels) {
		ret = -ENOENT;
		goto out;
	}

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(dev, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	survey->filled |= dev->drv->survey_flags;
	if (chan == dev->main_chan) {
		survey->filled |= SURVEY_INFO_IN_USE;

		if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
			survey->filled |= SURVEY_INFO_TIME_BSS_RX;
	}

	survey->time_busy = div_u64(state->cc_busy, 1000);
	survey->time_rx = div_u64(state->cc_rx, 1000);
	survey->time = div_u64(state->cc_active, 1000);

	spin_lock_bh(&dev->cc_lock);
	survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
	survey->time_tx = div_u64(state->cc_tx, 1000);
	spin_unlock_bh(&dev->cc_lock);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

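/* Convert the driver-private mt76_rx_status stored in skb->cb into the
 * mac80211 ieee80211_rx_status and return the station the frame belongs
 * to (if any).
 */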
static struct ieee80211_sta *mt76_rx_convert(struct sk_buff *skb)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;
	status->ampdu_reference = mstat.ampdu_ref;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	return wcid_to_sta(mstat.wcid);
}

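/* Software CCMP replay check: compare the received PN against the last PN
 * recorded for this TID and reject frames that do not advance it.
 */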
static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211.
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = (struct ieee80211_hdr *)skb->data;
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

static void
mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
		    int len)
{
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_sta *sta;
	u32 airtime;

	airtime = mt76_calc_rx_airtime(dev, status, len);
	spin_lock(&dev->cc_lock);
	dev->cur_cc_bss_rx += airtime;
	spin_unlock(&dev->cc_lock);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
	ieee80211_sta_register_airtime(sta, status->tid, 0, airtime);
}

static void
mt76_airtime_flush_ampdu(struct mt76_dev *dev)
{
	struct mt76_wcid *wcid;
	int wcid_idx;

	if (!dev->rx_ampdu_len)
		return;

	wcid_idx = dev->rx_ampdu_status.wcid_idx;
	if (wcid_idx < ARRAY_SIZE(dev->wcid))
		wcid = rcu_dereference(dev->wcid[wcid_idx]);
	else
		wcid = NULL;
	dev->rx_ampdu_status.wcid = wcid;

	mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);

	dev->rx_ampdu_len = 0;
	dev->rx_ampdu_ref = 0;
}

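/* Account rx airtime in software: A-MPDU subframes are accumulated and
 * reported once per aggregate, all other frames are reported individually.
 */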
static void
mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;

	if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
		return;

	if (!wcid || !wcid->sta) {
		if (!ether_addr_equal(hdr->addr1, dev->macaddr))
			return;

		wcid = NULL;
	}

	if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
	    status->ampdu_ref != dev->rx_ampdu_ref)
		mt76_airtime_flush_ampdu(dev);

	if (status->flag & RX_FLAG_AMPDU_DETAILS) {
		if (!dev->rx_ampdu_len ||
		    status->ampdu_ref != dev->rx_ampdu_ref) {
			dev->rx_ampdu_status = *status;
			dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
			dev->rx_ampdu_ref = status->ampdu_ref;
		}

		dev->rx_ampdu_len += skb->len;
		return;
	}

	mt76_airtime_report(dev, status, skb->len);
}

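/* Per-frame rx bookkeeping for the transmitting station: RSSI averaging,
 * airtime accounting and powersave state transitions, including waking
 * up tx queues when the station leaves powersave.
 */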
static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_sta *sta;
	struct mt76_wcid *wcid = status->wcid;
	bool ps;
	int i;

	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
		sta = ieee80211_find_sta_by_ifaddr(dev->hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	mt76_airtime_check(dev, skb);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, status->tid);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);

	if (ps)
		return;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		if (!skb_queue_empty(&mtxq->retry_q))
			ieee80211_schedule_txq(dev->hw, sta->txq[i]);
	}
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct sk_buff *skb;

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		sta = mt76_rx_convert(skb);
		ieee80211_rx_napi(dev->hw, sta, skb, napi);
	}
	spin_unlock(&dev->rx_lock);
}

void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;

		mt76_txq_init(dev, sta->txq[i]);
	}

	ewma_signal_init(&wcid->rssi);
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	rcu_assign_pointer(dev->wcid[idx], NULL);
	synchronize_rcu();

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
		mt76_rx_aggr_stop(dev, wcid, i);

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	for (i = 0; i < ARRAY_SIZE(sta->txq); i++)
		mt76_txq_remove(dev, sta->txq[i]);
	mt76_wcid_free(dev->wcid_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_dev *dev = hw->priv;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

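/* Report the current tx power. txpower_cur is a per-chain value in .5 dB
 * units; a combining gain of 3/4/6 dB is added for 2/3/4 chains to report
 * the total output power.
 */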
int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_dev *dev = hw->priv;
	int n_chains = hweight8(dev->antenna_mask);

	*dbm = DIV_ROUND_UP(dev->txpower_cur, 2);

	/* convert from per-chain power to combined
	 * output power
	 */
	switch (n_chains) {
	case 4:
		*dbm += 6;
		break;
	case 3:
		*dbm += 4;
		break;
	case 2:
		*dbm += 3;
		break;
	default:
		break;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_csa_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_csa_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

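/* Re-insert the 8-byte CCMP header that the hardware stripped, rebuilding
 * it from the PN saved in the rx status and placing it between the 802.11
 * header and the payload, then clear RX_FLAG_IV_STRIPPED.
 */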
void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

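/* Map a hardware rate index to an index into the bitrate table of the
 * given band. CCK rates only exist on 2.4 GHz and have their short
 * preamble bit masked out; OFDM lookups on 2.4 GHz skip the four CCK
 * entries at the start of the table.
 */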
int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_dev *dev = hw->priv;

	set_bit(MT76_SCANNING, &dev->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = hw->priv;

	clear_bit(MT76_SCANNING, &dev->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);

int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
{
	struct mt76_dev *dev = hw->priv;

	mutex_lock(&dev->mutex);
	*tx_ant = dev->antenna_mask;
	*rx_ant = dev->antenna_mask;
	mutex_unlock(&dev->mutex);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_antenna);