// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
#include "mt76.h"

static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
};

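/*
 * Queue pending test frames: clone the prepared template skb onto its tx
 * queue until the requested frame count is reached, the in-flight limit is
 * hit, or the hardware queue is half full, then kick the queue.
 */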
void mt76_testmode_tx_pending(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = dev->q_tx[qid];

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 && td->tx_queued - td->tx_done < 1000 &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, qid, skb_get(skb), wcid, NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}

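/*
 * Prepare the TX test template: an all-zero data frame addressed from and
 * to the local MAC address, with the configured testmode rate parameters
 * translated into the mac80211 rate info of the skb.
 */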
static int
mt76_testmode_tx_init(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	struct sk_buff *skb;
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(dev->phy.antenna_mask);

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	skb = alloc_skb(td->tx_msdu_len, GFP_KERNEL);
	if (!skb)
		return -ENOMEM;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = skb;
	hdr = __skb_put_zero(skb, td->tx_msdu_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, dev->macaddr, sizeof(dev->macaddr));
	memcpy(hdr->addr2, dev->macaddr, sizeof(dev->macaddr));
	memcpy(hdr->addr3, dev->macaddr, sizeof(dev->macaddr));

	info = IEEE80211_SKB_CB(skb);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (dev->phy.chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (dev->phy.chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      dev->phy.chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (dev->phy.chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}

	skb_set_queue_mapping(skb, IEEE80211_AC_BE);

	return 0;
}

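/* Reset the TX counters and kick the tx worker to start sending frames */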
static void
mt76_testmode_tx_start(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

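/*
 * Stop a running TX test: clear the pending counter while the tx worker is
 * paused, wait for already queued frames to complete and free the template.
 */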
static void
mt76_testmode_tx_stop(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued, 10 * HZ);

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}

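/* Bitmap helpers tracking which testmode parameters were explicitly set */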
static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

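/* Apply default TX parameters the first time testmode is configured */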
static void
mt76_testmode_init_defaults(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	if (td->tx_msdu_len > 0)
		return;

	td->tx_msdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;
}

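/*
 * Perform a state transition: stop any running TX test, prepare the TX
 * template or clear the RX stats for the new state, and forward the change
 * to the driver's set_state callback.
 */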
static int
__mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = dev->test.state;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(dev);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(dev);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(dev, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(dev);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(dev);
	else if (state == MT76_TM_STATE_RX_FRAMES)
		memset(&dev->test.rx_stats, 0, sizeof(dev->test.rx_stats));

	dev->test.state = state;

	return 0;
}

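/*
 * Public entry point for state changes. Enabling a test state requires the
 * PHY to be running in monitor mode; switching directly between two active
 * states goes through MT76_TM_STATE_IDLE first.
 */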
int mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &dev->test;
	struct ieee80211_hw *hw = dev->phy.hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &dev->phy.state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(dev, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(dev, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

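/* Read an optional u8 attribute and range-check it before storing it */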
static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

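/*
 * NL80211 testmode command handler: parse and validate the mt76 testmode
 * attributes, update the test configuration and optionally change state.
 */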
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &dev->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(dev, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(dev);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > IEEE80211_MAX_FRAME_LEN ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_msdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask, 1,
			   phy->antenna_mask) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u8(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(dev, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(dev, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

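/* Append accumulated TX/RX statistics to a testmode dump message */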
static int
mt76_testmode_dump_stats(struct mt76_dev *dev, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &dev->test;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	if (dev->test_ops->dump_stats)
		return dev->test_ops->dump_stats(dev, msg);

	return 0;
}

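/*
 * NL80211 testmode dump handler: report either the accumulated statistics
 * (when MT76_TM_ATTR_STATS is requested) or the current test configuration.
 */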
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &dev->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(dev, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(dev);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (td->mtd_name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, td->mtd_name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, td->mtd_offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_msdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);