1 // SPDX-License-Identifier: ISC
2 /* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
3 #include "mt76.h"
4 
5 const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
6 	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
7 	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
8 	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
9 	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
10 	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
11 	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
12 	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
13 	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
14 	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
15 	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
16 	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
17 	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
18 	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
19 	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
20 	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
21 	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
22 	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
23 	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
24 	[MT76_TM_ATTR_DRV_DATA] = { .type = NLA_NESTED },
25 };
26 EXPORT_SYMBOL_GPL(mt76_tm_policy);
27 
28 void mt76_testmode_tx_pending(struct mt76_phy *phy)
29 {
30 	struct mt76_testmode_data *td = &phy->test;
31 	struct mt76_dev *dev = phy->dev;
32 	struct mt76_wcid *wcid = &dev->global_wcid;
33 	struct sk_buff *skb = td->tx_skb;
34 	struct mt76_queue *q;
35 	u16 tx_queued_limit;
36 	int qid;
37 
38 	if (!skb || !td->tx_pending)
39 		return;
40 
41 	qid = skb_get_queue_mapping(skb);
42 	q = phy->q_tx[qid];
43 
44 	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;
45 
46 	spin_lock_bh(&q->lock);
47 
48 	while (td->tx_pending > 0 &&
49 	       td->tx_queued - td->tx_done < tx_queued_limit &&
50 	       q->queued < q->ndesc / 2) {
51 		int ret;
52 
53 		ret = dev->queue_ops->tx_queue_skb(dev, q, qid, skb_get(skb),
54 						   wcid, NULL);
55 		if (ret < 0)
56 			break;
57 
58 		td->tx_pending--;
59 		td->tx_queued++;
60 	}
61 
62 	dev->queue_ops->kick(dev, q);
63 
64 	spin_unlock_bh(&q->lock);
65 }
66 
67 static u32
68 mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
69 {
70 	switch (tx_rate_mode) {
71 	case MT76_TM_TX_MODE_HT:
72 		return IEEE80211_MAX_MPDU_LEN_HT_7935;
73 	case MT76_TM_TX_MODE_VHT:
74 	case MT76_TM_TX_MODE_HE_SU:
75 	case MT76_TM_TX_MODE_HE_EXT_SU:
76 	case MT76_TM_TX_MODE_HE_TB:
77 	case MT76_TM_TX_MODE_HE_MU:
78 		if (phy->sband_5g.sband.vht_cap.cap &
79 		    IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
80 			return IEEE80211_MAX_MPDU_LEN_VHT_7991;
81 		return IEEE80211_MAX_MPDU_LEN_VHT_11454;
82 	case MT76_TM_TX_MODE_CCK:
83 	case MT76_TM_TX_MODE_OFDM:
84 	default:
85 		return IEEE80211_MAX_FRAME_LEN;
86 	}
87 }
88 
89 static void
90 mt76_testmode_free_skb(struct mt76_phy *phy)
91 {
92 	struct mt76_testmode_data *td = &phy->test;
93 
94 	dev_kfree_skb(td->tx_skb);
95 	td->tx_skb = NULL;
96 }
97 
/* Build the TX template skb of @len bytes and store it in phy->test.tx_skb.
 *
 * Frames larger than MT_TXP_MAX_LEN are built as a head skb plus a
 * frag_list chain, each fragment at most MT_TXP_MAX_LEN bytes.
 * Any previously allocated template is freed on success; on fragment
 * allocation failure both the old template and the new head are freed.
 *
 * Returns 0 on success or -ENOMEM.
 */
int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
{
#define MT_TXP_MAX_LEN	4095
	/* data frame, FromDS, broadcast-style template */
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct mt76_testmode_data *td = &phy->test;
	struct sk_buff **frag_tail, *head;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	u32 max_len, head_len;
	int nfrags, i;

	/* clamp the requested length to [sizeof(hdr), mode-dependent max] */
	max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
	if (len > max_len)
		len = max_len;
	else if (len < sizeof(struct ieee80211_hdr))
		len = sizeof(struct ieee80211_hdr);

	nfrags = len / MT_TXP_MAX_LEN;
	head_len = nfrags ? MT_TXP_MAX_LEN : len;

	/* oversized frames are sent as QoS data (A-MSDU-capable) */
	if (len > IEEE80211_MAX_FRAME_LEN)
		fc |= IEEE80211_STYPE_QOS_DATA;

	head = alloc_skb(head_len, GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	/* zero-filled payload; all three addresses come from testmode config */
	hdr = __skb_put_zero(head, head_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, td->addr[0], ETH_ALEN);
	memcpy(hdr->addr2, td->addr[1], ETH_ALEN);
	memcpy(hdr->addr3, td->addr[2], ETH_ALEN);
	skb_set_queue_mapping(head, IEEE80211_AC_BE);

	info = IEEE80211_SKB_CB(head);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	info->hw_queue |= FIELD_PREP(MT_TX_HW_QUEUE_PHY, phy->band_idx);
	frag_tail = &skb_shinfo(head)->frag_list;

	/* chain the remaining payload as frag_list fragments */
	for (i = 0; i < nfrags; i++) {
		struct sk_buff *frag;
		u16 frag_len;

		/* last fragment carries the remainder */
		if (i == nfrags - 1)
			frag_len = len % MT_TXP_MAX_LEN;
		else
			frag_len = MT_TXP_MAX_LEN;

		frag = alloc_skb(frag_len, GFP_KERNEL);
		if (!frag) {
			/* free both the old template and the partial head;
			 * dev_kfree_skb(head) also frees the frag_list
			 */
			mt76_testmode_free_skb(phy);
			dev_kfree_skb(head);
			return -ENOMEM;
		}

		__skb_put_zero(frag, frag_len);
		/* account fragment bytes in the head skb totals */
		head->len += frag->len;
		head->data_len += frag->len;

		*frag_tail = frag;
		frag_tail = &(*frag_tail)->next;
	}

	/* replace any previous template with the new one */
	mt76_testmode_free_skb(phy);
	td->tx_skb = head;

	return 0;
}
EXPORT_SYMBOL(mt76_testmode_alloc_skb);
171 
/* Allocate the TX template and encode the configured rate into its
 * ieee80211_tx_rate / tx_info flags.
 *
 * Legacy/HT/VHT rates are validated against the channel band, width and
 * antenna count; HE and newer modes are left for the driver to program
 * (the skb is still allocated). Returns 0 or a negative error.
 */
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	int ret;

	ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
	if (ret)
		return ret;

	/* rates beyond VHT are not expressible in ieee80211_tx_rate */
	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	/* limit NSS to the configured TX antenna subset */
	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	info = IEEE80211_SKB_CB(td->tx_skb);
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		/* CCK only exists on 2.4 GHz */
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		/* on 2.4 GHz, OFDM rates follow the 4 CCK entries */
		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		/* MCS 32 is the 40 MHz duplicate rate */
		if (rate->idx > 8 * max_nss &&
			!(rate->idx == 32 &&
			  phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	/* propagate the channel width for HT/VHT rates */
	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	return 0;
}
263 
264 static void
265 mt76_testmode_tx_start(struct mt76_phy *phy)
266 {
267 	struct mt76_testmode_data *td = &phy->test;
268 	struct mt76_dev *dev = phy->dev;
269 
270 	td->tx_queued = 0;
271 	td->tx_done = 0;
272 	td->tx_pending = td->tx_count;
273 	mt76_worker_schedule(&dev->tx_worker);
274 }
275 
276 static void
277 mt76_testmode_tx_stop(struct mt76_phy *phy)
278 {
279 	struct mt76_testmode_data *td = &phy->test;
280 	struct mt76_dev *dev = phy->dev;
281 
282 	mt76_worker_disable(&dev->tx_worker);
283 
284 	td->tx_pending = 0;
285 
286 	mt76_worker_enable(&dev->tx_worker);
287 
288 	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
289 			   MT76_TM_TIMEOUT * HZ);
290 
291 	mt76_testmode_free_skb(phy);
292 }
293 
294 static inline void
295 mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
296 {
297 	td->param_set[idx / 32] |= BIT(idx % 32);
298 }
299 
300 static inline bool
301 mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
302 {
303 	return td->param_set[idx / 32] & BIT(idx % 32);
304 }
305 
306 static void
307 mt76_testmode_init_defaults(struct mt76_phy *phy)
308 {
309 	struct mt76_testmode_data *td = &phy->test;
310 
311 	if (td->tx_mpdu_len > 0)
312 		return;
313 
314 	td->tx_mpdu_len = 1024;
315 	td->tx_count = 1;
316 	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
317 	td->tx_rate_nss = 1;
318 
319 	memcpy(td->addr[0], phy->macaddr, ETH_ALEN);
320 	memcpy(td->addr[1], phy->macaddr, ETH_ALEN);
321 	memcpy(td->addr[2], phy->macaddr, ETH_ALEN);
322 }
323 
/* Perform the actual state transition.
 *
 * Stops any running TX first, prepares the TX template when entering
 * TX_FRAMES, then hands the transition to the driver via set_state().
 * On driver failure a freshly prepared TX setup is torn down again.
 * Returns 0 or the driver's negative error code.
 */
static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	/* leaving TX_FRAMES: drain and free the pending TX first */
	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	/* entering TX_FRAMES: build the template before the driver hook */
	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		/* undo the TX setup done above */
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES) {
		/* fresh RX session: start counting from zero */
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));
	}

	phy->test.state = state;

	return 0;
}
358 
359 int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
360 {
361 	struct mt76_testmode_data *td = &phy->test;
362 	struct ieee80211_hw *hw = phy->hw;
363 
364 	if (state == td->state && state == MT76_TM_STATE_OFF)
365 		return 0;
366 
367 	if (state > MT76_TM_STATE_OFF &&
368 	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
369 	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
370 		return -ENOTCONN;
371 
372 	if (state != MT76_TM_STATE_IDLE &&
373 	    td->state != MT76_TM_STATE_IDLE) {
374 		int ret;
375 
376 		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
377 		if (ret)
378 			return ret;
379 	}
380 
381 	return __mt76_testmode_set_state(phy, state);
382 
383 }
384 EXPORT_SYMBOL(mt76_testmode_set_state);
385 
386 static int
387 mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
388 {
389 	u8 val;
390 
391 	if (!attr)
392 		return 0;
393 
394 	val = nla_get_u8(attr);
395 	if (val < min || val > max)
396 		return -EINVAL;
397 
398 	*dest = val;
399 	return 0;
400 }
401 
402 int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
403 		      void *data, int len)
404 {
405 	struct mt76_phy *phy = hw->priv;
406 	struct mt76_dev *dev = phy->dev;
407 	struct mt76_testmode_data *td = &phy->test;
408 	struct nlattr *tb[NUM_MT76_TM_ATTRS];
409 	u32 state;
410 	int err;
411 	int i;
412 
413 	if (!dev->test_ops)
414 		return -EOPNOTSUPP;
415 
416 	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
417 				   mt76_tm_policy, NULL);
418 	if (err)
419 		return err;
420 
421 	err = -EINVAL;
422 
423 	mutex_lock(&dev->mutex);
424 
425 	if (tb[MT76_TM_ATTR_RESET]) {
426 		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
427 		memset(td, 0, sizeof(*td));
428 	}
429 
430 	mt76_testmode_init_defaults(phy);
431 
432 	if (tb[MT76_TM_ATTR_TX_COUNT])
433 		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);
434 
435 	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
436 		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);
437 
438 	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
439 			   0, MT76_TM_TX_MODE_MAX) ||
440 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
441 			   1, hweight8(phy->antenna_mask)) ||
442 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
443 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
444 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
445 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
446 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA],
447 			   &td->tx_antenna_mask, 0, 0xff) ||
448 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
449 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
450 			   &td->tx_duty_cycle, 0, 99) ||
451 	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
452 			   &td->tx_power_control, 0, 1))
453 		goto out;
454 
455 	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
456 		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);
457 
458 		if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
459 		    val < sizeof(struct ieee80211_hdr))
460 			goto out;
461 
462 		td->tx_mpdu_len = val;
463 	}
464 
465 	if (tb[MT76_TM_ATTR_TX_IPG])
466 		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);
467 
468 	if (tb[MT76_TM_ATTR_TX_TIME])
469 		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);
470 
471 	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
472 		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);
473 
474 	if (tb[MT76_TM_ATTR_STATE]) {
475 		state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
476 		if (state > MT76_TM_STATE_MAX)
477 			goto out;
478 	} else {
479 		state = td->state;
480 	}
481 
482 	if (tb[MT76_TM_ATTR_TX_POWER]) {
483 		struct nlattr *cur;
484 		int idx = 0;
485 		int rem;
486 
487 		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
488 			if (nla_len(cur) != 1 ||
489 			    idx >= ARRAY_SIZE(td->tx_power))
490 				goto out;
491 
492 			td->tx_power[idx++] = nla_get_u8(cur);
493 		}
494 	}
495 
496 	if (tb[MT76_TM_ATTR_MAC_ADDRS]) {
497 		struct nlattr *cur;
498 		int idx = 0;
499 		int rem;
500 
501 		nla_for_each_nested(cur, tb[MT76_TM_ATTR_MAC_ADDRS], rem) {
502 			if (nla_len(cur) != ETH_ALEN || idx >= 3)
503 				goto out;
504 
505 			memcpy(td->addr[idx], nla_data(cur), ETH_ALEN);
506 			idx++;
507 		}
508 	}
509 
510 	if (dev->test_ops->set_params) {
511 		err = dev->test_ops->set_params(phy, tb, state);
512 		if (err)
513 			goto out;
514 	}
515 
516 	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
517 		if (tb[i])
518 			mt76_testmode_param_set(td, i);
519 
520 	err = 0;
521 	if (tb[MT76_TM_ATTR_STATE])
522 		err = mt76_testmode_set_state(phy, state);
523 
524 out:
525 	mutex_unlock(&dev->mutex);
526 
527 	return err;
528 }
529 EXPORT_SYMBOL(mt76_testmode_cmd);
530 
531 static int
532 mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
533 {
534 	struct mt76_testmode_data *td = &phy->test;
535 	struct mt76_dev *dev = phy->dev;
536 	u64 rx_packets = 0;
537 	u64 rx_fcs_error = 0;
538 	int i;
539 
540 	if (dev->test_ops->dump_stats) {
541 		int ret;
542 
543 		ret = dev->test_ops->dump_stats(phy, msg);
544 		if (ret)
545 			return ret;
546 	}
547 
548 	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
549 		rx_packets += td->rx_stats.packets[i];
550 		rx_fcs_error += td->rx_stats.fcs_error[i];
551 	}
552 
553 	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
554 	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
555 	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
556 	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
557 			      MT76_TM_STATS_ATTR_PAD) ||
558 	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
559 			      MT76_TM_STATS_ATTR_PAD))
560 		return -EMSGSIZE;
561 
562 	return 0;
563 }
564 
/* cfg80211 testmode dump handler.
 *
 * Emits either the statistics nest (when MT76_TM_ATTR_STATS is present
 * in the request) or the current testmode configuration. Only a single
 * dump round is produced; subsequent calls return -ENOENT to terminate
 * the netlink dump. Optional parameters are emitted only if userspace
 * explicitly set them (tracked via the param_set bitmap).
 */
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	/* one-shot dump: cb->args[2] counts invocations */
	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	/* stats-only request: emit the STATS nest and stop */
	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	/* always-present config, then explicitly-set optional parameters */
	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u8(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_MAC_ADDRS)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
		if (!a)
			goto out;

		/* addr1/addr2/addr3 of the TX template */
		for (i = 0; i < 3; i++)
			if (nla_put(msg, i, ETH_ALEN, td->addr[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);
672