// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
#include "mt76.h"

static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
};

void mt76_testmode_tx_pending(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	u16 tx_queued_limit;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 &&
	       td->tx_queued - td->tx_done < tx_queued_limit &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, q, skb_get(skb), wcid,
						   NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}

static u32
mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
{
	switch (tx_rate_mode) {
	case MT76_TM_TX_MODE_HT:
		return IEEE80211_MAX_MPDU_LEN_HT_7935;
	case MT76_TM_TX_MODE_VHT:
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		if (phy->sband_5g.sband.vht_cap.cap &
		    IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
			return IEEE80211_MAX_MPDU_LEN_VHT_7991;
		return IEEE80211_MAX_MPDU_LEN_VHT_11454;
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
	default:
		return IEEE80211_MAX_FRAME_LEN;
	}
}

static void
mt76_testmode_free_skb(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct sk_buff *skb = td->tx_skb;

	if (!skb)
		return;

	if (skb_has_frag_list(skb)) {
		kfree_skb_list(skb_shinfo(skb)->frag_list);
		skb_shinfo(skb)->frag_list = NULL;
	}

	dev_kfree_skb(skb);
	td->tx_skb = NULL;
}

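/*
 * mt76_testmode_alloc_skb builds the template MPDU that is cloned for every
 * transmitted test frame. Anything larger than MT_TXP_MAX_LEN is split into
 * a head skb plus a frag_list chain of MT_TXP_MAX_LEN sized fragments, with
 * the remainder in the last fragment.
 *
 * Worked example (illustrative, derived from the arithmetic below): for
 * len = 10000 and MT_TXP_MAX_LEN = 4095, nfrags = 10000 / 4095 = 2, so the
 * frame becomes a 4095-byte head plus fragments of 4095 and
 * 10000 % 4095 = 1810 bytes, 10000 bytes in total.
 */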
int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
{
#define MT_TXP_MAX_LEN	4095
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct mt76_testmode_data *td = &phy->test;
	bool ext_phy = phy != &phy->dev->phy;
	struct sk_buff **frag_tail, *head;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	u32 max_len, head_len;
	int nfrags, i;

	max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
	if (len > max_len)
		len = max_len;
	else if (len < sizeof(struct ieee80211_hdr))
		len = sizeof(struct ieee80211_hdr);

	nfrags = len / MT_TXP_MAX_LEN;
	head_len = nfrags ? MT_TXP_MAX_LEN : len;

	if (len > IEEE80211_MAX_FRAME_LEN)
		fc |= IEEE80211_STYPE_QOS_DATA;

	head = alloc_skb(head_len, GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	hdr = __skb_put_zero(head, head_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr2, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr3, phy->macaddr, sizeof(phy->macaddr));
	skb_set_queue_mapping(head, IEEE80211_AC_BE);

	info = IEEE80211_SKB_CB(head);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	if (ext_phy)
		info->hw_queue |= MT_TX_HW_QUEUE_EXT_PHY;

	frag_tail = &skb_shinfo(head)->frag_list;

	for (i = 0; i < nfrags; i++) {
		struct sk_buff *frag;
		u16 frag_len;

		if (i == nfrags - 1)
			frag_len = len % MT_TXP_MAX_LEN;
		else
			frag_len = MT_TXP_MAX_LEN;

		frag = alloc_skb(frag_len, GFP_KERNEL);
		if (!frag) {
			/* freeing head also releases any fragments
			 * already chained to its frag_list
			 */
			dev_kfree_skb(head);
			return -ENOMEM;
		}

		__skb_put_zero(frag, frag_len);
		head->len += frag->len;
		head->data_len += frag->len;

		/* append to the tail of the frag_list chain */
		*frag_tail = frag;
		frag_tail = &(*frag_tail)->next;
	}

	mt76_testmode_free_skb(phy);
	td->tx_skb = head;

	return 0;
}
EXPORT_SYMBOL(mt76_testmode_alloc_skb);

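/*
 * mt76_testmode_tx_init translates the testmode TX parameters into a
 * mac80211 rate entry on the template skb. Only CCK/OFDM/HT/VHT are handled
 * here; the HE modes bail out early, since their rate setup is left to the
 * driver's set_state()/set_params() hooks.
 *
 * Sketch of the legacy index mapping assumed below: on 2 GHz the bitrate
 * table begins with the four CCK entries, which is why an OFDM index is
 * shifted by 4 (e.g. OFDM idx 0 maps to table idx 4), while on 5 GHz the
 * OFDM index is used as-is.
 */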
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	int ret;

	ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
	if (ret)
		return ret;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	info = IEEE80211_SKB_CB(td->tx_skb);
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	return 0;
}

static void
mt76_testmode_tx_start(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

static void
mt76_testmode_tx_stop(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
			   MT76_TM_TIMEOUT * HZ);

	mt76_testmode_free_skb(phy);
}

static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

static void
mt76_testmode_init_defaults(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	if (td->tx_mpdu_len > 0)
		return;

	td->tx_mpdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;
}

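/*
 * State transition helper: any running TX is stopped before the state
 * changes, the template frame is (re)built when entering TX_FRAMES, and a
 * driver error rolls the TX setup back so no frames are left pending. The
 * RX counters are cleared on entry to RX_FRAMES.
 */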
static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES)
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));

	phy->test.state = state;

	return 0;
}

int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_hw *hw = phy->hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(phy, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	bool ext_phy = phy != &dev->phy;
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(phy);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask,
			   1 << (ext_phy * 2), phy->antenna_mask << (ext_phy * 2)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
			   &td->tx_duty_cycle, 0, 99) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_mpdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_IPG])
		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);

	if (tb[MT76_TM_ATTR_TX_TIME])
		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u8(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(phy, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(phy, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

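/*
 * Aggregate the per-RX-queue counters into the single packet/FCS-error
 * totals reported to userspace; driver-specific statistics are appended
 * through the dump_stats hook when the driver provides one.
 */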
static int
mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	if (dev->test_ops->dump_stats)
		return dev->test_ops->dump_stats(phy, msg);

	return 0;
}

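/*
 * mt76_testmode_dump implements the nl80211 testmode dump: a single message
 * carrying either the statistics nest (when MT76_TM_ATTR_STATS is requested)
 * or the current parameter set, where optional parameters are emitted only
 * if they were explicitly configured (tracked via the param_set bitmap).
 */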
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);

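/*
 * Typical control flow, as driven from userspace through the nl80211
 * testmode commands (a hedged sketch based on the checks above; the exact
 * tooling is outside this file):
 *
 *   1. put the interface in monitor mode and bring it up (the state
 *      machine requires MT76_STATE_RUNNING and IEEE80211_CONF_MONITOR)
 *   2. send MT76_TM_ATTR_TX_* parameters together with
 *      MT76_TM_ATTR_STATE = MT76_TM_STATE_TX_FRAMES to start transmitting
 *   3. poll the MT76_TM_ATTR_STATS nest via the dump op
 *   4. set MT76_TM_ATTR_STATE = MT76_TM_STATE_OFF, or pass
 *      MT76_TM_ATTR_RESET, to stop and clear the configuration
 */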