// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
#include "mt76.h"

static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
};

void mt76_testmode_tx_pending(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	u16 tx_queued_limit;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 &&
	       td->tx_queued - td->tx_done < tx_queued_limit &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, q, skb_get(skb), wcid,
						   NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}
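/*
 * Build the template frame used for testmode TX: a zero-filled data
 * frame (FromDS) of td->tx_msdu_len bytes, with all three addresses
 * set to the local MAC. For CCK/OFDM/HT/VHT the configured rate
 * parameters are validated and written into the tx_info rate table;
 * modes above VHT (e.g. HE) skip the rate table and are left to the
 * driver's own rate handling.
 */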
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	struct sk_buff *skb;
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	bool ext_phy = phy != &phy->dev->phy;

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	skb = alloc_skb(td->tx_msdu_len, GFP_KERNEL);
	if (!skb)
		return -ENOMEM;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = skb;
	hdr = __skb_put_zero(skb, td->tx_msdu_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr2, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr3, phy->macaddr, sizeof(phy->macaddr));

	info = IEEE80211_SKB_CB(skb);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	if (ext_phy)
		info->hw_queue |= MT_TX_HW_QUEUE_EXT_PHY;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	skb_set_queue_mapping(skb, IEEE80211_AC_BE);

	return 0;
}

static void
mt76_testmode_tx_start(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

static void
mt76_testmode_tx_stop(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
			   MT76_TM_TIMEOUT * HZ);

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}

static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

static void
mt76_testmode_init_defaults(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	if (td->tx_msdu_len > 0)
		return;

	td->tx_msdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;
}

static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES)
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));

	phy->test.state = state;

	return 0;
}
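/*
 * All state changes requested from userspace go through this wrapper.
 * Unless the current or the requested state is IDLE, the change is
 * split into an intermediate transition to IDLE, so the driver sees a
 * clean stop of the previous mode before the new one is started.
 */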
int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_hw *hw = phy->hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(phy, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	bool ext_phy = phy != &dev->phy;
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(phy);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > IEEE80211_MAX_FRAME_LEN ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_msdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask,
			   1 << (ext_phy * 2), phy->antenna_mask << (ext_phy * 2)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
			   &td->tx_duty_cycle, 0, 99) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_TX_IPG])
		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);

	if (tb[MT76_TM_ATTR_TX_TIME])
		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u8(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}
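	/*
	 * MT76_TM_ATTR_TX_POWER is a nested attribute carrying one u8
	 * per TX chain; entries that are not exactly one byte long or
	 * that would overflow td->tx_power are rejected.
	 */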
	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(phy, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(phy, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

static int
mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	if (dev->test_ops->dump_stats)
		return dev->test_ops->dump_stats(phy, msg);

	return 0;
}
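/*
 * Dump either the current testmode configuration or, if the caller set
 * MT76_TM_ATTR_STATS, the accumulated TX/RX statistics. cb->args[2] is
 * used as a one-shot marker so the netlink dump terminates after a
 * single message.
 */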
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_msdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);