/*
 * Copyright (C) 2016 Lorenzo Bianconi <lorenzo.bianconi83@gmail.com>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include "mt76x02.h"

#define RADAR_SPEC(m, len, el, eh, wl, wh,		\
		   w_tolerance, tl, th, t_tolerance,	\
		   bl, bh, event_exp, power_jmp)	\
{							\
	.mode = m,					\
	.avg_len = len,					\
	.e_low = el,					\
	.e_high = eh,					\
	.w_low = wl,					\
	.w_high = wh,					\
	.w_margin = w_tolerance,			\
	.t_low = tl,					\
	.t_high = th,					\
	.t_margin = t_tolerance,			\
	.b_low = bl,					\
	.b_high = bh,					\
	.event_expiration = event_exp,			\
	.pwr_jmp = power_jmp				\
}

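/*
 * Per-region radar pulse descriptors, grouped as MT_DFS_NUM_ENGINES entries
 * for each channel width (20/40/80 MHz). mt76x02_dfs_set_bbp_params() picks
 * the group matching the current bandwidth and programs one entry into each
 * hardware detection engine.
 */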
static const struct mt76x02_radar_specs etsi_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc)
};

static const struct mt76x02_radar_specs fcc_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 14, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0xfe808, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};

static const struct mt76x02_radar_specs jp_w56_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0x14c080, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0x14c080, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0x14c080, 0x19dd),
	RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0x14c080, 0x12cc),
	RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};

static const struct mt76x02_radar_specs jp_w53_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 }
};

static void
mt76x02_dfs_set_capture_mode_ctrl(struct mt76x02_dev *dev, u8 enable)
{
	u32 data;

	data = (1 << 1) | enable;
	mt76_wr(dev, MT_BBP(DFS, 36), data);
}

static void mt76x02_dfs_seq_pool_put(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_sequence *seq)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	list_add(&seq->head, &dfs_pd->seq_pool);

	dfs_pd->seq_stats.seq_pool_len++;
	dfs_pd->seq_stats.seq_len--;
}

static struct mt76x02_dfs_sequence *
mt76x02_dfs_seq_pool_get(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq;

	if (list_empty(&dfs_pd->seq_pool)) {
		seq = devm_kzalloc(dev->mt76.dev, sizeof(*seq), GFP_ATOMIC);
	} else {
		seq = list_first_entry(&dfs_pd->seq_pool,
				       struct mt76x02_dfs_sequence,
				       head);
		list_del(&seq->head);
		dfs_pd->seq_stats.seq_pool_len--;
	}
	if (seq)
		dfs_pd->seq_stats.seq_len++;

	return seq;
}

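/*
 * Return how many times 'frac' fits into 'val' within 'margin', i.e. whether
 * the measured interval is an integer multiple of the reference PRI, or 0
 * when it is not.
 */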
static int mt76x02_dfs_get_multiple(int val, int frac, int margin)
{
	int remainder, factor;

	if (!frac)
		return 0;

	if (abs(val - frac) <= margin)
		return 1;

	factor = val / frac;
	remainder = val % frac;

	if (remainder > margin) {
		if ((frac - remainder) <= margin)
			factor++;
		else
			factor = 0;
	}
	return factor;
}

static void mt76x02_dfs_detector_reset(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq, *tmp_seq;
	int i;

	/* reset hw detector */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);

	/* reset sw detector */
	for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
		dfs_pd->event_rb[i].h_rb = 0;
		dfs_pd->event_rb[i].t_rb = 0;
	}

	list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
		list_del_init(&seq->head);
		mt76x02_dfs_seq_pool_put(dev, seq);
	}
}

static bool mt76x02_dfs_check_chirp(struct mt76x02_dev *dev)
{
	bool ret = false;
	u32 current_ts, delta_ts;
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	current_ts = mt76_rr(dev, MT_PBF_LIFE_TIMER);
	delta_ts = current_ts - dfs_pd->chirp_pulse_ts;
	dfs_pd->chirp_pulse_ts = current_ts;

	/* 12 sec */
	if (delta_ts <= (12 * (1 << 20))) {
		if (++dfs_pd->chirp_pulse_cnt > 8)
			ret = true;
	} else {
		dfs_pd->chirp_pulse_cnt = 1;
	}

	return ret;
}

static void mt76x02_dfs_get_hw_pulse(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_hw_pulse *pulse)
{
	u32 data;

	/* select channel */
	data = (MT_DFS_CH_EN << 16) | pulse->engine;
	mt76_wr(dev, MT_BBP(DFS, 0), data);

	/* reported period */
	pulse->period = mt76_rr(dev, MT_BBP(DFS, 19));

	/* reported width */
	pulse->w1 = mt76_rr(dev, MT_BBP(DFS, 20));
	pulse->w2 = mt76_rr(dev, MT_BBP(DFS, 23));

	/* reported burst number */
	pulse->burst = mt76_rr(dev, MT_BBP(DFS, 22));
}

static bool mt76x02_dfs_check_hw_pulse(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_hw_pulse *pulse)
{
	bool ret = false;

	if (!pulse->period || !pulse->w1)
		return false;

	switch (dev->dfs_pd.region) {
	case NL80211_DFS_FCC:
		if (pulse->engine > 3)
			break;

		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse */
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 10200) &&
			       pulse->period <= 61600);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       pulse->period <= 61600);
		else
			ret = (pulse->period >= 3500 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_ETSI:
		if (pulse->engine >= 3)
			break;

		ret = (pulse->period >= 4900 &&
		       (pulse->period <= 10200 ||
			pulse->period >= 12400) &&
		       pulse->period <= 100100);
		break;
	case NL80211_DFS_JP:
		if (dev->mt76.chandef.chan->center_freq >= 5250 &&
		    dev->mt76.chandef.chan->center_freq <= 5350) {
			/* JPW53 */
			if (pulse->w1 <= 130)
				ret = (pulse->period >= 28360 &&
				       (pulse->period <= 28700 ||
					pulse->period >= 76900) &&
				       pulse->period <= 76940);
			break;
		}

		if (pulse->engine > 3)
			break;

		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse */
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 10100 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else
			ret = (pulse->period >= 3900 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_UNSET:
	default:
		return false;
	}

	return ret;
}

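/*
 * Software pattern detector: radar events reported by the baseband are read
 * from DFS_R37, filtered, stored in per-engine ring buffers and then grouped
 * into candidate pulse sequences sharing the same PRI.
 */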
static bool mt76x02_dfs_fetch_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	u32 data;

	/* 1st: DFS_R37[31]: 0 (engine 0) - 1 (engine 2)
	 * 2nd: DFS_R37[21:0]: pulse time
	 * 3rd: DFS_R37[11:0]: pulse width
	 * 3rd: DFS_R37[25:16]: phase
	 * 4th: DFS_R37[12:0]: current pwr
	 * 4th: DFS_R37[21:16]: pwr stable counter
	 *
	 * 1st: DFS_R37[31:0] set to 0xffffffff means no event detected
	 */
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	if (!MT_DFS_CHECK_EVENT(data))
		return false;

	event->engine = MT_DFS_EVENT_ENGINE(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->ts = MT_DFS_EVENT_TIMESTAMP(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->width = MT_DFS_EVENT_WIDTH(data);

	return true;
}

static bool mt76x02_dfs_check_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	if (event->engine == 2) {
		struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
		struct mt76x02_dfs_event_rb *event_buff = &dfs_pd->event_rb[1];
		u16 last_event_idx;
		u32 delta_ts;

		last_event_idx = mt76_decr(event_buff->t_rb,
					   MT_DFS_EVENT_BUFLEN);
		delta_ts = event->ts - event_buff->data[last_event_idx].ts;
		if (delta_ts < MT_DFS_EVENT_TIME_MARGIN &&
		    event_buff->data[last_event_idx].width >= 200)
			return false;
	}
	return true;
}

static void mt76x02_dfs_queue_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event_rb *event_buff;

	/* add radar event to ring buffer */
	event_buff = event->engine == 2 ? &dfs_pd->event_rb[1]
					: &dfs_pd->event_rb[0];
	event_buff->data[event_buff->t_rb] = *event;
	event_buff->data[event_buff->t_rb].fetch_ts = jiffies;

	event_buff->t_rb = mt76_incr(event_buff->t_rb, MT_DFS_EVENT_BUFLEN);
	if (event_buff->t_rb == event_buff->h_rb)
		event_buff->h_rb = mt76_incr(event_buff->h_rb,
					     MT_DFS_EVENT_BUFLEN);
}

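/*
 * Pair the new event with previously queued events of similar width whose
 * time delta lies within [min_pri, max_pri], then count how many older
 * events fall on integer multiples of that candidate PRI. Only sequences
 * longer than the current best (cur_len) are stored.
 */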
static int mt76x02_dfs_create_sequence(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_event *event,
				       u16 cur_len)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sw_detector_params *sw_params;
	u32 width_delta, with_sum, factor, cur_pri;
	struct mt76x02_dfs_sequence seq, *seq_p;
	struct mt76x02_dfs_event_rb *event_rb;
	struct mt76x02_dfs_event *cur_event;
	int i, j, end, pri;

	event_rb = event->engine == 2 ? &dfs_pd->event_rb[1]
				      : &dfs_pd->event_rb[0];

	i = mt76_decr(event_rb->t_rb, MT_DFS_EVENT_BUFLEN);
	end = mt76_decr(event_rb->h_rb, MT_DFS_EVENT_BUFLEN);

	while (i != end) {
		cur_event = &event_rb->data[i];
		with_sum = event->width + cur_event->width;

		sw_params = &dfs_pd->sw_dpd_params;
		switch (dev->dfs_pd.region) {
		case NL80211_DFS_FCC:
		case NL80211_DFS_JP:
			if (with_sum < 600)
				width_delta = 8;
			else
				width_delta = with_sum >> 3;
			break;
		case NL80211_DFS_ETSI:
			if (event->engine == 2)
				width_delta = with_sum >> 6;
			else if (with_sum < 620)
				width_delta = 24;
			else
				width_delta = 8;
			break;
		case NL80211_DFS_UNSET:
		default:
			return -EINVAL;
		}

		pri = event->ts - cur_event->ts;
		if (abs(event->width - cur_event->width) > width_delta ||
		    pri < sw_params->min_pri)
			goto next;

		if (pri > sw_params->max_pri)
			break;

		seq.pri = event->ts - cur_event->ts;
		seq.first_ts = cur_event->ts;
		seq.last_ts = event->ts;
		seq.engine = event->engine;
		seq.count = 2;

		j = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
		while (j != end) {
			cur_event = &event_rb->data[j];
			cur_pri = event->ts - cur_event->ts;
			factor = mt76x02_dfs_get_multiple(cur_pri, seq.pri,
							  sw_params->pri_margin);
			if (factor > 0) {
				seq.first_ts = cur_event->ts;
				seq.count++;
			}

			j = mt76_decr(j, MT_DFS_EVENT_BUFLEN);
		}
		if (seq.count <= cur_len)
			goto next;

		seq_p = mt76x02_dfs_seq_pool_get(dev);
		if (!seq_p)
			return -ENOMEM;

		*seq_p = seq;
		INIT_LIST_HEAD(&seq_p->head);
		list_add(&seq_p->head, &dfs_pd->sequences);
next:
		i = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
	}
	return 0;
}

static u16 mt76x02_dfs_add_event_to_sequence(struct mt76x02_dev *dev,
					     struct mt76x02_dfs_event *event)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sw_detector_params *sw_params;
	struct mt76x02_dfs_sequence *seq, *tmp_seq;
	u16 max_seq_len = 0;
	u32 factor, pri;

	sw_params = &dfs_pd->sw_dpd_params;
	list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
		if (event->ts > seq->first_ts + MT_DFS_SEQUENCE_WINDOW) {
			list_del_init(&seq->head);
			mt76x02_dfs_seq_pool_put(dev, seq);
			continue;
		}

		if (event->engine != seq->engine)
			continue;

		pri = event->ts - seq->last_ts;
		factor = mt76x02_dfs_get_multiple(pri, seq->pri,
						  sw_params->pri_margin);
		if (factor > 0) {
			seq->last_ts = event->ts;
			seq->count++;
			max_seq_len = max_t(u16, max_seq_len, seq->count);
		}
	}
	return max_seq_len;
}

static bool mt76x02_dfs_check_detection(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_sequence *seq;

	if (list_empty(&dfs_pd->sequences))
		return false;

	list_for_each_entry(seq, &dfs_pd->sequences, head) {
		if (seq->count > MT_DFS_SEQUENCE_TH) {
			dfs_pd->stats[seq->engine].sw_pattern++;
			return true;
		}
	}
	return false;
}

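/*
 * Drain the hardware event FIFO (at most MT_DFS_EVENT_LOOP entries per run)
 * and feed each event to the software sequence detector; event capture is
 * turned off while the FIFO is being read and re-enabled afterwards.
 */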
static void mt76x02_dfs_add_events(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event event;
	int i, seq_len;

	/* disable debug mode */
	mt76x02_dfs_set_capture_mode_ctrl(dev, false);
	for (i = 0; i < MT_DFS_EVENT_LOOP; i++) {
		if (!mt76x02_dfs_fetch_event(dev, &event))
			break;

		if (dfs_pd->last_event_ts > event.ts)
			mt76x02_dfs_detector_reset(dev);
		dfs_pd->last_event_ts = event.ts;

		if (!mt76x02_dfs_check_event(dev, &event))
			continue;

		seq_len = mt76x02_dfs_add_event_to_sequence(dev, &event);
		mt76x02_dfs_create_sequence(dev, &event, seq_len);

		mt76x02_dfs_queue_event(dev, &event);
	}
	mt76x02_dfs_set_capture_mode_ctrl(dev, true);
}

static void mt76x02_dfs_check_event_window(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	struct mt76x02_dfs_event_rb *event_buff;
	struct mt76x02_dfs_event *event;
	int i;

	for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
		event_buff = &dfs_pd->event_rb[i];

		while (event_buff->h_rb != event_buff->t_rb) {
			event = &event_buff->data[event_buff->h_rb];

			/* sorted list */
			if (time_is_after_jiffies(event->fetch_ts +
						  MT_DFS_EVENT_WINDOW))
				break;
			event_buff->h_rb = mt76_incr(event_buff->h_rb,
						     MT_DFS_EVENT_BUFLEN);
		}
	}
}

static void mt76x02_dfs_tasklet(unsigned long arg)
{
	struct mt76x02_dev *dev = (struct mt76x02_dev *)arg;
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
	u32 engine_mask;
	int i;

	if (test_bit(MT76_SCANNING, &dev->mt76.state))
		goto out;

	if (time_is_before_jiffies(dfs_pd->last_sw_check +
				   MT_DFS_SW_TIMEOUT)) {
		bool radar_detected;

		dfs_pd->last_sw_check = jiffies;

		mt76x02_dfs_add_events(dev);
		radar_detected = mt76x02_dfs_check_detection(dev);
		if (radar_detected) {
			/* sw detector rx radar pattern */
			ieee80211_radar_detected(dev->mt76.hw);
			mt76x02_dfs_detector_reset(dev);

			return;
		}
		mt76x02_dfs_check_event_window(dev);
	}

	engine_mask = mt76_rr(dev, MT_BBP(DFS, 1));
	if (!(engine_mask & 0xf))
		goto out;

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		struct mt76x02_dfs_hw_pulse pulse;

		if (!(engine_mask & (1 << i)))
			continue;

		pulse.engine = i;
		mt76x02_dfs_get_hw_pulse(dev, &pulse);

		if (!mt76x02_dfs_check_hw_pulse(dev, &pulse)) {
			dfs_pd->stats[i].hw_pulse_discarded++;
			continue;
		}

		/* hw detector rx radar pattern */
		dfs_pd->stats[i].hw_pattern++;
		ieee80211_radar_detected(dev->mt76.hw);
		mt76x02_dfs_detector_reset(dev);

		return;
	}

	/* reset hw detector */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);

out:
	mt76x02_irq_enable(dev, MT_INT_GPTIMER);
}

static void mt76x02_dfs_init_sw_detector(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	switch (dev->dfs_pd.region) {
	case NL80211_DFS_FCC:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_FCC_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_FCC_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
		break;
	case NL80211_DFS_ETSI:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_ETSI_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_ETSI_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN << 2;
		break;
	case NL80211_DFS_JP:
		dfs_pd->sw_dpd_params.max_pri = MT_DFS_JP_MAX_PRI;
		dfs_pd->sw_dpd_params.min_pri = MT_DFS_JP_MIN_PRI;
		dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
		break;
	case NL80211_DFS_UNSET:
	default:
		break;
	}
}

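/*
 * Program the baseband DFS engines with the radar specs matching the current
 * regulatory region and channel width, then re-arm hardware detection.
 */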
static void mt76x02_dfs_set_bbp_params(struct mt76x02_dev *dev)
{
	const struct mt76x02_radar_specs *radar_specs;
	u8 i, shift;
	u32 data;

	switch (dev->mt76.chandef.width) {
	case NL80211_CHAN_WIDTH_40:
		shift = MT_DFS_NUM_ENGINES;
		break;
	case NL80211_CHAN_WIDTH_80:
		shift = 2 * MT_DFS_NUM_ENGINES;
		break;
	default:
		shift = 0;
		break;
	}

	switch (dev->dfs_pd.region) {
	case NL80211_DFS_FCC:
		radar_specs = &fcc_radar_specs[shift];
		break;
	case NL80211_DFS_ETSI:
		radar_specs = &etsi_radar_specs[shift];
		break;
	case NL80211_DFS_JP:
		if (dev->mt76.chandef.chan->center_freq >= 5250 &&
		    dev->mt76.chandef.chan->center_freq <= 5350)
			radar_specs = &jp_w53_radar_specs[shift];
		else
			radar_specs = &jp_w56_radar_specs[shift];
		break;
	case NL80211_DFS_UNSET:
	default:
		return;
	}

	data = (MT_DFS_VGA_MASK << 16) |
	       (MT_DFS_PWR_GAIN_OFFSET << 12) |
	       (MT_DFS_PWR_DOWN_TIME << 8) |
	       (MT_DFS_SYM_ROUND << 4) |
	       (MT_DFS_DELTA_DELAY & 0xf);
	mt76_wr(dev, MT_BBP(DFS, 2), data);

	data = (MT_DFS_RX_PE_MASK << 16) | MT_DFS_PKT_END_MASK;
	mt76_wr(dev, MT_BBP(DFS, 3), data);

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		/* configure engine */
		mt76_wr(dev, MT_BBP(DFS, 0), i);

		/* detection mode + avg_len */
		data = ((radar_specs[i].avg_len & 0x1ff) << 16) |
		       (radar_specs[i].mode & 0xf);
		mt76_wr(dev, MT_BBP(DFS, 4), data);

		/* dfs energy */
		data = ((radar_specs[i].e_high & 0x0fff) << 16) |
		       (radar_specs[i].e_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 5), data);

		/* dfs period */
		mt76_wr(dev, MT_BBP(DFS, 7), radar_specs[i].t_low);
		mt76_wr(dev, MT_BBP(DFS, 9), radar_specs[i].t_high);

		/* dfs burst */
		mt76_wr(dev, MT_BBP(DFS, 11), radar_specs[i].b_low);
		mt76_wr(dev, MT_BBP(DFS, 13), radar_specs[i].b_high);

		/* dfs width */
		data = ((radar_specs[i].w_high & 0x0fff) << 16) |
		       (radar_specs[i].w_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 14), data);

		/* dfs margins */
		data = (radar_specs[i].w_margin << 16) |
		       radar_specs[i].t_margin;
		mt76_wr(dev, MT_BBP(DFS, 15), data);

		/* dfs event expiration */
		mt76_wr(dev, MT_BBP(DFS, 17), radar_specs[i].event_expiration);

		/* dfs pwr adj */
		mt76_wr(dev, MT_BBP(DFS, 30), radar_specs[i].pwr_jmp);
	}

	/* reset status */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
	mt76_wr(dev, MT_BBP(DFS, 36), 0x3);

	/* enable detection */
	mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	mt76_wr(dev, MT_BBP(IBI, 11), 0x0c350001);
}

void mt76x02_phy_dfs_adjust_agc(struct mt76x02_dev *dev)
{
	u32 agc_r8, agc_r4, val_r8, val_r4, dfs_r31;

	agc_r8 = mt76_rr(dev, MT_BBP(AGC, 8));
	agc_r4 = mt76_rr(dev, MT_BBP(AGC, 4));

	val_r8 = (agc_r8 & 0x00007e00) >> 9;
	val_r4 = agc_r4 & ~0x1f000000;
	val_r4 += (((val_r8 + 1) >> 1) << 24);
	mt76_wr(dev, MT_BBP(AGC, 4), val_r4);

	dfs_r31 = FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN, val_r4);
	dfs_r31 += val_r8;
	dfs_r31 -= (agc_r8 & 0x00000038) >> 3;
	dfs_r31 = (dfs_r31 << 16) | 0x00000307;
	mt76_wr(dev, MT_BBP(DFS, 31), dfs_r31);

	if (is_mt76x2(dev)) {
		mt76_wr(dev, MT_BBP(DFS, 32), 0x00040071);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* enable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_phy_dfs_adjust_agc);

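/*
 * Enable the hardware engines and the GP-timer driven software detector when
 * the current channel requires DFS and a regulatory region is set; otherwise
 * disable radar detection entirely.
 */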
void mt76x02_dfs_init_params(struct mt76x02_dev *dev)
{
	struct cfg80211_chan_def *chandef = &dev->mt76.chandef;

	if ((chandef->chan->flags & IEEE80211_CHAN_RADAR) &&
	    dev->dfs_pd.region != NL80211_DFS_UNSET) {
		mt76x02_dfs_init_sw_detector(dev);
		mt76x02_dfs_set_bbp_params(dev);
		/* enable debug mode */
		mt76x02_dfs_set_capture_mode_ctrl(dev, true);

		mt76x02_irq_enable(dev, MT_INT_GPTIMER);
		mt76_rmw_field(dev, MT_INT_TIMER_EN,
			       MT_INT_TIMER_EN_GP_TIMER_EN, 1);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* clear detector status */
		mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
		if (mt76_chip(&dev->mt76) == 0x7610 ||
		    mt76_chip(&dev->mt76) == 0x7630)
			mt76_wr(dev, MT_BBP(IBI, 11), 0xfde8081);
		else
			mt76_wr(dev, MT_BBP(IBI, 11), 0);

		mt76x02_irq_disable(dev, MT_INT_GPTIMER);
		mt76_rmw_field(dev, MT_INT_TIMER_EN,
			       MT_INT_TIMER_EN_GP_TIMER_EN, 0);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_dfs_init_params);

void mt76x02_dfs_init_detector(struct mt76x02_dev *dev)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	INIT_LIST_HEAD(&dfs_pd->sequences);
	INIT_LIST_HEAD(&dfs_pd->seq_pool);
	dfs_pd->region = NL80211_DFS_UNSET;
	dfs_pd->last_sw_check = jiffies;
	tasklet_init(&dfs_pd->dfs_tasklet, mt76x02_dfs_tasklet,
		     (unsigned long)dev);
}

static void
mt76x02_dfs_set_domain(struct mt76x02_dev *dev,
		       enum nl80211_dfs_regions region)
{
	struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;

	mutex_lock(&dev->mt76.mutex);
	if (dfs_pd->region != region) {
		tasklet_disable(&dfs_pd->dfs_tasklet);

		dev->ed_monitor = region == NL80211_DFS_ETSI;
		mt76x02_edcca_init(dev, true);

		dfs_pd->region = region;
		mt76x02_dfs_init_params(dev);
		tasklet_enable(&dfs_pd->dfs_tasklet);
	}
	mutex_unlock(&dev->mt76.mutex);
}

void mt76x02_regd_notifier(struct wiphy *wiphy,
			   struct regulatory_request *request)
{
	struct ieee80211_hw *hw = wiphy_to_ieee80211_hw(wiphy);
	struct mt76x02_dev *dev = hw->priv;

	mt76x02_dfs_set_domain(dev, request->dfs_region);
}